initial import of Terrasaur code
9
.gitignore
vendored
Normal file
@@ -0,0 +1,9 @@
/.idea/
/bin/
/doc/_build
/dist/
/target/
/3rd-party
.DS_Store
doc/tools/shortDescriptions.rst
doc/tools/index.rst
8
CHANGELOG.md
Normal file
@@ -0,0 +1,8 @@
# SBCLT Changelog

Repository is https://gitlab.jhuapl.edu/sbmt/sbclt/command-line-tools. The Git hash for each merge to the main branch (except the most recent) is listed below in reverse chronological order.

## November 26, 2024

- Initial release
59
README.md
@@ -1 +1,60 @@
# Terrasaur

Terrasaur is a suite of programs written in Java and C++. These stand-alone
command line analysis and shape model manipulation programs complement the
[Small Body Mapping Tool](https://sbmt.jhuapl.edu/) and create data products that
are well suited to being visualized in the SBMT GUI. Among other functions,
these programs facilitate the building of digital terrain models (DTMs) of
small bodies, permit assessing the quality of these DTMs, and construct a
broad suite of DTM products that characterize the geophysical and surface
properties of small bodies.

## Quick Start

### Prerequisites

The Terrasaur package requires Java 21 or later. Some freely available
distributions are

* [Amazon Corretto](https://aws.amazon.com/corretto/)
* [Azul Zulu](https://www.azul.com/downloads/?package=jdk)
* [Eclipse Temurin](https://adoptium.net/)
* [OpenJDK](https://jdk.java.net/). Most Linux distributions and Homebrew have OpenJDK packages.
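
You can check which Java version is on your `PATH` before building or running
anything (this mirrors the version check that `mkPackage.bash` performs):

    java -version 2>&1 | head -1
    # expect something like: openjdk version "21.0.5" ...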

### Build the package

This step is only needed if you don't want to use a prebuilt
package. Check out the code:

    git clone https://github.com/JHUAPL/Terrasaur.git

The 3rd party executables and libraries are assumed to exist in
`3rd-party/$(uname -s)_$(uname -m)` (e.g. `3rd-party/Darwin_x86_64` on an
Intel macOS machine). The script to build the 3rd party products is in the
`support-libraries` directory:

    cd support-libraries
    ./buildAll.bash ../3rd-party/$(uname -s)_$(uname -m)

Maven must be installed to build the software. Once the `3rd-party`
directory has been built, compile the Terrasaur package using the
`mkPackage.bash` script:

    ./mkPackage.bash

This will create executable and source packages in the `dist/` directory,
named `Terrasaur-YYYY.MM.DD-<rev>_<arch>.tar.gz` and
`Terrasaur-YYYY.MM.DD-<rev>-src.tar.gz`, where `<rev>` is the short git
revision and `<arch>` is the value of `$(uname -s)_$(uname -m)`.

[Sphinx](https://www.sphinx-doc.org/en/master/) with the
[PD](https://sphinx-themes.org/sample-sites/sphinx-theme-pd/)
theme is used to create the documentation in the `doc/` folder.
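
To rebuild the documentation by hand, here is a minimal sketch of what
`mkPackage.bash` does internally (assuming Python 3 with `pip`, and an
unpacked `Terrasaur-YYYY.MM.DD/scripts` directory to document):

    python3 -m pip install -U sphinx sphinx-theme-pd
    cd doc
    ./make_doc.bash ../Terrasaur-YYYY.MM.DD/scripts
    sphinx-build -b html . _build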

### Install the executable package

    cd (your destination directory)
    tar xfz (path to Terrasaur)/dist/Terrasaur-YYYY.MM.DD.tar.gz

The `scripts/` directory contains all of the applications in the
package. Running any application without arguments will display a usage
message.
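
For example, after unpacking (the tool name is any of the scripts shipped in
`scripts/`):

    ./Terrasaur-YYYY.MM.DD/scripts/CompareOBJ
    # prints the CompareOBJ usage message and exits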

The `doc/` directory contains documentation, including a javadoc
directory.
20
doc/Makefile
Normal file
@@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS    ?=
SPHINXBUILD   ?= sphinx-build
SOURCEDIR     = .
BUILDDIR      = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
13
doc/_static/style.css
vendored
Normal file
@@ -0,0 +1,13 @@
.wy-nav-content {
    max-width: 100%;
}

.figures-row {
    display: flex;
    justify-content: space-between;
}
.figures-row img {
    max-width: 90%; /* Adjust percentage as needed */
    height: auto;   /* Maintains aspect ratio */
}
4
doc/_templates/layout.html
vendored
Normal file
@@ -0,0 +1,4 @@
{% extends "!layout.html" %}
{% block extrahead %}
<link href="{{ pathto("_static/style.css", True) }}" rel="stylesheet" type="text/css">
{% endblock %}
62
doc/conf.py
Normal file
@@ -0,0 +1,62 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Themes used -------------------------------------------------------------

import sphinx_theme_pd

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))


# -- Project information -----------------------------------------------------

project = 'Terrasaur'
copyright = '2024, Johns Hopkins University Applied Physics Laboratory'
author = 'Hari.Nair@jhuapl.edu'
highlight_language = 'none'

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
html_theme = 'sphinx_theme_pd'
html_theme_path = [sphinx_theme_pd.get_html_theme_path()]

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
#html_style = 'style.css'


def setup(app):
    app.add_css_file('style.css')
BIN
doc/dist/Terrasaur-2025.03.03-e1a0e14-src.tar.gz
vendored
Normal file
BIN
doc/dist/Terrasaur-2025.03.03-e1a0e14_*.tar.gz
vendored
Normal file
54
doc/index.rst
Normal file
@@ -0,0 +1,54 @@
Terrasaur documentation
=======================

This package contains the command line analysis tools in Terrasaur.

Prerequisites
~~~~~~~~~~~~~

The Terrasaur package requires Java 21 or later. Some freely available
distributions are

* `Amazon Corretto <https://aws.amazon.com/corretto/>`__
* `Azul Zulu <https://www.azul.com/downloads/?package=jdk>`__
* `Eclipse Temurin <https://adoptium.net/>`__
* `OpenJDK <https://jdk.java.net/>`__. Most Linux distributions and Homebrew have OpenJDK packages.

Download
~~~~~~~~

Packages for use on Mac OS X and Linux are available at ...

Windows users may use the Linux package with the `Windows Subsystem for Linux <https://docs.microsoft.com/en-us/windows/wsl/>`__.

Install
~~~~~~~

::

   cd (your destination directory)
   tar xfz Terrasaur-YYYY.MM.DD.tar.gz

The scripts/ directory contains all of the applications in the package. Running any application without arguments will display a usage message.

The doc/ directory contains documentation, including a javadoc directory.

Documentation and Examples
~~~~~~~~~~~~~~~~~~~~~~~~~~

`This <tools/index.html>`__ page shows the usage of each utility.

Examples for use are shown at ...

Build from source
~~~~~~~~~~~~~~~~~

Source code is available at ...

Third party libraries are available at ...

Contents
~~~~~~~~

.. toctree::

   tools/index
35
doc/make.bat
Normal file
@@ -0,0 +1,35 @@
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build

if "%1" == "" goto help

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd
88
doc/make_doc.bash
Executable file
@@ -0,0 +1,88 @@
#!/bin/bash

# This program generates a file containing documentation of all
# programs in this software system by running all programs with no
# arguments and piping the usage output to a file.

if [ -z "$1" ]; then
  echo "usage: $0 scriptDir"
  echo "e.g. $0 ../Terrasaur-2024.12.21/scripts"
  exit 0
fi

rootDir=$(
  cd "$(dirname "$0")"
  pwd -P
)
scriptDir=$1

docfile=${rootDir}/tools/shortDescriptions.rst
indexfile=${rootDir}/tools/index.rst
rm -f "$docfile" "$indexfile"

cat >>"$indexfile" <<EOF
===========
Tools Index
===========

EOF

cat >>"$docfile" <<EOF
==================
Terrasaur Programs
==================

EOF

programsToSkip=()

# note: -maxdepth must precede -type for a warning-free find
for f in $(find "${scriptDir}" -maxdepth 1 -type f | sort -f); do
  f=$(basename "$f")
  flink=$(echo "$f" | awk '{print tolower($0)}' | sed -e 's/[^[:alnum:]|-]/\-/g')

  skip=0
  # Ignore programs that begin with a lowercase letter or the string "Immutable"
  if [[ "$f" =~ ^([a-z].*|Immutable.*) ]]; then
    skip=1
  fi

  for program in "${programsToSkip[@]}"; do
    if [[ "$f" == "$program" ]]; then
      skip=1
      break
    fi
  done

  if [ $skip -eq 1 ]; then
    echo "Skipping $f"
    continue
  fi

  echo "Generating documentation for $f"

  shortDescription=$("${scriptDir}"/"$f" -shortDescription)

  mkdir -p toolDescriptions
  "${scriptDir}"/"$f" >toolDescriptions/"${f}".txt

  if [ -e tools/"${f}".rst ]; then
    cat >>"$indexfile" <<EOF
:doc:\`$f\`: $shortDescription

EOF
  else
    cat >>"$indexfile" <<EOF
\`$f <shortDescriptions.html#${flink}>\`__: $shortDescription

EOF
    cat >>"$docfile" <<EOF
$f
$(printf '=%.0s' {1..100})

.. include:: ../toolDescriptions/${f}.txt
   :literal:


EOF
  fi
done
133
doc/tools/CompareOBJ.rst
Normal file
@@ -0,0 +1,133 @@
.. _CompareOBJ:

##########
CompareOBJ
##########

*****
Usage
*****

CompareOBJ reports the differences between two OBJ shape files. It can also find the
optimal rotation and/or translation that best fits one model to the other.

.. include:: ../toolDescriptions/CompareOBJ.txt
   :literal:

********
Examples
********

Local Model Comparison
######################

Download the :download:`reference <./support_files/EVAL20_wtr.obj>` and :download:`comparison <./support_files/EVAL20.obj>`
shape models. You can view them in a tool such as
`ParaView <https://www.paraview.org/>`__.

.. figure:: images/CompareOBJ_local_1.png

   This image shows the reference (pink) and comparison (grey) shape models.

Run CompareOBJ to find the optimal transform to align the comparison with the reference:

::

   CompareOBJ -computeOptimalRotationAndTranslation -model F3H-1/EVAL20.obj \
       -reference F3H-1/EVAL20_wtr.obj -computeVerticalError verticalError.txt \
       -saveOptimalShape optimal.obj -savePlateDiff plateDiff.txt -savePlateIndex plateIndex.txt

The screen output is

::

   Translation: 4.62205663e-04,7.83045454e-04,-2.29286045e-04
   Rotation quaternion: 9.99865353e-01,5.09597815e-03,1.50904931e-02,3.94767030e-03
   Rotation angle (degrees) and axis: 1.88048990e+00,3.10547953e-01,9.19611823e-01,2.40570289e-01
   Center of rotation: -2.80636488e-02,2.59626158e-01,-3.68889950e-02
   4x4 Transformation matrix:
   9.99513386e-01 -7.74047588e-03 3.02171569e-02 4.62205663e-04
   8.04807917e-03 9.99916894e-01 -1.00714394e-02 7.83045454e-04
   -3.01366880e-02 1.03097286e-02 9.99492616e-01 -2.29286045e-04
   0.00000000e+00 0.00000000e+00 0.00000000e+00 1.00000000e+00
   Using 319740 of 319740 points (excluding 0.0% largest distances)
   Min Distance: 2.7101076120651766E-11
   Max Distance: 1.3343678431994644E-5
   Mean Distance: 3.6381274026976392E-6
   Mean Square Distance: 1.9828385012474584E-11
   Root Mean Square Distance: 4.452907478544168E-6
   Mean Vertical Distance: 3.643513246881461E-6
   Mean Square Vertical Distance: 1.9878841624828557E-11
   Root Mean Square Vertical Distance: 4.458569459459901E-6
   Direction perpendicular to plane: 0.1308102982873679 0.9899926953698763 0.05294458401250757
   Magnitude of projection perpendicular to plane: 8.235310856890277E-4
   Projection vector of translation parallel to plane: 3.544793163646046E-4 -3.2244305568425196E-5 -2.728875559193712E-4
   Magnitude of projection vector of translation parallel to plane: 4.485118718022471E-4
   319740 plates used in error calculation out of 320000 total in the shape model


.. figure:: images/CompareOBJ_local_2.png

   This image shows the reference (pink), comparison (grey), and transformed comparison (blue) shape models.

Global Model Comparison
#######################

Use the :download:`low resolution Bennu <./support_files/Bennu49k.obj>` shape model for this
example. We will use the ShapeFormatConverter tool to create a transformed
model for comparison:

::

   ShapeFormatConverter -input Bennu/Bennu49k.obj -output BennuComparison.obj \
       -rotate 5,0,0,1 -translate 0.01,-0.01,0.01

This rotates the shape model by 5 degrees about the z axis and then translates it
by the vector [0.01, -0.01, 0.01]. The units and frame are the same as those used in
the OBJ file, namely km and IAU_BENNU.

.. figure:: images/CompareOBJ_global_1.png

   This image shows the reference (pink) and comparison (grey, 50% opacity) shape models.

Run CompareOBJ to find the optimal transform to align the comparison with the reference:

::

   CompareOBJ -computeOptimalRotationAndTranslation \
       -model BennuComparison.obj \
       -reference Bennu/Bennu49k.obj \
       -computeVerticalError CompareOBJ/terrasaur-verticalError.txt \
       -saveOptimalShape CompareOBJ/terrasaur-optimal.obj \
       -savePlateDiff CompareOBJ/terrasaur-plateDiff.txt \
       -savePlateIndex CompareOBJ/terrasaur-plateIndex.txt

The screen output is

::

   Translation: -9.96162524e-03,7.99729762e-03,-1.00000044e-02
   Rotation quaternion: 9.99048223e-01,-2.15657319e-08,-3.22625083e-08,4.36193625e-02
   Rotation angle (degrees) and axis: 4.99999715e+00,-4.94407314e-07,-7.39637317e-07,1.00000000e+00
   Center of rotation: -1.29539855e-02,-9.43799800e-03,8.39669000e-03
   4x4 Transformation matrix:
   9.96194702e-01 -8.71556932e-02 -6.63449701e-08 -9.96162524e-03
   8.71556932e-02 9.96194702e-01 4.02758721e-08 7.99729762e-03
   6.25822362e-08 -4.59049522e-08 1.00000000e+00 -1.00000044e-02
   0.00000000e+00 0.00000000e+00 0.00000000e+00 1.00000000e+00
   Using 49152 of 49152 points (excluding 0.0% largest distances)
   Min Distance: 0.0
   Max Distance: 3.563071622544315E-8
   Mean Distance: 7.04333531909585E-9
   Mean Square Distance: 7.536457599802241E-17
   Root Mean Square Distance: 8.681277325257063E-9
   Mean Vertical Distance: 7.043335319394247E-9
   Mean Square Vertical Distance: 7.536457599439208E-17
   Root Mean Square Vertical Distance: 8.681277325047973E-9
   Direction perpendicular to plane: 0.6497386220463811 0.7600912355533176 -0.010051699174815498
   Magnitude of projection perpendicular to plane: -2.9325978832850564E-4
   Projection vector of translation parallel to plane: -0.00977108302826207 0.00822020181894647 -0.010002952125253225
   Magnitude of projection vector of translation parallel to plane: 0.016220506549103512
   49152 plates used in error calculation out of 49152 total in the shape model

.. figure:: images/CompareOBJ_global_2.png

   This image shows the reference (pink), comparison (grey, 50% opacity), and transformed comparison (blue) shape models.
17
doc/tools/CreateSBMTStructure.rst
Normal file
@@ -0,0 +1,17 @@
###################
CreateSBMTStructure
###################

*****
Usage
*****

CreateSBMTStructure will create an SBMT Ellipse file given a set of points
on the surface. Each set of three points generates an ellipse: the first two
points define the long axis, and the third point defines the short axis.


.. include:: ../toolDescriptions/CreateSBMTStructure.txt
   :literal:
8
doc/tools/DumpConfig.rst
Normal file
@@ -0,0 +1,8 @@
.. _DumpConfig:

==========
DumpConfig
==========

.. include:: ../toolDescriptions/DumpConfig.txt
   :literal:
91
doc/tools/RenderShapeFromSumFile.rst
Normal file
@@ -0,0 +1,91 @@
.. _RenderShapeFromSumFile:

######################
RenderShapeFromSumFile
######################

*****
Usage
*****

RenderShapeFromSumFile generates a simulated image from a sumfile.

.. include:: ../toolDescriptions/RenderShapeFromSumFile.txt
   :literal:

********
Examples
********

:download:`This <./support_files/g_12570mm_alt_obj_0000n00000_v014.obj>`
is a very low resolution shape model of Bennu from the OSIRIS-REx mission. You can substitute higher resolution
models from ???. You will also need :download:`M605862153F5.SUM <support_files/M605862153F5.SUM>`, which contains
the observation geometry we will use to create a simulated image.

Run RenderShapeFromSumFile:

::

   RenderShapeFromSumFile -model g_12570mm_alt_obj_0000n00000_v014.obj -sumFile M605862153F5.SUM -output M605862153F5.png

.. container:: figures-row

   .. figure:: images/M605862153F5_12570.png
      :alt: This simulated image uses the supplied 12 m/pixel shape model.

      This simulated image uses the supplied 12 m/pixel shape model.

   .. figure:: images/M605862153F5_00870.png
      :alt: This simulated image uses an 87 cm/pixel shape model.

      This simulated image uses an 87 cm/pixel shape model.

   .. figure:: images/ocams20190314t190123s972_map_iofl2pan_77236.png
      :alt: This is the actual image from MAPCAM.

      This is the actual image from MAPCAM.

Generate a FITS file. The FITS header contains additional information, such as the illumination
angles and range for each pixel in the image:

::

   RenderShapeFromSumFile -model g_12570mm_alt_obj_0000n00000_v014.obj -sumFile M605862153F5.SUM -output M605862153F5.fits

Here is the header from the FITS file:

::

   SIMPLE = T / Java FITS: Wed Jan 15 14:53:52 EST 2025
   BITPIX = -64 / bits per data element
   NAXIS = 3 / dimensionality of data
   NAXIS1 = 2048 / n'th data dimension
   NAXIS2 = 2048 / n'th data dimension
   NAXIS3 = 11 / n'th data dimension
   EXTEND = T / allow extensions
   UTC = '2019 MAR 14 19:01:23.961' / Time from the SUM file
   TITLE = 'M605862153F5' / Title of SUM file
   PLANE1 = 'brightness' / from 0 to 1
   PLANE2 = 'incidence' / degrees
   PLANE3 = 'emission' / degrees
   PLANE4 = 'phase ' / degrees
   PLANE5 = 'range ' / kilometers
   PLANE6 = 'facetX ' / kilometers
   PLANE7 = 'facetY ' / kilometers
   PLANE8 = 'facetZ ' / kilometers
   PLANE9 = 'normalX ' / X component of unit normal
   PLANE10 = 'normalY ' / Y component of unit normal
   PLANE11 = 'normalZ ' / Z component of unit normal
   MMFL = 125.1963 / From SUM file
   SCOBJ = '{1.4378777439; 3.5947718256; 0.1647121385}' / From SUM file
   CX = '{-0.0219390994; -0.0187737183; 0.9995830248}' / From SUM file
   CY = '{0.9129225176; -0.407945255; 0.0123752087}' / From SUM file
   CZ = '{0.4075428232; 0.9128133525; 0.0260889017}' / From SUM file
   SZ = '{-0.4941045298; -0.8687927376; 0.0325559925}' / From SUM file
   KMAT1 = '{117.647; 0; 0}' / From SUM file
   KMAT2 = '{0; -117.636; 0}' / From SUM file
   DIST = '[0.0, 0.0, 0.0, 0.0]' / From SUM file
   SIGVSO = '{1; 1; 1}' / From SUM file
   SIGPTG = '{1; 1; 1}' / From SUM file
   END
46
doc/tools/ShapeFormatConverter.rst
Normal file
@@ -0,0 +1,46 @@
.. _ShapeFormatConverter:

####################
ShapeFormatConverter
####################

*****
Usage
*****

ShapeFormatConverter can transform and convert shape models. Supported input formats are
icq, fits, llr, obj, pds, plt, ply, sbmt, stl, sum, and vtk. Supported output formats are
obj, plt, sbmt, stl, sum, and vtk.

.. include:: ../toolDescriptions/ShapeFormatConverter.txt
   :literal:
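
A bare format conversion needs only ``-input`` and ``-output``; the examples
below suggest that the output format follows the output file's extension.
A sketch with hypothetical file names:

::

   ShapeFormatConverter -input model.obj -output model.vtk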

********
Examples
********

Download :download:`rectangular_cuboid.obj <./support_files/rectangular_cuboid.obj>`. This is a rectangular
cuboid centered on the origin with dimension 2 in the X direction, 4 in the Y direction,
and 8 in the Z direction.

Turn it into a cube and save it as an STL file (scaling the 2 × 4 × 8 cuboid by
factors of 1, 0.5, and 0.25 yields a 2 × 2 × 2 cube):

::

   ShapeFormatConverter -input rectangular_cuboid.obj -output cube.stl -scale 1,0.5,0.25

.. figure:: images/ShapeFormatConverter_cube.png

   This image shows the original shape and the resized cube (pink).


Rotate it by 90 degrees around (1, 1, 1):

::

   ShapeFormatConverter -input rectangular_cuboid.obj -output rotated.stl -rotate 90,1,1,1

.. figure:: images/ShapeFormatConverter_rotated.png

   This image shows the original shape and the rotated shape (pink).
BIN
doc/tools/images/ColorSpots-n.png
Normal file
BIN
doc/tools/images/CompareOBJ_global_1.png
Normal file
BIN
doc/tools/images/CompareOBJ_global_2.png
Normal file
BIN
doc/tools/images/CompareOBJ_local_1.png
Normal file
BIN
doc/tools/images/CompareOBJ_local_2.png
Normal file
BIN
doc/tools/images/M605862153F5_00870.png
Normal file
BIN
doc/tools/images/M605862153F5_03240.png
Normal file
BIN
doc/tools/images/M605862153F5_12570.png
Normal file
BIN
doc/tools/images/ShapeFormatConverter_cube.png
Normal file
BIN
doc/tools/images/ShapeFormatConverter_rotated.png
Normal file
BIN
doc/tools/images/ocams20190314t190123s972_map_iofl2pan_77236.png
Normal file
74557
doc/tools/support_files/Bennu49k.obj
Normal file
480801
doc/tools/support_files/EVAL20.obj
Normal file
480801
doc/tools/support_files/EVAL20_wtr.obj
Normal file
16
doc/tools/support_files/M605862153F5.SUM
Normal file
@@ -0,0 +1,16 @@
|
||||
M605862153F5
|
||||
2019 MAR 14 19:01:23.961
|
||||
1024 1024 3000 65535 NPX, NLN, THRSH
|
||||
1.2519630000e+02 5.1300000000e+02 5.1300000000e+02 MMFL, CTR
|
||||
1.4378777439e+00 3.5947718256e+00 1.6471213851e-01 SCOBJ
|
||||
-2.1939099413e-02 -1.8773718273e-02 9.9958302478e-01 CX
|
||||
9.1292251760e-01 -4.0794525500e-01 1.2375208737e-02 CY
|
||||
4.0754282325e-01 9.1281335246e-01 2.6088901655e-02 CZ
|
||||
-4.9410452981e-01 -8.6879273764e-01 3.2555992468e-02 SZ
|
||||
117.6470 0.0000 0.0000 0.0000 -117.6360 0.0000 K-MATRIX
|
||||
0.00000 0.00000 0.00000 0.00000 DISTORTION
|
||||
1.0000000000e+00 1.0000000000e+00 1.0000000000e+00 SIGMA_VSO
|
||||
1.0000000000e-03 1.0000000000e-03 1.0000000000e-03 SIGMA_PTG
|
||||
LANDMARKS
|
||||
LIMB FITS
|
||||
END FILE
|
||||
18892
doc/tools/support_files/g_12570mm_alt_obj_0000n00000_v014.obj
Normal file
25
doc/tools/support_files/rectangular_cuboid.obj
Normal file
@@ -0,0 +1,25 @@
|
||||
#
|
||||
# Rectangle example for Terrasaur
|
||||
#
|
||||
|
||||
v -1.0 -2.0 -4.0
|
||||
v -1.0 -2.0 4.0
|
||||
v -1.0 2.0 -4.0
|
||||
v -1.0 2.0 4.0
|
||||
v 1.0 -2.0 -4.0
|
||||
v 1.0 -2.0 4.0
|
||||
v 1.0 2.0 -4.0
|
||||
v 1.0 2.0 4.0
|
||||
|
||||
f 3 2 1
|
||||
f 2 3 4
|
||||
f 5 6 7
|
||||
f 8 7 6
|
||||
f 8 4 3
|
||||
f 3 7 8
|
||||
f 1 2 6
|
||||
f 6 5 1
|
||||
f 1 5 7
|
||||
f 7 3 1
|
||||
f 6 2 4
|
||||
f 8 4 6
|
||||
10000
doc/tools/support_files/xyzrandom.txt
Normal file
257
mkPackage.bash
Executable file
@@ -0,0 +1,257 @@
#!/bin/bash

# Configure these for the package
packageName=Terrasaur
appPackage="terrasaur"
scriptPath=$(
  cd "$(dirname "$0")" || exit 1
  pwd -P
)
srcPath="${scriptPath}/src/main/java"
srcFile="${srcPath}/${appPackage}/utils/AppVersion.java"
appSrcDir="${appPackage}/apps"

function build_jar() {
  rev=$1

  cd "${scriptPath}" || exit 1

  # store the version number in pom.xml
  cp -p pom.xml pom.bak
  sed "s,<version>0.0.1-SNAPSHOT</version>,<version>$rev</version>,g" pom.bak >pom.xml

  # install dependencies to the local maven repository
  if [ -d dependency ]; then
    cd dependency || exit 1

    for pom in *.pom; do
      base=$(basename "$pom" .pom)
      jar="${base}.jar"

      # Extract groupId, artifactId, and version from the POM file
      groupId=$(grep -m1 '<groupId>' "$pom" | sed -E 's|.*<groupId>(.*)</groupId>.*|\1|' | tr '.' '/')
      artifactId=$(grep -m1 '<artifactId>' "$pom" | sed -E 's|.*<artifactId>(.*)</artifactId>.*|\1|')
      version=$(grep -m1 '<version>' "$pom" | sed -E 's|.*<version>(.*)</version>.*|\1|')

      if [ -z "$groupId" ] || [ -z "$artifactId" ] || [ -z "$version" ]; then
        echo "Skipping ${base}: Unable to extract Maven coordinates."
        continue
      fi

      # Construct the expected JAR and POM paths in the local Maven repository
      repo_path="$HOME/.m2/repository/$groupId/$artifactId/$version"
      jar_path="$repo_path/$artifactId-$version.jar"
      pom_path="$repo_path/$artifactId-$version.pom"

      if [ -f "$jar_path" ] || [ -f "$pom_path" ]; then
        echo "${base} is already installed in local repository."
        continue
      fi

      # Install the artifact
      if [ -e "$jar" ]; then
        mvn -q install:install-file -Dfile="$jar" -DpomFile="$pom"
      else
        mvn -q install:install-file -Dfile="$pom" -DpomFile="$pom"
      fi

      echo "Installed ${base} in local repository"
    done

    cd ..
  else
    # install the third party jar files
    mvn -q install:install-file -Dfile=3rd-party/"${ARCH}"/spice/spice.jar -DgroupId=gov.nasa.jpl.naif -DartifactId=spice -Dversion=N0067 -Dpackaging=jar
    mvn -q install:install-file -Dfile=3rd-party/"${ARCH}"/vtk/lib/java/vtk.jar -DgroupId=com.kitware -DartifactId=vtk-apl -Dversion=9.2.6-apl -Dpackaging=jar

    # Deploy to surfshop

    # echo mvn deploy:deploy-file -Dfile=3rd-party/"${ARCH}"/spice/spice.jar -DgroupId=gov.nasa.jpl.naif -DartifactId=spice -Dversion=N0067 -Dpackaging=jar -DrepositoryId=third-party -Durl=http://surfshop:8082/artifactory/libs-3rdparty-local/
    # echo mvn deploy:deploy-file -Dfile=3rd-party/"${ARCH}"/vtk/lib/java/vtk.jar -DgroupId=com.kitware -DartifactId=vtk-apl -Dversion=9.2.6-apl -Dpackaging=jar -DrepositoryId=third-party -Durl=http://surfshop:8082/artifactory/libs-3rdparty-local/

  fi

  # ARCH needed for maven-surefire-plugin
  export ARCH
  mvn clean install

  # restore the old pom file
  mv pom.bak pom.xml

  # install the maven products
  rsync -a "${scriptPath}"/target/${packageName}.jar "${libDir}"
  rsync -a "${scriptPath}"/target/${packageName}_lib "${libDir}"

  # shellcheck disable=SC2086
  rsync -a "${scriptPath}"/3rd-party/ ${libDir}

}

function make_scripts() {

  classes=$(jar tf "${scriptPath}"/target/${packageName}.jar | grep $appSrcDir | grep -v '\$' | grep -v "package-info" | grep class)

  for class in $classes; do
    base=$(basename "$class" ".class")
    tool=${scriptDir}/${base}
    path=$(dirname "$class" | sed 's,/,.,g').${base}
    echo "#!/bin/bash" >${tool}
    echo 'script_dir=$(dirname $(which $0))' >>${tool}
    echo 'script_dir=$(cd $script_dir; pwd -P)' >>${tool}
    echo 'root=$(dirname $script_dir)' >>${tool}
    echo 'MEMSIZE=""' >>${tool}
    echo 'ARCH=$(uname -s)_$(uname -m)' >>${tool}
    echo 'export PATH="${root}/lib/${ARCH}/altwg:${root}/lib/${ARCH}/spice/JNISpice/exe:${PATH}"' >>${tool}
    echo 'JAVA_LIBRARY_PATH=""' >>${tool}
    echo 'if [ ! -z $JAVA_HOME ]; then' >>${tool}
    echo ' JAVA_LIBRARY_PATH="${JAVA_HOME}/lib:${JAVA_LIBRARY_PATH}"' >>${tool}
    echo 'fi' >>${tool}
    echo 'JAVA_LIBRARY_PATH="${root}/lib/${ARCH}/spice/JNISpice/lib:${JAVA_LIBRARY_PATH}"' >>${tool}
    echo 'JAVA_LIBRARY_PATH="${root}/lib/${ARCH}/vtk/lib:${JAVA_LIBRARY_PATH}"' >>${tool}
    echo 'JAVA_LIBRARY_PATH="${root}/lib/${ARCH}/vtk/lib/java/vtk-$(uname -s)-$(uname -m):${JAVA_LIBRARY_PATH}"' >>${tool}
    echo 'if [ "$(uname -s)" == "Darwin" ]; then' >>${tool}
    echo ' MEMSIZE=$(sysctl hw.memsize | awk '\''{print int($2/1024)}'\'')' >>${tool}
    echo ' export DYLD_LIBRARY_PATH=$JAVA_LIBRARY_PATH' >>${tool}
    echo 'elif [ "$(uname -s)" == "Linux" ]; then' >>${tool}
    echo ' MEMSIZE=$(grep MemTotal /proc/meminfo | awk '\''{print $2}'\'')' >>${tool}
    echo ' export LD_LIBRARY_PATH=$JAVA_LIBRARY_PATH' >>${tool}
    echo 'fi' >>${tool}
    echo 'java=$(which java)' >>${tool}
    echo 'if [ -z "$java" ]; then' >>${tool}
    echo ' echo "Java executable not found in your PATH"' >>${tool}
    echo ' exit 1' >>${tool}
    echo 'fi' >>${tool}
    echo 'fullVersion=$($java -version 2>&1 | head -1 | awk -F\" '\''{print $2}'\'')' >>${tool}
    echo 'version=$(echo $fullVersion | awk -F\. '\''{print $1}'\'')' >>${tool}
    echo 'if [ "$version" -lt "'$REQUIRED_JAVA_VERSION'" ]; then' >>${tool}
    echo ' echo "minimum Java version required is '$REQUIRED_JAVA_VERSION'. Version found is $fullVersion."' >>${tool}
    echo ' exit 1' >>${tool}
    echo 'fi' >>${tool}
    echo '$java' "-Djava.library.path=\${JAVA_LIBRARY_PATH} -Xmx\${MEMSIZE}K -cp \${root}/lib/*:\${root}/lib/${packageName}_lib/* $path \"\$@\"" >>${tool}

    chmod +x ${tool}
  done

}
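
# For reference, a wrapper emitted by make_scripts (a sketch for a hypothetical
# tool named CompareOBJ on a Linux host; the real wrappers are produced by the
# echo statements above) looks roughly like:
#
#   #!/bin/bash
#   script_dir=$(dirname $(which $0))
#   script_dir=$(cd $script_dir; pwd -P)
#   root=$(dirname $script_dir)
#   ARCH=$(uname -s)_$(uname -m)
#   MEMSIZE=$(grep MemTotal /proc/meminfo | awk '{print $2}')
#   ...library path setup for SPICE and VTK under ${root}/lib/${ARCH}...
#   $java -Djava.library.path=${JAVA_LIBRARY_PATH} -Xmx${MEMSIZE}K \
#     -cp ${root}/lib/*:${root}/lib/Terrasaur_lib/* terrasaur.apps.CompareOBJ "$@"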

function make_doc {
  cwd=$(pwd)

  # build javadoc
  javadoc -quiet -Xdoclint:none -cp ${libDir}/*:${libDir}/${packageName}_lib/* -d ${docDir}/javadoc -sourcepath ${srcPath} -subpackages ${appPackage} -overview ${docDir}/src/overview.html
  /bin/rm -fr "${docDir}"/src

  # sphinx
  cd ${scriptPath}/doc || exit 1

  python3 -m venv "${scriptPath}"/venv
  source "${scriptPath}"/venv/bin/activate
  site_package_path=$(python3 -c 'import sysconfig; print(sysconfig.get_paths()["purelib"])')
  if [ -z "$PYTHONPATH" ]; then
    export PYTHONPATH=$site_package_path
  else
    export PYTHONPATH=$site_package_path:$PYTHONPATH
  fi

  python3 -m pip --default-timeout=1000 install -U sphinx
  python3 -m pip --default-timeout=1000 install sphinx-theme-pd

  ./make_doc.bash ${scriptDir}
  sphinx-build -b html . _build
  rsync -a _build/ ${docDir}
  /bin/rm -fr toolDescriptions tools/shortDescriptions.rst tools/index.rst _build

  cd "$cwd" || exit 1
}

### Don't need to modify anything below this line

# update maven-compiler-plugin block in pom if this version changes
REQUIRED_JAVA_VERSION=21

java=$(which java)
if [ -z "$java" ]; then
  echo "Java executable not found in your PATH"
  exit 0
fi
fullVersion=$(java -version 2>&1 | head -1 | awk -F\" '{print $2}')
version=$(echo $fullVersion | awk -F\. '{print $1}')
if [ "$version" -lt "$REQUIRED_JAVA_VERSION" ]; then
  echo "minimum Java version required is $REQUIRED_JAVA_VERSION. Version found is $fullVersion."
  exit 0
fi

if [ -d .git ]; then

  date=$(git log -1 --format=%cd --date=format:%Y.%m.%d)
  rev=$(git rev-parse --verify --short HEAD)

  if [[ $(git diff --stat) != '' ]]; then
    echo 'WARNING: the following files have not been checked in:'
    git status --short
    echo "waiting for 5 seconds ..."
    sleep 5
    rev=${rev}M
  fi

else
  date=$(date -u +"%Y.%m.%d")
  rev="UNVERSIONED"
fi

ARCH=$(uname -s)_$(uname -m)
pkgBase=${packageName}-${date}

scriptDir=${pkgBase}/scripts
scriptDir=$(
  mkdir -p "${scriptDir}"
  cd "${scriptDir}" || exit 1
  pwd -P
)
libDir=${pkgBase}/lib
libDir=$(
  mkdir -p "${libDir}"
  cd "${libDir}" || exit 1
  pwd -P
)
docDir=${pkgBase}/doc
docDir=$(
  mkdir -p "${docDir}"
  cd "${docDir}" || exit 1
  pwd -P
)

if [ ! -d ${scriptPath}/3rd-party/${ARCH} ]; then
  echo "third party libraries should be installed in 3rd-party/${ARCH}. Please install them and run this script again."
  exit 0
fi

# Build the jar file
build_jar ${rev}

# create the executable scripts
make_scripts

# create documentation
make_doc

# Create distribution files for each architecture
mkdir -p dist
for arch in 3rd-party/*; do
  this_arch=$(basename "$arch")
  tarfile=./dist/${pkgBase}-${rev}_${this_arch}.tar
  tar --exclude='lib' -cf "$tarfile" "${pkgBase}"
  tar rf "$tarfile" "${pkgBase}"/lib/"${this_arch}" "${pkgBase}"/lib/${packageName}.jar "${pkgBase}"/lib/${packageName}_lib
  gzip "${tarfile}"
  echo "Created ${tarfile}.gz"
done

mvn -q -Dmdep.copyPom=true dependency:copy-dependencies
rsync -a README.md CHANGELOG.md mkPackage.bash pom.xml doc src target/dependency "${pkgBase}"-src/
tar cfz ./dist/"${pkgBase}"-${rev}-src.tar.gz ./"${pkgBase}"-src
echo -e "\nCreated ./dist/${pkgBase}-${rev}-src.tar.gz"
/bin/rm -fr ./"${pkgBase}" ./"${pkgBase}"-src

if [ -d .git ]; then
  git restore "$srcFile"
fi
330
pom.xml
Normal file
@@ -0,0 +1,330 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>edu.jhuapl.ses.srn</groupId>
  <artifactId>Terrasaur</artifactId>
  <version>0.0.1-SNAPSHOT</version>

  <!-- Specifies organization -->
  <organization>
    <name>Johns Hopkins University Applied Physics Lab</name>
    <url>https://www.jhuapl.edu</url>
  </organization>

  <!-- publish to surfshop -->
  <distributionManagement>
    <repository>
      <id>central</id>
      <name>surfshop-snapshots</name>
      <url>http://surfshop.jhuapl.edu:8081/artifactory/libs-snapshot-local</url>
    </repository>
  </distributionManagement>

  <properties>
    <package>Terrasaur</package>

    <!-- main method in manifest, not that important which one you pick -->
    <mainClass>terrasaur.apps.CompareOBJ</mainClass>

    <!-- Sets proper encoding -->
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>

    <maven.compiler.release>21</maven.compiler.release>
    <javafx.version>21.0.5</javafx.version>

    <immutables.version>2.10.1</immutables.version>
    <jackfruit.version>1.1.1</jackfruit.version>
  </properties>

  <build>
    <finalName>${package}</finalName>
    <sourceDirectory>src/main/java</sourceDirectory>
    <plugins>

      <plugin>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>3.13.0</version>
        <configuration>
          <release>${maven.compiler.release}</release>
          <annotationProcessorPaths>
            <annotationProcessorPath>
              <groupId>org.immutables</groupId>
              <artifactId>value</artifactId>
              <version>${immutables.version}</version>
            </annotationProcessorPath>
            <annotationProcessorPath>
              <groupId>org.immutables</groupId>
              <artifactId>builder</artifactId>
              <version>${immutables.version}</version>
            </annotationProcessorPath>
            <annotationProcessorPath>
              <groupId>edu.jhuapl.ses</groupId>
              <artifactId>jackfruit</artifactId>
              <version>${jackfruit.version}</version>
            </annotationProcessorPath>
          </annotationProcessorPaths>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-dependency-plugin</artifactId>
        <version>3.8.1</version>
        <executions>
          <execution>
            <id>copy-dependencies</id>
            <phase>prepare-package</phase>
            <goals>
              <goal>copy-dependencies</goal>
            </goals>
            <configuration>
              <outputDirectory>${project.build.directory}/${package}_lib</outputDirectory>
              <overWriteReleases>false</overWriteReleases>
              <overWriteSnapshots>false</overWriteSnapshots>
              <overWriteIfNewer>true</overWriteIfNewer>
            </configuration>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <version>3.5.2</version>
        <configuration>
          <argLine>
            -Djava.library.path=${project.basedir}/3rd-party/${env.ARCH}/spice/JNISpice/lib
          </argLine>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-enforcer-plugin</artifactId>
        <version>3.5.0</version>
        <executions>
          <execution>
            <id>enforce-maven</id>
            <goals>
              <goal>enforce</goal>
            </goals>
            <configuration>
              <rules>
                <requireMavenVersion>
                  <version>3.6.3</version>
                </requireMavenVersion>
              </rules>
            </configuration>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-source-plugin</artifactId>
        <version>3.3.1</version>
        <executions>
          <execution>
            <id>attach-sources</id>
            <goals>
              <goal>jar</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <artifactId>maven-resources-plugin</artifactId>
        <version>3.3.1</version>
        <executions>
          <execution>
            <id>copy-resources</id>
            <phase>validate</phase>
            <goals>
              <goal>copy-resources</goal>
            </goals>
            <configuration>
              <outputDirectory>${project.basedir}/target/${package}</outputDirectory>
              <resources>
                <resource>
                  <directory>resources</directory>
                  <filtering>true</filtering>
                </resource>
              </resources>
            </configuration>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-jar-plugin</artifactId>
        <version>3.4.2</version>
        <configuration>
          <archive>
            <manifest>
              <addClasspath>true</addClasspath>
              <classpathPrefix>lib/</classpathPrefix>
              <mainClass>${mainClass}</mainClass>
            </manifest>
            <manifestEntries>
              <Class-Path>.</Class-Path>
            </manifestEntries>
          </archive>
        </configuration>
      </plugin>

      <plugin>
        <groupId>org.codehaus.mojo</groupId>
        <artifactId>exec-maven-plugin</artifactId>
        <version>3.5.0</version>
        <executions>
          <execution>
            <phase>generate-sources</phase>
            <id>createVersionFile</id>
            <goals>
              <goal>exec</goal>
            </goals>
            <configuration>
              <executable>${project.basedir}/src/main/bash/createVersionFile.bash</executable>
            </configuration>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
  <dependencies>
    <!-- javafx dependencies -->
    <dependency>
      <groupId>org.openjfx</groupId>
      <artifactId>javafx-controls</artifactId>
      <version>${javafx.version}</version>
    </dependency>
    <dependency>
      <groupId>org.openjfx</groupId>
      <artifactId>javafx-fxml</artifactId>
      <version>${javafx.version}</version>
    </dependency>
    <dependency>
      <groupId>org.openjfx</groupId>
      <artifactId>javafx-graphics</artifactId>
      <version>${javafx.version}</version>
    </dependency>

    <!-- adding all the platform libs only adds a couple more MB -->
    <dependency>
      <groupId>org.openjfx</groupId>
      <artifactId>javafx-graphics</artifactId>
      <version>${javafx.version}</version>
      <classifier>win</classifier>
    </dependency>
    <dependency>
      <groupId>org.openjfx</groupId>
      <artifactId>javafx-graphics</artifactId>
      <version>${javafx.version}</version>
      <classifier>mac</classifier>
    </dependency>
    <dependency>
      <groupId>org.openjfx</groupId>
      <artifactId>javafx-graphics</artifactId>
      <version>${javafx.version}</version>
      <classifier>linux</classifier>
    </dependency>
    <dependency>
      <groupId>commons-beanutils</groupId>
      <artifactId>commons-beanutils</artifactId>
      <version>1.10.0</version>
    </dependency>
    <dependency>
      <groupId>commons-cli</groupId>
      <artifactId>commons-cli</artifactId>
      <version>1.9.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-configuration2</artifactId>
      <version>2.11.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-csv</artifactId>
      <version>1.13.0</version>
    </dependency>
    <dependency>
      <groupId>commons-io</groupId>
      <artifactId>commons-io</artifactId>
      <version>2.18.0</version>
    </dependency>
    <dependency>
      <groupId>com.beust</groupId>
      <artifactId>jcommander</artifactId>
      <version>1.72</version>
    </dependency>
    <dependency>
      <groupId>com.google.code.gson</groupId>
      <artifactId>gson</artifactId>
      <version>2.12.1</version>
    </dependency>

    <dependency>
      <groupId>com.kitware</groupId>
      <artifactId>vtk-apl</artifactId>
      <version>9.2.6-apl</version>
    </dependency>
    <dependency>
      <groupId>edu.jhuapl.ses</groupId>
      <artifactId>picante</artifactId>
      <version>1.0.0</version>
    </dependency>
    <dependency>
      <groupId>edu.jhuapl.ses</groupId>
      <artifactId>jackfruit</artifactId>
      <version>${jackfruit.version}</version>
    </dependency>
    <dependency>
      <groupId>gov.nasa.gsfc.heasarc</groupId>
      <artifactId>nom-tam-fits</artifactId>
      <version>1.20.2</version>
    </dependency>
    <dependency>
      <groupId>gov.nasa.jpl.naif</groupId>
      <artifactId>spice</artifactId>
      <version>N0067</version>
    </dependency>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-text</artifactId>
      <version>1.13.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-api</artifactId>
      <version>2.24.3</version>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-core</artifactId>
      <version>2.24.3</version>
    </dependency>
    <dependency>
      <groupId>org.immutables</groupId>
      <artifactId>value</artifactId>
      <version>${immutables.version}</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.immutables</groupId>
      <artifactId>builder</artifactId>
      <version>${immutables.version}</version>
      <scope>provided</scope>
    </dependency>
  </dependencies>
  <repositories>
    <repository>
      <id>maven_central</id>
      <name>Maven Central</name>
      <url>https://repo.maven.apache.org/maven2/</url>
    </repository>
  </repositories>
</project>
60
src/main/bash/createVersionFile.bash
Executable file
@@ -0,0 +1,60 @@
#!/bin/bash

# This script is run from maven. See the exec-maven-plugin block in the pom.xml file.

# these should be consistent with the root-level mkPackage.bash script
package=Terrasaur
srcFile="../java/terrasaur/utils/AppVersion.java"

cd "$(dirname "$0")" || exit 1

date=$(date -u +"%Y-%b-%d %H:%M:%S %Z")

if ! rev=$(git rev-parse --verify --short HEAD); then
  lastCommit=$(date -u +"%y.%m.%d")
  rev="UNVERSIONED"
else
  lastCommit=$(git log -1 --format=%cd --date=format:%y.%m.%d)

  if [[ $(git diff --stat) != '' ]]; then
    if [[ $(git status -s --untracked=no | grep -v pom.xml | grep -v pom.bak | grep -v .m2 | grep -v $srcFile) != '' ]]; then
      rev=${rev}M
    fi
  fi
fi

mkdir -p "$(dirname $srcFile)"

cat <<EOF > $srcFile

package terrasaur.utils;

public class AppVersion {
  public final static String lastCommit = "$lastCommit";
  // an M at the end of gitRevision means this was built from a "dirty" git repository
  public final static String gitRevision = "$rev";
  public final static String applicationName = "$package";
  public final static String dateString = "$date";

  private AppVersion() {}

  /**
   * $package version $lastCommit-$rev built $date
   */
  public static String getFullString() {
    return String.format("%s version %s-%s built %s", applicationName, lastCommit, gitRevision, dateString);
  }

  /**
   * $package version $lastCommit-$rev
   */
  public static String getVersionString() {
    return String.format("%s version %s-%s", applicationName, lastCommit, gitRevision);
  }
}

EOF
325
src/main/java/terrasaur/altwg/pipeline/ALTWGProductNamer.java
Normal file
@@ -0,0 +1,325 @@
package terrasaur.altwg.pipeline;

import terrasaur.enums.AltwgDataType;
import terrasaur.fits.FitsHdr.FitsHdrBuilder;
import terrasaur.fits.HeaderTag;
import terrasaur.utils.StringUtil;

public class ALTWGProductNamer implements ProductNamer {

  public ALTWGProductNamer() {
    super();
  }

  /**
   * Parse the productName and return the portion of the name corresponding to a given field. Fields
   * are assumed separated by "_" in the filename.
   *
   * @param productName
   * @param fieldNum
   * @return
   */
  @Override
  public String getNameFrag(String productName, int fieldNum) {

    String[] fields = productName.split("_");
    String returnField = "ERROR";
    // use >= : valid indices run from 0 to fields.length - 1
    if (fieldNum >= fields.length) {
      System.out.println(
          "ERROR, field:" + fieldNum + " requested is beyond the number of fields found.");
      System.out.println("returning:" + returnField);
    } else {
      returnField = fields[fieldNum];
    }
    return returnField;
  }

  @Override
  public String productbaseName(
      FitsHdrBuilder hdrBuilder, AltwgDataType altwgProduct, boolean isGlobal) {

    String gsd = "gsd";
    String dataSrc = "dataSrc";
    String productType = altwgProduct.getFileFrag();
    String prodVer = getVersion(hdrBuilder);

    // extract ground sample distance. gsdD is in mm!
    double gsdD = gsdFromHdr(hdrBuilder);

    int gsdI = (int) Math.round(gsdD);
    String fileUnits = "mm";
    gsd = String.format("%05d", gsdI) + fileUnits;

    // System.out.println("gsd:" + gsd);
    // System.out.println("file units:" + fileUnits);

    HeaderTag key = HeaderTag.DATASRC;
    if (hdrBuilder.containsKey(key)) {
      dataSrc = hdrBuilder.getCard(key).getValue().toLowerCase();

      // check whether dataSrc needs to be modified
      dataSrc = HeaderTag.getSDP(dataSrc);
      // data source should only be 3 chars long
      if (dataSrc.length() > 3) {
        dataSrc = dataSrc.substring(0, 3);
      }
    }

    key = HeaderTag.CLON;
    String cLon = null;
    if (hdrBuilder.containsKey(key)) {
      cLon = hdrBuilder.getCard(key).getValue();
    }
    if (cLon == null) {
      if (isGlobal) {
        // set center longitude to 0.0 if value not parsed and this is a global product
        cLon = "0.0";
      } else {
        String errMesg = "ERROR! Could not parse CLON from fits header!";
        throw new RuntimeException(errMesg);
      }
    }
    // System.out.println("clon:" + cLon);
    key = HeaderTag.CLAT;
    String cLat = null;
    if (hdrBuilder.containsKey(key)) {
      cLat = hdrBuilder.getCard(key).getValue();
    }
    if (cLat == null) {
      if (isGlobal) {
        // set center latitude to 0.0 if value not parsed and this is a global product
        cLat = "0.0";
      } else {
        String errMesg = "ERROR! Could not parse CLAT from fits header!";
        throw new RuntimeException(errMesg);
      }
    }
    // System.out.println("clat" + cLat);

    String region = "l";
    if (isGlobal) {
      region = "g";
    }

    String clahLon = ALTWGProductNamer.clahLon(cLat, cLon);

    // pds likes having filenames all in the same case, so chose lowercase
    String outFile =
        ALTWGProductNamer.altwgBaseName(region, gsd, dataSrc, productType, clahLon, prodVer);
    return outFile;
  }

  /**
   * Retrieve the product version string. Returns the initial default value if the product version
   * keyword is not found in the builder.
   *
   * @param hdrBuilder
   */
  @Override
  public String getVersion(FitsHdrBuilder hdrBuilder) {
    String prodVer = "prodVer";

    // note: this has been changed to MAP_VER in the SIS
    HeaderTag key = HeaderTag.MAP_VER;
    // key = HeaderTag.PRODVERS;
    if (hdrBuilder.containsKey(key)) {
      prodVer = hdrBuilder.getCard(key).getValue();
      prodVer = prodVer.replaceAll("\\.", "");
    }

    return prodVer;
  }

  // Given the fields return the altwg PDS base name
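  // Hypothetical example: altwgBaseName("g", "00005mm", "spc", "dtm", "1234N30322", "1.0")
  // returns "g_00005mm_spc_dtm_1234n30322_v10" (lowercased, with '.' stripped from the version).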
  public static String altwgBaseName(
      String region, String gsd, String dataSource, String desc, String lahLon, String version) {

    StringBuilder sb = new StringBuilder();
    String delim = "_";
    sb.append(region);
    sb.append(delim);
    sb.append(gsd);
    sb.append(delim);

    // data source should only be 3 characters long
    if (dataSource.length() > 3) {
      System.out.println("WARNING! dataSource:" + dataSource + " longer than 3 chars!");
      dataSource = dataSource.substring(0, 3);
      System.out.println(
          "Will set data source to:"
              + dataSource
              + " but"
              + " this might NOT conform to the ALTWG naming convention!");
    }
    sb.append(dataSource);
    sb.append(delim);
    sb.append(desc);
    sb.append(delim);
    sb.append(lahLon);
    sb.append(delim);
    sb.append("v");

    // remove '.' from version string
    version = version.replaceAll("\\.", "");
    sb.append(version);

    // pds likes having filenames all in the same case, so chose lowercase
    String outFile = sb.toString().toLowerCase();

    return outFile;
  }

  /**
   * Parse center lat, lon strings and return the formatted clahLon portion of the PDS filename.
   *
   * @param clat
   * @param clon
   * @return
   */
  public static String clahLon(String clat, String clon) {

    // remove all whitespace that may exist in the strings
    clat = clat.replaceAll("\\s+", "");
    clon = clon.replaceAll("\\s+", "");

    double cLonD = StringUtil.parseSafeD(clon);
    double cLatD = StringUtil.parseSafeD(clat);

    String clahLon = clahLon(cLatD, cLonD);
    return clahLon;
  }
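
  /**
   * Worked example (hypothetical inputs): clahLon(12.34, -56.78) shifts the longitude to 303.22
   * degrees east and formats it as "30322"; the latitude becomes "1234N". The returned fragment
   * is "1234N30322".
   */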
|
||||
|
||||
public static String clahLon(double cLatD, double cLonD) {
|
||||
|
||||
String cLon = "";
|
||||
|
||||
if (cLonD == Double.NaN) {
|
||||
|
||||
// unable to parse center longitude using normal method (see V in getProductCards())
|
||||
cLon = "xxxxxx";
|
||||
|
||||
} else {
|
||||
|
||||
if (cLonD < 0) {
|
||||
// transform to 0-360
|
||||
cLonD = cLonD + 360D;
|
||||
}
|
||||
// format double to 2 significant digits
|
||||
cLon = String.format("%06.2f", cLonD);
|
||||
}
|
||||
|
||||
// remove decimal point
|
||||
cLon = cLon.replace(".", "");
|
||||
|
||||
String cLat = "";
|
||||
|
||||
// System.out.println("cLatD:" + Double.toString(cLatD));
|
||||
if (cLatD == Double.NaN) {
|
||||
|
||||
// unable to parse center latitude
|
||||
cLat = "xxxxxx";
|
||||
|
||||
} else {
|
||||
|
||||
double tol = 0.0101D;
|
||||
|
||||
// determine whether latitude is within tolerance of its rounded value.
|
||||
// if so then use rounded value
|
||||
double roundValue = Math.round(cLatD);
|
||||
double diffTol = Math.abs(roundValue - cLatD);
|
||||
if (diffTol < tol) {
|
||||
cLatD = roundValue;
|
||||
}
|
||||
String hemiSphere = (cLatD >= 0) ? "N" : "S";
|
||||
|
||||
if (cLatD < 0) {
|
||||
// remove negative sign if in southern hemisphere
|
||||
cLatD = cLatD * -1.0D;
|
||||
}
|
||||
// format cLat to 2 significant digits
|
||||
cLat = String.format("%05.2f", cLatD);
|
||||
cLat = cLat.replace(".", "");
|
||||
|
||||
// trim to length 4.
|
||||
cLat = cLat.substring(0, Math.min(cLat.length(), 4));
|
||||
cLat = cLat + hemiSphere;
|
||||
}
|
||||
|
||||
String clahLon = cLat + cLon;
|
||||
return clahLon;
|
||||
}
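
  // Worked example (added, traced from the logic above): clahLon(12.34, -45.6) keeps the
  // latitude at 12.34 (outside the 0.0101 rounding tolerance), formats it as "1234" + "N",
  // shifts the longitude to 314.40 and formats it as "31440", returning "1234N31440".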

  /**
   * Return the GSD parsed from the FitsHdrBuilder. Returns -999 if a valid GSD could not be
   * parsed. GSD is in mm.
   *
   * @param hdrBuilder
   * @return
   */
  @Override
  public double gsdFromHdr(FitsHdrBuilder hdrBuilder) {

    String gsd = "gsd";
    double gsdD = Double.NaN;

    // extract ground sample distance using GSD first
    HeaderTag key = HeaderTag.GSD;
    if (hdrBuilder.containsKey(key)) {
      gsd = hdrBuilder.getCard(key).getValue();
      gsdD = StringUtil.parseSafeD(gsd);
      if (gsdD < 0D) {
        // keyword value not initialized
        gsdD = Double.NaN;
        System.out.println("WARNING! keyword GSD not set!");
      }
    } else {
      System.out.println("could not find " + key.toString() + " to parse GSD from.");
    }
    if (Double.isNaN(gsdD)) {
      // could not parse GSD into valid number, try GSDI
      key = HeaderTag.GSDI;
      if (hdrBuilder.containsKey(key)) {
        gsdD = StringUtil.parseSafeD(hdrBuilder.getCard(key).getValue());
        if (gsdD < 0D) {
          // keyword value not initialized
          gsdD = Double.NaN;
          System.out.println("WARNING! keyword GSDI not set!");
        }
      } else {
        System.out.println("could not find " + key.toString() + " to parse GSD from.");
      }
      if (Double.isNaN(gsdD)) {
        // still cannot parse gsd. Set to -999
        System.out.println(
            "WARNING: No valid values of GSD or GSDI could be parsed from fits header!");
        System.out.println("Setting gsd = -999");
        gsdD = -999D;
      }
    }

    if (hdrBuilder.getCard(key).getComment().contains("[cm]")) {

      // mandated to use mm! change the units
      gsdD = gsdD * 10.0D;
    }

    return gsdD;
  }

  @Override
  public NameConvention getNameConvention() {
    return NameConvention.ALTPRODUCT;
  }

  /** Parse the filename using the ALTWGProduct naming convention and return the GSD value. */
  @Override
  public double gsdFromFilename(String filename) {
    String[] splitStr = filename.split("_");
    // GSD is second element
    String gsd = splitStr[1];
    gsd = gsd.replace("mm", "");
    return StringUtil.parseSafeD(gsd);
  }
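
  // Illustrative example (added; the filename is hypothetical):
  //   gsdFromFilename("g_00050mm_ola_dtm_0000n00000_v001") splits on "_", takes "00050mm",
  //   strips "mm", and returns 50.0.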
}
190
src/main/java/terrasaur/altwg/pipeline/AltwgMLNNamer.java
Normal file
@@ -0,0 +1,190 @@
package terrasaur.altwg.pipeline;

import terrasaur.enums.AltwgDataType;
import terrasaur.fits.FitsHdr.FitsHdrBuilder;
import terrasaur.fits.HeaderTag;
import terrasaur.utils.StringUtil;

public class AltwgMLNNamer implements ProductNamer {

  public AltwgMLNNamer() {
    super();
  }

  @Override
  public String getNameFrag(String productName, int fieldNum) {

    String nameFrag = "";

    return nameFrag;
  }

  /**
   * The ALTWG MLN naming convention applies only to one productType - the ALTWG NFT-MLN.
   *
   * @param hdrBuilder - contains values that are used to create the MLN according to the naming
   *     convention.
   * @param altwgProduct - N/A. Included here as part of the interface structure.
   * @param isGlobal - N/A. Included here as part of the interface structure.
   */
  @Override
  public String productbaseName(
      FitsHdrBuilder hdrBuilder, AltwgDataType altwgProduct, boolean isGlobal) {

    // initialize string fragments for NFT name. This will help identify
    // which string fragments have not been updated by the method.
    String gsd = "gsd";
    String dataSrc = "dataSrc";
    String dataSrcfile = "dataSrcFile";
    String productType = "nftdtm";
    String cLon = "cLon";
    String cLat = "cLat";
    String prodVer = "prodVer";
    String productID = "prodID";

    // find relevant information in the hdrBuilder map.
    double gsdD = gsdFromHdr(hdrBuilder);
    int gsdI = (int) Math.round(gsdD);
    String fileUnits = "mm";
    gsd = String.format("%05d", gsdI) + fileUnits;
    // HeaderTag key = HeaderTag.GSD;
    // if (hdrBuilder.containsKey(key)) {
    // gsd = hdrBuilder.getCard(key).getValue();
    //
    // double gsdD = Double.parseDouble(gsd);
    // String fileUnits = "";
    // if (hdrBuilder.getCard(key).getComment().contains("[mm]")) {
    // fileUnits = "mm";
    // } else if (hdrBuilder.getCard(key).getComment().contains("[cm]")) {
    //
    // // mandated to use mm! change the units
    // gsdD = gsdD * 10.0D;
    // gsdI = (int) Math.round(gsdD);
    // }
    //
    // System.out.println("gsd:" + gsd);
    // System.out.println("file units:" + fileUnits);
    //
    // }

    HeaderTag key = HeaderTag.DATASRC;
    if (hdrBuilder.containsKey(key)) {
      dataSrc = hdrBuilder.getCard(key).getValue().toLowerCase();
      // data source should only be 3 chars long
      if (dataSrc.length() > 3) {
        dataSrc = dataSrc.substring(0, 3);
      }
    }

    key = HeaderTag.CLON;
    if (hdrBuilder.containsKey(key)) {
      cLon = hdrBuilder.getCard(key).getValue();
    }

    key = HeaderTag.CLAT;
    if (hdrBuilder.containsKey(key)) {
      cLat = hdrBuilder.getCard(key).getValue();
    }

    key = HeaderTag.DATASRCF;
    if (hdrBuilder.containsKey(key)) {
      dataSrcfile = hdrBuilder.getCard(key).getValue();
    }

    key = HeaderTag.PRODVERS;
    if (hdrBuilder.containsKey(key)) {
      prodVer = hdrBuilder.getCard(key).getValue();
      prodVer = prodVer.replaceAll("\\.", "");
    }

    // hardcode region to local
    String region = "l";

    StringBuilder sb = new StringBuilder();
    String delim = "_";
    sb.append(region);
    sb.append(delim);
    sb.append(gsd);
    sb.append(delim);
    sb.append(dataSrc);
    sb.append(delim);
    sb.append(productType);
    sb.append(delim);

    /*
     * determine product ID. For OLA it is the center lat,lon. For SPC it is the NFT feature id,
     * which is assumed to be the first 5 chars of DATASRCF.
     */
    if (dataSrc.contains("ola")) {

      // follow ALTWG product naming convention for center lat, lon
      productID = ALTWGProductNamer.clahLon(cLat, cLon);
    } else {
      productID = dataSrcfile.substring(0, 5);
    }
    sb.append(productID);
    sb.append(delim);
    sb.append("v");
    sb.append(prodVer);

    return sb.toString().toLowerCase();
  }
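
  // Illustrative example (added; the header values are hypothetical): with a 50 mm GSD,
  // DATASRC "ola", and CLAT/CLON of 0.0, the method above produces
  // "l_00050mm_ola_nftdtm_0000n00000_v<prodVer>".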

  @Override
  public String getVersion(FitsHdrBuilder hdrBuilder) {

    String version = "";

    return version;
  }

  /**
   * Extract ground sample distance from FitsHdrBuilder. GSD is needed as part of the naming
   * convention. GSD is in units of mm.
   */
  @Override
  public double gsdFromHdr(FitsHdrBuilder hdrBuilder) {

    // find relevant information in the hdrBuilder map.
    double gsdD = Double.NaN;
    HeaderTag key = HeaderTag.GSD;
    if (hdrBuilder.containsKey(key)) {
      String gsd = hdrBuilder.getCard(key).getValue();

      gsdD = Double.parseDouble(gsd);
      String fileUnits = "";
      if (hdrBuilder.getCard(key).getComment().contains("[mm]")) {
        fileUnits = "mm";
      } else if (hdrBuilder.getCard(key).getComment().contains("[cm]")) {

        // mandated to use mm! change the units
        gsdD = gsdD * 10.0D;
        fileUnits = "mm";
      }
      System.out.println("gsd:" + gsd);
      System.out.println("file units:" + fileUnits);

    } else {
      String errMesg =
          "ERROR! Could not find keyword:" + HeaderTag.GSD.toString() + " in hdrBuilder";
      throw new RuntimeException(errMesg);
    }

    return gsdD;
  }

  @Override
  public NameConvention getNameConvention() {
    return NameConvention.ALTNFTMLN;
  }

  /** Parse the filename using the ALTWG MLN naming convention and return the GSD value. */
  @Override
  public double gsdFromFilename(String filename) {
    String[] splitStr = filename.split("_");
    // GSD is second element
    String gsd = splitStr[1];
    gsd = gsd.replace("mm", "");
    return StringUtil.parseSafeD(gsd);
  }
}
304
src/main/java/terrasaur/altwg/pipeline/DartNamer.java
Normal file
@@ -0,0 +1,304 @@
package terrasaur.altwg.pipeline;

import java.util.HashMap;
import java.util.Map;
import terrasaur.enums.AltwgDataType;
import terrasaur.fits.FitsHdr.FitsHdrBuilder;
import terrasaur.fits.HeaderTag;
import terrasaur.utils.StringUtil;

/**
 * Determines product names for DART shape models and ancillary products.
 *
 * @author espirrc1
 */
public class DartNamer implements ProductNamer {

  public static String getBaseName(Map<NameFields, String> nameFragments) {

    // check to see if the map contains all the fragments needed. Throws a RuntimeException if it
    // doesn't.
    validateMap(nameFragments);

    StringBuilder sb = new StringBuilder();
    String delim = "_";
    sb.append(nameFragments.get(NameFields.REGION));
    sb.append(delim);
    sb.append(nameFragments.get(NameFields.GSD));
    sb.append(delim);

    // data source should only be 3 characters long
    String dataSource = nameFragments.get(NameFields.DATASRC);
    if (dataSource.length() > 3) {
      System.out.println("WARNING! dataSource:" + dataSource + " longer than 3 chars!");
      dataSource = dataSource.substring(0, 3);
      System.out.println(
          "Will set data source to:"
              + dataSource
              + " but"
              + " this might NOT conform to the ALTWG naming convention!");
    }
    sb.append(dataSource);
    sb.append(delim);

    sb.append(nameFragments.get(NameFields.DATATYPE));
    sb.append(delim);
    sb.append(nameFragments.get(NameFields.TBODY));
    sb.append(delim);
    sb.append(nameFragments.get(NameFields.CLATLON));
    sb.append(delim);
    sb.append("v");

    // remove '.' from version string
    String version = nameFragments.get(NameFields.VERSION);
    version = version.replaceAll("\\.", "");
    sb.append(version);

    // pds likes having filenames all in the same case, so choose lowercase
    String outFile = sb.toString().toLowerCase();

    return outFile;
  }
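
  // Illustrative example (added; the fragment values are hypothetical): a map with REGION "g",
  // GSD "00050mm", DATASRC "ola", DATATYPE "dtm", TBODY "didy", CLATLON "0000N00000", and
  // VERSION "0.01" yields "g_00050mm_ola_dtm_didy_0000n00000_v001".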

  /**
   * Parse the productName and return the portion of the name corresponding to a given field.
   * Fields are assumed to be separated by "_" in the filename.
   *
   * @param productName
   * @param fieldNum
   * @return
   */
  @Override
  public String getNameFrag(String productName, int fieldNum) {

    String[] fields = productName.split("_");
    String returnField = "ERROR";
    // use >= so that an index equal to the array length is also rejected
    if (fieldNum >= fields.length) {
      System.out.println(
          "ERROR, field:" + fieldNum + " requested is beyond the number of fields found.");
      System.out.println("returning:" + returnField);
    } else {
      returnField = fields[fieldNum];
    }
    return returnField;
  }

  @Override
  public String productbaseName(
      FitsHdrBuilder hdrBuilder, AltwgDataType altwgProduct, boolean isGlobal) {

    String gsd = "gsd";
    String dataSrc = "dataSrc";

    Map<NameFields, String> nameFragments = new HashMap<>();

    // data type
    String productType = altwgProduct.getFileFrag();
    nameFragments.put(NameFields.DATATYPE, productType);

    // product version
    String prodVer = getVersion(hdrBuilder);
    nameFragments.put(NameFields.VERSION, prodVer);

    // extract ground sample distance. gsdD is in mm!
    double gsdD = gsdFromHdr(hdrBuilder);

    int gsdI = (int) Math.round(gsdD);
    String fileUnits = "mm";
    gsd = String.format("%05d", gsdI) + fileUnits;
    nameFragments.put(NameFields.GSD, gsd);

    // data source
    HeaderTag key = HeaderTag.DATASRC;
    if (hdrBuilder.containsKey(key)) {
      dataSrc = hdrBuilder.getCard(key).getValue().toLowerCase();
      // check whether dataSrc needs to be modified
      dataSrc = HeaderTag.getSDP(dataSrc);
      // data source should only be 3 chars long
      if (dataSrc.length() > 3) {
        dataSrc = dataSrc.substring(0, 3);
      }
    }
    nameFragments.put(NameFields.DATASRC, dataSrc);

    // center lon
    key = HeaderTag.CLON;
    String cLon = null;
    if (hdrBuilder.containsKey(key)) {
      cLon = hdrBuilder.getCard(key).getValue();
    }
    if (cLon == null) {
      if (isGlobal) {
        // set center longitude to 0.0 if value not parsed and this is a global product
        cLon = "0.0";
      } else {
        String errMesg = "ERROR! Could not parse CLON from fits header!";
        throw new RuntimeException(errMesg);
      }
    }

    // center lat
    key = HeaderTag.CLAT;
    String cLat = null;
    if (hdrBuilder.containsKey(key)) {
      cLat = hdrBuilder.getCard(key).getValue();
    }
    if (cLat == null) {
      if (isGlobal) {
        // set center latitude to 0.0 if value not parsed and this is a global product
        cLat = "0.0";
      } else {
        String errMesg = "ERROR! Could not parse CLAT from fits header!";
        throw new RuntimeException(errMesg);
      }
    }

    String clahLon = ALTWGProductNamer.clahLon(cLat, cLon);
    nameFragments.put(NameFields.CLATLON, clahLon);

    // region
    String region = "l";
    if (isGlobal) {
      region = "g";
    }
    nameFragments.put(NameFields.REGION, region);

    // target body
    key = HeaderTag.TARGET;
    String tBody = "unkn";
    if (hdrBuilder.containsKey(key)) {
      tBody = hdrBuilder.getCard(key).getValue();
      tBody = getBodyStFrag(tBody);
    }
    nameFragments.put(NameFields.TBODY, tBody);

    // pds likes having filenames all in the same case, so choose lowercase
    String outFile = DartNamer.getBaseName(nameFragments);
    return outFile;
  }

  @Override
  public String getVersion(FitsHdrBuilder hdrBuilder) {

    String prodVer = "prodVer";

    // note: this has been changed to MAP_VER in the SIS
    HeaderTag key = HeaderTag.MAP_VER;
    // key = HeaderTag.PRODVERS;
    if (hdrBuilder.containsKey(key)) {
      prodVer = hdrBuilder.getCard(key).getValue();
      prodVer = prodVer.replaceAll("\\.", "");
    }

    return prodVer;
  }

  @Override
  public double gsdFromHdr(FitsHdrBuilder hdrBuilder) {

    String gsd = "gsd";
    double gsdD = Double.NaN;

    // extract ground sample distance using GSD first
    HeaderTag key = HeaderTag.GSD;
    if (hdrBuilder.containsKey(key)) {
      gsd = hdrBuilder.getCard(key).getValue();
      gsdD = StringUtil.parseSafeD(gsd);
      if (gsdD < 0D) {
        // keyword value not initialized
        gsdD = Double.NaN;
        System.out.println("WARNING! keyword GSD not set!");
      }
    } else {
      System.out.println("could not find " + key.toString() + " to parse GSD from.");
    }
    if (Double.isNaN(gsdD)) {
      // could not parse GSD into valid number, try GSDI
      key = HeaderTag.GSDI;
      if (hdrBuilder.containsKey(key)) {
        gsdD = StringUtil.parseSafeD(hdrBuilder.getCard(key).getValue());
        if (gsdD < 0D) {
          // keyword value not initialized
          gsdD = Double.NaN;
          System.out.println("WARNING! keyword GSDI not set!");
        }
      } else {
        System.out.println("could not find " + key.toString() + " to parse GSD from.");
      }
      if (Double.isNaN(gsdD)) {
        // still cannot parse gsd. Set to -999
        System.out.println(
            "WARNING: No valid values of GSD or GSDI could be parsed from fits header!");
        System.out.println("Setting gsd = -999");
        gsdD = -999D;
      }
    }

    if (hdrBuilder.getCard(key).getComment().contains("[cm]")) {

      // mandated to use mm! change the units
      gsdD = gsdD * 10.0D;
    }

    return gsdD;
  }

  @Override
  public NameConvention getNameConvention() {
    return NameConvention.DARTPRODUCT;
  }

  /**
   * Parse the target body string to get the proper string fragment for the target body name.
   *
   * @param tBody
   * @return
   */
  private String getBodyStFrag(String tBody) {

    String returnFrag = tBody;

    if (tBody.toLowerCase().contains("didy")) {
      returnFrag = "didy";
    } else {
      if (tBody.toLowerCase().contains("dimo")) {
        returnFrag = "dimo";
      } else {
        System.out.println("Could not parse target string fragment from:" + tBody);
      }
    }

    return returnFrag;
  }

  private static void validateMap(Map<NameFields, String> nameFragments) {

    NameFields[] reqFields = new NameFields[7];
    reqFields[0] = NameFields.REGION;
    reqFields[1] = NameFields.GSD;
    reqFields[2] = NameFields.DATASRC;
    reqFields[3] = NameFields.DATATYPE;
    reqFields[4] = NameFields.TBODY;
    reqFields[5] = NameFields.CLATLON;
    reqFields[6] = NameFields.VERSION;

    for (NameFields requiredField : reqFields) {

      if (!nameFragments.containsKey(requiredField)) {
        String errMesg = "ERROR! Missing required field:" + requiredField.toString();
        throw new RuntimeException(errMesg);
      }
    }
  }

  /** Parse the filename using the DART naming convention and return the GSD value. */
  @Override
  public double gsdFromFilename(String filename) {

    String[] splitStr = filename.split("_");
    // GSD is second element
    String gsd = splitStr[1];
    gsd = gsd.replace("mm", "");
    return StringUtil.parseSafeD(gsd);
  }
}
26
src/main/java/terrasaur/altwg/pipeline/NameConvention.java
Normal file
@@ -0,0 +1,26 @@
package terrasaur.altwg.pipeline;

/**
 * Enum to store the different types of naming conventions.
 *
 * @author espirrc1
 */
public enum NameConvention {

  ALTPRODUCT, ALTNFTMLN, DARTPRODUCT, NOMATCH, NONEUSED;

  public static NameConvention parseNameConvention(String name) {
    for (NameConvention nameConvention : values()) {
      if (nameConvention.toString().equalsIgnoreCase(name)) {
        System.out.println("parsed naming convention:" + nameConvention.toString());
        return nameConvention;
      }
    }
    NameConvention nameConvention = NameConvention.NOMATCH;
    System.out.println(
        "NameConvention.parseNameConvention() could not parse naming convention:"
            + name + ". Returning:" + nameConvention.toString());
    return nameConvention;
  }
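
  // Illustrative example (added): parseNameConvention("AltProduct") matches
  // case-insensitively and returns ALTPRODUCT; an unrecognized string returns NOMATCH.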
}
13
src/main/java/terrasaur/altwg/pipeline/NameFields.java
Normal file
@@ -0,0 +1,13 @@
package terrasaur.altwg.pipeline;

/**
 * Enum to describe the different parts of the product name. Used by concrete classes implementing
 * ProductNamer.
 *
 * @author espirrc1
 */
public enum NameFields {

  GSD, DATATYPE, VERSION, DATASRC, CLATLON, REGION, TBODY;
}
111
src/main/java/terrasaur/altwg/pipeline/NamingFactory.java
Normal file
@@ -0,0 +1,111 @@
package terrasaur.altwg.pipeline;

import java.io.File;
import java.util.Map;
import terrasaur.enums.AltwgDataType;
import terrasaur.fits.AltPipelnEnum;
import terrasaur.fits.FitsHdr.FitsHdrBuilder;

/**
 * Factory class for returning concrete classes that implement the ProductNamer interface.
 *
 * @author espirrc1
 */
public class NamingFactory {

  public static ProductNamer getNamingConvention(NameConvention namingConvention) {

    switch (namingConvention) {
      case ALTPRODUCT:
        return new ALTWGProductNamer();

      case ALTNFTMLN:
        return new AltwgMLNNamer();

      case DARTPRODUCT:
        return new DartNamer();

      default:
        System.err.println(
            "ERROR! Naming convention:" + namingConvention.toString() + " not supported!");
        throw new RuntimeException();
    }
  }

  /**
   * Parse for the keyword in the pipeline config file that specifies what naming convention to
   * use.
   *
   * @param pipeConfig
   * @return
   */
  public static ProductNamer parseNamingConvention(
      Map<AltPipelnEnum, String> pipeConfig, boolean verbose) {

    ProductNamer productNamer = null;

    if (pipeConfig.containsKey(AltPipelnEnum.NAMINGCONVENTION)) {
      String value = pipeConfig.get(AltPipelnEnum.NAMINGCONVENTION);
      productNamer = parseNamingConvention(value);
    } else {
      String errMesg = "ERROR! Naming convention should have been defined in pipeConfig!";
      throw new RuntimeException(errMesg);
    }

    return productNamer;
  }

  /**
   * Parse a string to determine the naming convention to use. The naming convention is supplied
   * by the ProductNamer interface.
   *
   * @param value
   * @return
   */
  public static ProductNamer parseNamingConvention(String value) {

    if (value.length() < 1) {
      String errMesg = "ERROR! Cannot pass empty string to NamingFactory.parseNamingConvention!";
      throw new RuntimeException(errMesg);
    }
    NameConvention nameConvention = NameConvention.parseNameConvention(value);
    return NamingFactory.getNamingConvention(nameConvention);
  }

  /**
   * Given the naming convention, hdrBuilder, productType, and original output file, return the
   * renamed output file and cross reference file. Output is returned as a File[] array where
   * array[0] is the output basename, array[1] is the cross-reference file. If no naming convention
   * is specified (NONEUSED) then array[0] is the same as outfile, array[1] is null.
   *
   * @param namingConvention
   * @param hdrBuilder
   * @param productType
   * @param isGlobal
   * @param outfile - proposed output filename. If the naming convention results in a renamed OBJ
   *     then this is not used. If no naming convention is specified then outputFiles[0] = outfile.
   * @return
   */
  public static File[] getBaseNameAndCrossRef(
      NameConvention namingConvention,
      FitsHdrBuilder hdrBuilder,
      AltwgDataType productType,
      boolean isGlobal,
      String outfile) {

    File[] outputFiles = new File[2];

    // default to no renaming.
    File crossrefFile = null;
    String basename = outfile;

    if (namingConvention != NameConvention.NONEUSED) {
      ProductNamer productNamer = NamingFactory.getNamingConvention(namingConvention);
      basename = productNamer.productbaseName(hdrBuilder, productType, isGlobal);
      crossrefFile = new File(outfile + ".crf");
    }

    outputFiles[0] = new File(basename);
    outputFiles[1] = crossrefFile;
    return outputFiles;
  }
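
  // Illustrative usage (added; the arguments are hypothetical):
  //   File[] out = getBaseNameAndCrossRef(NameConvention.DARTPRODUCT, hdrBuilder,
  //       productType, true, "model.obj");
  // out[0] holds the renamed basename and out[1] the cross-reference file "model.obj.crf".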
}
21
src/main/java/terrasaur/altwg/pipeline/ProductNamer.java
Normal file
@@ -0,0 +1,21 @@
package terrasaur.altwg.pipeline;

import terrasaur.enums.AltwgDataType;
import terrasaur.fits.FitsHdr.FitsHdrBuilder;

public interface ProductNamer {

  public String getNameFrag(String productName, int fieldNum);

  public String productbaseName(
      FitsHdrBuilder hdrBuilder, AltwgDataType altwgProduct, boolean isGlobal);

  public String getVersion(FitsHdrBuilder hdrBuilder);

  public double gsdFromHdr(FitsHdrBuilder hdrBuilder);

  public NameConvention getNameConvention();

  public double gsdFromFilename(String filename);
}
289
src/main/java/terrasaur/apps/AdjustShapeModelToOtherShapeModel.java
Normal file
@@ -0,0 +1,289 @@
package terrasaur.apps;

import java.io.File;
import java.nio.charset.Charset;
import java.util.*;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.io.FileUtils;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import terrasaur.smallBodyModel.BoundingBox;
import terrasaur.smallBodyModel.SmallBodyModel;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.Log4j2Configurator;
import terrasaur.utils.NativeLibraryLoader;
import terrasaur.utils.PolyDataUtil;
import spice.basic.Plane;
import spice.basic.Vector3;
import vtk.vtkGenericCell;
import vtk.vtkPoints;
import vtk.vtkPolyData;
import vtk.vtksbCellLocator;

/**
 * AdjustShapeModelToOtherShapeModel program. See the usage string for more information about this
 * program.
 *
 * @author Eli Kahn
 * @version 1.0
 */
public class AdjustShapeModelToOtherShapeModel implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  @Override
  public String shortDescription() {
    return "Adjust vertices of one shape model to lie on the surface of another.";
  }

  @Override
  public String fullDescription(Options options) {

    String header =
        """
        \n
        This program takes 2 shape models in OBJ format and tries to adjust
        the vertices of the first shape model so they lie on the surface of the
        second shape model. It does this by shooting a ray starting from the origin
        in the direction of each point of the first model into the second model and
        then changing the point of the first model to the intersection point.""";
    return TerrasaurTool.super.fullDescription(options, header, "");
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("from")
            .hasArg()
            .desc(
                "path to first shape model in OBJ format which will get shifted to the second shape model")
            .build());
    options.addOption(
        Option.builder("to")
            .hasArg()
            .desc(
                "path to second shape model in OBJ format which the first shape model will try to match to")
            .build());
    options.addOption(
        Option.builder("output")
            .hasArg()
            .desc(
                "path to adjusted shape model in OBJ format generated by this program by shifting first to second")
            .build());
    options.addOption(
        Option.builder("filelist")
            .desc(
                """
                If specified then the second required argument to this program,
                "to" is a file containing a list of OBJ files to match to.
                In this situation the ray is shot into each of the shape models in this
                list and any intersection points are averaged together to produce the
                final intersection point. Note that any individual shape model in this
                list may be only a piece of the complete shape model (e.g. a mapola).
                However, the global shape model formed when all these pieces are
                combined together must not have any holes or gaps.""")
            .build());
    options.addOption(
        Option.builder("fit-plane-radius")
            .hasArg()
            .desc(
                """
                If present, find a local normal at each point in the first shape
                model by fitting a plane to all points within the specified radius.
                Use this normal to adjust the point to the second shape model rather
                than the radial vector.""")
            .build());
    options.addOption(
        Option.builder("local")
            .desc(
                """
                Use when adjusting a local OBJ file to another. The best fit plane to the
                first shape model is used to adjust the vertices rather than the radial vector
                for each point.
                """)
            .build());
    return options;
  }
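
  // Illustrative invocation (added; the file names are hypothetical):
  //   AdjustShapeModelToOtherShapeModel -from local.obj -to global.obj -output adjusted.obj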

  static Vector3D computeMeanPoint(List<Vector3D> points) {
    Vector3D meanPoint = new Vector3D(0., 0., 0.);
    for (Vector3D point : points) meanPoint = meanPoint.add(point);
    meanPoint = meanPoint.scalarMultiply(1. / points.size());
    return meanPoint;
  }
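
  // Worked example (added): the mean of (1,0,0), (0,1,0), and (0,0,1) is (1/3, 1/3, 1/3).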

  public static void adjustShapeModelToOtherShapeModel(
      vtkPolyData frompolydata,
      ArrayList<vtkPolyData> topolydata,
      double planeRadius,
      boolean localModel)
      throws Exception {
    vtkPoints points = frompolydata.GetPoints();
    long numberPoints = frompolydata.GetNumberOfPoints();

    boolean fitPlane = (planeRadius > 0);
    SmallBodyModel sbModel = new SmallBodyModel(frompolydata);
    double diagonalLength = new BoundingBox(frompolydata.GetBounds()).getDiagonalLength();

    ArrayList<vtksbCellLocator> cellLocators = new ArrayList<>();
    for (vtkPolyData polydata : topolydata) {
      vtksbCellLocator cellLocator = new vtksbCellLocator();
      cellLocator.SetDataSet(polydata);
      cellLocator.CacheCellBoundsOn();
      cellLocator.AutomaticOn();
      cellLocator.BuildLocator();
      cellLocators.add(cellLocator);
    }

    vtkGenericCell cell = new vtkGenericCell();
    double tol = 1e-6;
    double[] t = new double[1];
    double[] pcoords = new double[3];
    int[] subId = new int[1];
    long[] cell_id = new long[1];

    double[] localNormal = null;
    if (localModel) {
      // fit a plane to the local model and check that the normal points outward
      Plane localPlane = PolyDataUtil.fitPlaneToPolyData(frompolydata);
      Vector3 localNormalVector = localPlane.getNormal();
      if (localNormalVector.dot(localPlane.getPoint()) < 0)
        localNormalVector = localNormalVector.negate();
      localNormal = localNormalVector.toArray();
    }

    double[] p = new double[3];
    Vector3D origin = new Vector3D(0., 0., 0.);
    for (int i = 0; i < numberPoints; ++i) {
      points.GetPoint(i, p);

      Vector3D lookDir;

      if (fitPlane) {
        // fit a plane to the local area. Vector3D is immutable, so reassign origin to the
        // current point rather than copying into the temporary array returned by toArray().
        origin = new Vector3D(p);
        lookDir = new Vector3D(sbModel.getNormalAtPoint(p, planeRadius)).normalize();
      } else if (localModel) {
        origin = new Vector3D(p);
        lookDir = new Vector3D(localNormal).normalize();
      } else {
        // use radial vector
        lookDir = new Vector3D(p).normalize();
      }

      Vector3D lookPt = lookDir.scalarMultiply(diagonalLength);
      lookPt = lookPt.add(origin);

      List<Vector3D> intersections = new ArrayList<>();
      for (vtksbCellLocator cellLocator : cellLocators) {
        double[] intersectPoint = new double[3];

        // trace ray from the lookPt to the origin - first intersection is the farthest
        // intersection from the origin
        int result =
            cellLocator.IntersectWithLine(
                lookPt.toArray(),
                origin.toArray(),
                tol,
                t,
                intersectPoint,
                pcoords,
                subId,
                cell_id,
                cell);
        Vector3D intersectVector = new Vector3D(intersectPoint);

        if (fitPlane || localModel) {
          // NOTE: result should return 1 in case of intersection but doesn't sometimes.
          // Use the norm of intersection point to test for intersection instead.

          NavigableMap<Double, Vector3D> pointsMap = new TreeMap<>();
          if (intersectVector.getNorm() > 0) {
            pointsMap.put(origin.subtract(intersectVector).getNorm(), intersectVector);
          }

          lookPt = lookDir.scalarMultiply(-diagonalLength);
          lookPt = lookPt.add(origin);
          result =
              cellLocator.IntersectWithLine(
                  lookPt.toArray(),
                  origin.toArray(),
                  tol,
                  t,
                  intersectPoint,
                  pcoords,
                  subId,
                  cell_id,
                  cell);

          intersectVector = new Vector3D(intersectPoint);
          if (intersectVector.getNorm() > 0) {
            pointsMap.put(origin.subtract(intersectVector).getNorm(), intersectVector);
          }

          if (!pointsMap.isEmpty()) intersections.add(pointsMap.get(pointsMap.firstKey()));
        } else {
          if (result > 0) intersections.add(intersectVector);
        }
      }

      if (intersections.isEmpty()) throw new Exception("Error: no intersections at all");

      Vector3D meanIntersectionPoint = computeMeanPoint(intersections);
      points.SetPoint(i, meanIntersectionPoint.toArray());
    }
  }

  public static void main(String[] args) throws Exception {
    TerrasaurTool defaultOBJ = new AdjustShapeModelToOtherShapeModel();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    boolean loadListFromFile = cl.hasOption("filelist");
    double planeRadius = Double.parseDouble(cl.getOptionValue("fit-plane-radius", "-1"));
    boolean localModel = cl.hasOption("local");

    NativeLibraryLoader.loadVtkLibraries();

    String fromfile = cl.getOptionValue("from");
    String tofile = cl.getOptionValue("to");
    String outfile = cl.getOptionValue("output");

    Log4j2Configurator.getInstance();
    logger.info("loading <from-obj-file>: {}", fromfile);
    vtkPolyData frompolydata = PolyDataUtil.loadShapeModelAndComputeNormals(fromfile);

    ArrayList<vtkPolyData> topolydata = new ArrayList<>();
    if (loadListFromFile) {
      List<String> lines = FileUtils.readLines(new File(tofile), Charset.defaultCharset());
      for (String file : lines) {

        // checking length prevents trying to load an empty line, such as the
        // last line of the file.
        if (file.length() > 1) {
          logger.info("loading <to-obj-file>: {}", file);
          topolydata.add(PolyDataUtil.loadShapeModelAndComputeNormals(file));
        }
      }
    } else {
      logger.info("loading <to-obj-file>: {}", tofile);
      topolydata.add(PolyDataUtil.loadShapeModelAndComputeNormals(tofile));
    }

    adjustShapeModelToOtherShapeModel(frompolydata, topolydata, planeRadius, localModel);

    PolyDataUtil.saveShapeModelAsOBJ(frompolydata, outfile);

    logger.info("wrote {}", outfile);
  }
}
142
src/main/java/terrasaur/apps/AppendOBJ.java
Normal file
@@ -0,0 +1,142 @@
package terrasaur.apps;

import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.spi.StandardLevel;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.NativeLibraryLoader;
import terrasaur.utils.PolyDataUtil;
import vtk.vtkAppendPolyData;
import vtk.vtkObjectBase;
import vtk.vtkPolyData;

/**
 * AppendOBJ program. See the usage string for more information about this program.
 *
 * @author Eli Kahn
 * @version 1.0
 */
public class AppendOBJ implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  @Override
  public String shortDescription() {
    return "Combine multiple shape files (OBJ or VTK format) into one.";
  }

  @Override
  public String fullDescription(Options options) {
    String header = "This program combines input shape models into a single shape model.";
    return TerrasaurTool.super.fullDescription(options, header, "");
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("logFile")
            .hasArg()
            .desc("If present, save screen output to log file.")
            .build());
    StringBuilder sb = new StringBuilder();
    for (StandardLevel l : StandardLevel.values()) sb.append(String.format("%s ", l.name()));
    options.addOption(
        Option.builder("logLevel")
            .hasArg()
            .desc(
                "If present, print messages above selected priority. Valid values are "
                    + sb.toString().trim()
                    + ". Default is INFO.")
            .build());

    options.addOption(
        Option.builder("boundary")
            .desc("Only save out boundary. This option implies -vtk.")
            .build());
    options.addOption(
        Option.builder("decimate")
            .hasArg()
            .desc(
                "Reduce the number of facets in the output shape model. The argument should be between 0 and 1. "
                    + "For example, if a model has 100 facets and <arg> is 0.90, "
                    + "there will be approximately 10 facets after the decimation.")
            .build());
    options.addOption(
        Option.builder("input")
            .required()
            .hasArgs()
            .desc(
                "input file(s) to read. Format is derived from the allowed extension: "
                    + "icq, llr, obj, pds, plt, ply, stl, or vtk. Multiple files can be specified "
                    + "with a single -input option, separated by whitespace. Alternatively, you may "
                    + "specify -input multiple times.")
            .build());
    options.addOption(
        Option.builder("output").required().hasArg().desc("output file to write.").build());
    options.addOption(
        Option.builder("vtk").desc("Save output file in VTK format rather than OBJ.").build());
    return options;
  }
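
  // Illustrative invocation (added; the file names are hypothetical):
  //   AppendOBJ -input tile1.obj tile2.obj -decimate 0.5 -output combined.obj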

  public static void main(String[] args) throws Exception {

    TerrasaurTool defaultOBJ = new AppendOBJ();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    boolean boundaryOnly = cl.hasOption("boundary");
    boolean vtkFormat = boundaryOnly || cl.hasOption("vtk");
    boolean decimate = cl.hasOption("decimate");
    double decimationPercentage =
        decimate ? Double.parseDouble(cl.getOptionValue("decimate")) : 1.0;

    NativeLibraryLoader.loadVtkLibraries();

    String outfile = cl.getOptionValue("output");
    String[] infiles = cl.getOptionValues("input");

    vtkAppendPolyData append = new vtkAppendPolyData();
    append.UserManagedInputsOn();
    append.SetNumberOfInputs(infiles.length);

    for (int i = 0; i < infiles.length; ++i) {
      logger.info("loading {} {} / {}", infiles[i], i + 1, infiles.length);

      vtkPolyData polydata = PolyDataUtil.loadShapeModel(infiles[i]);

      if (polydata == null) {
        logger.warn("Cannot load {}", infiles[i]);
      } else {
        if (boundaryOnly) {
          vtkPolyData boundary = PolyDataUtil.getBoundary(polydata);
          boundary.GetCellData().SetScalars(null);
          polydata.DeepCopy(boundary);
        }

        append.SetInputDataByNumber(i, polydata);
      }
      System.gc();
      vtkObjectBase.JAVA_OBJECT_MANAGER.gc(false);
    }

    append.Update();

    vtkPolyData outputShape = append.GetOutput();
    if (decimate) PolyDataUtil.decimatePolyData(outputShape, decimationPercentage);

    if (vtkFormat) PolyDataUtil.saveShapeModelAsVTK(outputShape, outfile);
    else PolyDataUtil.saveShapeModelAsOBJ(outputShape, outfile);

    logger.info("Wrote {}", outfile);
  }
}
88
src/main/java/terrasaur/apps/BatchSubmit.java
Normal file
@@ -0,0 +1,88 @@
package terrasaur.apps;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.batch.BatchSubmitFactory;
import terrasaur.utils.batch.BatchSubmitI;
import terrasaur.utils.batch.BatchType;
import terrasaur.utils.batch.GridType;

public class BatchSubmit implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  @Override
  public String shortDescription() {
    return "Run a command on a cluster.";
  }

  @Override
  public String fullDescription(Options options) {

    String footer = "\nRun a command on a cluster.\n";

    return TerrasaurTool.super.fullDescription(options, "", footer);
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("command")
            .required()
            .hasArgs()
            .desc("Required. Command(s) to run.")
            .build());

    StringBuilder sb = new StringBuilder();
    for (GridType type : GridType.values()) sb.append(String.format("%s ", type.name()));
    options.addOption(
        Option.builder("gridType")
            .hasArg()
            .desc("Grid type. Valid values are " + sb + ". Default is LOCAL.")
            .build());

    options.addOption(
        Option.builder("workingDir")
            .hasArg()
            .desc("Working directory to run command. Default is current directory.")
            .build());
    return options;
  }
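
  // Illustrative invocation (added; the command strings are hypothetical):
  //   BatchSubmit -command "processTile 1" "processTile 2" -gridType LOCAL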

  public static void main(String[] args) {

    TerrasaurTool defaultOBJ = new BatchSubmit();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    List<String> cmdList = Arrays.asList(cl.getOptionValues("command"));
    BatchType batchType = BatchType.GRID_ENGINE;
    GridType gridType =
        cl.hasOption("gridType") ? GridType.valueOf(cl.getOptionValue("gridType")) : GridType.LOCAL;

    BatchSubmitI submitter = BatchSubmitFactory.getBatchSubmit(cmdList, batchType, gridType);
    // honor -workingDir if given; an empty string means the current directory
    String workingDir = cl.hasOption("workingDir") ? cl.getOptionValue("workingDir") : "";
    try {
      submitter.runBatchSubmitinDir(workingDir);
    } catch (InterruptedException | IOException e) {
      logger.error(e.getLocalizedMessage(), e);
    }
  }
}
295
src/main/java/terrasaur/apps/CKFromSumFile.java
Normal file
@@ -0,0 +1,295 @@
package terrasaur.apps;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.charset.Charset;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.builder.fluent.Configurations;
import org.apache.commons.configuration2.ex.ConfigurationException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.text.WordUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.spi.StandardLevel;
import spice.basic.KernelDatabase;
import spice.basic.Matrix33;
import spice.basic.ReferenceFrame;
import spice.basic.SpiceErrorException;
import spice.basic.SpiceException;
import spice.basic.SpiceQuaternion;
import spice.basic.TDBTime;
import spice.basic.Vector3;
import terrasaur.config.CKFromSumFileConfig;
import terrasaur.config.CKFromSumFileConfigFactory;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.*;
import terrasaur.utils.math.MathConversions;

public class CKFromSumFile implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  @Override
  public String shortDescription() {
    return "Create a CK from a list of sumfiles.";
  }

  @Override
  public String fullDescription(Options options) {
    String footer = "Create a CK from a list of sumfiles.";
    return TerrasaurTool.super.fullDescription(options, "", footer);
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(Option.builder("config").required().hasArg()
        .desc("Required. Name of configuration file.").build());
    options.addOption(Option.builder("dumpConfig").hasArg()
        .desc("Write out an example configuration to the named file.").build());
    options.addOption(Option.builder("logFile").hasArg()
        .desc("If present, save screen output to log file.").build());
    StringBuilder sb = new StringBuilder();
    for (StandardLevel l : StandardLevel.values())
      sb.append(String.format("%s ", l.name()));
    options.addOption(Option.builder("logLevel").hasArg()
        .desc("If present, print messages above selected priority. Valid values are "
            + sb.toString().trim() + ". Default is INFO.")
        .build());
    options.addOption(Option.builder("sumFile").hasArg().required().desc("""
        Required. File listing sumfiles to read. This is a text file, one per line.
        Lines starting with # are ignored.

        Example:

        D717506120G0.SUM
        D717506126G0.SUM
        D717506127G0.SUM
        D717506128G0.SUM
        D717506129G0.SUM
        D717506131G0.SUM
        # This is a comment
        D717506132G0.SUM
        """).build());
    return options;
  }

  private final CKFromSumFileConfig config;
  private final NavigableMap<SumFile, String> sumFiles;

  private CKFromSumFile() {
    config = null;
    sumFiles = null;
  }

  public CKFromSumFile(CKFromSumFileConfig config, NavigableMap<SumFile, String> sumFiles) {
    this.config = config;
    this.sumFiles = sumFiles;
  }

  public String writeMSOPCKFiles(String basename, List<String> comments) throws SpiceException {

    ReferenceFrame instrFrame = new ReferenceFrame(config.instrumentFrameName());
    ReferenceFrame scFrame = new ReferenceFrame(config.spacecraftFrame());
    ReferenceFrame j2000 = new ReferenceFrame("J2000");
    ReferenceFrame bodyFixed = new ReferenceFrame(config.bodyFrame());

    ReferenceFrame ref = config.J2000() ? j2000 : bodyFixed;

    logger.debug("Body fixed frame: {}", bodyFixed.getName());
    logger.debug("Instrument frame: {}", instrFrame.getName());
    logger.debug("Spacecraft frame: {}", scFrame.getName());
    logger.debug(" Reference frame: {}", ref.getName());

    File commentFile = new File(basename + "-comments.txt");
    if (commentFile.exists())
      if (!commentFile.delete())
        logger.error("{} exists but cannot be deleted!", commentFile.getPath());

    String setupFile = basename + ".setup";
    String inputFile = basename + ".inp";

    try (PrintWriter pw = new PrintWriter(commentFile)) {
      StringBuilder sb = new StringBuilder();

      DateTimeFormatter dtf = DateTimeFormatter.ofPattern("uuuu-MM-dd HH:mm:ss z");
      ZonedDateTime now = ZonedDateTime.now(ZoneId.of("UTC"));

      sb.append("This CK was created on ")
          .append(dtf.format(now))
          .append(" from the following sumFiles:\n");
      for (SumFile sumFile : sumFiles.keySet()) {
        sb.append(String.format("\t%s %s\n", sumFile.utcString(), sumFiles.get(sumFile)));
      }
      sb.append("\n");
      sb.append("providing the orientation of ")
          .append(scFrame.getName())
          .append(" with respect to ")
          .append(config.J2000() ? "J2000" : bodyFixed.getName())
          .append(". ");
      double first = new TDBTime(sumFiles.firstKey().utcString()).getTDBSeconds();
      double last = new TDBTime(sumFiles.lastKey().utcString()).getTDBSeconds() + config.extend();
      sb.append("The coverage period is ")
          .append(new TDBTime(first).toUTCString("ISOC", 3))
          .append(" to ")
          .append(new TDBTime(last).toUTCString("ISOC", 3))
          .append(" UTC.");

      String allComments = sb.toString();
      for (String comment : allComments.split("\\r?\\n"))
        pw.println(WordUtils.wrap(comment, 80));
    } catch (FileNotFoundException e) {
      logger.error(e.getLocalizedMessage(), e);
    }

    Map<String, String> map = new TreeMap<>();
    map.put("LSK_FILE_NAME", "'" + config.lsk() + "'");
    map.put("SCLK_FILE_NAME", "'" + config.sclk() + "'");
    map.put("CK_TYPE", "3");
    map.put("COMMENTS_FILE_NAME", String.format("'%s'", commentFile.getPath()));
    map.put("INSTRUMENT_ID", String.format("%d", scFrame.getIDCode()));
    map.put("REFERENCE_FRAME_NAME",
        String.format("'%s'", config.J2000() ? "J2000" : bodyFixed.getName()));
    if (!config.fk().isEmpty())
      map.put("FRAMES_FILE_NAME", "'" + config.fk() + "'");
    map.put("ANGULAR_RATE_PRESENT", "'MAKE UP/NO AVERAGING'");
    map.put("INPUT_TIME_TYPE", "'UTC'");
    map.put("INPUT_DATA_TYPE", "'SPICE QUATERNIONS'");
    map.put("PRODUCER_ID", "'Hari.Nair@jhuapl.edu'");

    try (PrintWriter pw = new PrintWriter(setupFile)) {
      pw.println("\\begindata");
      for (String key : map.keySet()) {
        pw.printf("%s = %s\n", key, map.get(key));
      }
    } catch (FileNotFoundException e) {
      logger.error(e.getLocalizedMessage(), e);
    }

    NavigableMap<Double, SpiceQuaternion> attitudeMap = new TreeMap<>();
    for (SumFile s : sumFiles.keySet()) {
      TDBTime t = new TDBTime(s.utcString());

      Vector3[] rows = new Vector3[3];
      rows[0] = MathConversions.toVector3(s.cx());
      rows[1] = MathConversions.toVector3(s.cy());
      rows[2] = MathConversions.toVector3(s.cz());

      Vector3 row0 = rows[Math.abs(config.flipX()) - 1];
      Vector3 row1 = rows[Math.abs(config.flipY()) - 1];
      Vector3 row2 = rows[Math.abs(config.flipZ()) - 1];

      if (config.flipX() < 0)
        row0 = row0.negate();
      if (config.flipY() < 0)
        row1 = row1.negate();
      if (config.flipZ() < 0)
        row2 = row2.negate();

      Matrix33 refToInstr = new Matrix33(row0, row1, row2);

      if (config.J2000()) {
        Matrix33 j2000ToBodyFixed = j2000.getPositionTransformation(bodyFixed, t);
        refToInstr = refToInstr.mxm(j2000ToBodyFixed);
      }

      Matrix33 instrToSc = instrFrame.getPositionTransformation(scFrame, t);
      Matrix33 refToSc = instrToSc.mxm(refToInstr);

      SpiceQuaternion q = new SpiceQuaternion(refToSc);
      attitudeMap.put(t.getTDBSeconds(), q);
    }

    if (config.extend() > 0) {
      var lastEntry = attitudeMap.lastEntry();
      attitudeMap.put(lastEntry.getKey() + config.extend(), lastEntry.getValue());
    }

    try (PrintWriter pw = new PrintWriter(new FileWriter(inputFile))) {
      for (double t : attitudeMap.keySet()) {
        SpiceQuaternion q = attitudeMap.get(t);
        pw.printf("%s %.14e %.14e %.14e %.14e\n", new TDBTime(t).toUTCString("ISOC", 6),
            q.getElt(0), q.getElt(1), q.getElt(2), q.getElt(3));
      }
    } catch (IOException e) {
      logger.error(e.getLocalizedMessage(), e);
    }

    return String.format("msopck %s %s %s.bc", setupFile, inputFile, basename);
  }
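
  // Illustrative result (added; the basename is hypothetical): for basename
  // "ck_2024_001_2024_002" this method writes ck_2024_001_2024_002-comments.txt,
  // ck_2024_001_2024_002.setup, and ck_2024_001_2024_002.inp, and returns
  // "msopck ck_2024_001_2024_002.setup ck_2024_001_2024_002.inp ck_2024_001_2024_002.bc".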

  public static void main(String[] args) throws SpiceException, IOException {
    TerrasaurTool defaultOBJ = new CKFromSumFile();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    if (cl.hasOption("dumpConfig")) {
      CKFromSumFileConfigFactory factory = new CKFromSumFileConfigFactory();
      PropertiesConfiguration config = factory.toConfig(factory.getTemplate());
      try {
        String filename = cl.getOptionValue("dumpConfig");
        config.write(new PrintWriter(filename));
        logger.info("Wrote {}", filename);
      } catch (ConfigurationException | IOException e) {
        logger.error(e.getLocalizedMessage(), e);
      }
      System.exit(0);
    }

    NativeLibraryLoader.loadSpiceLibraries();

    PropertiesConfiguration config = null;
    CKFromSumFileConfigFactory factory = new CKFromSumFileConfigFactory();
    try {
      config = new Configurations().properties(new File(cl.getOptionValue("config")));
    } catch (ConfigurationException e1) {
      logger.error(e1.getLocalizedMessage(), e1);
    }

    CKFromSumFileConfig appConfig = factory.fromConfig(config);

    for (String kernel : appConfig.metakernel()) KernelDatabase.load(kernel);

    NavigableMap<SumFile, String> sumFiles = new TreeMap<>((o1, o2) -> {
      try {
        return Double.compare(new TDBTime(o1.utcString()).getTDBSeconds(),
            new TDBTime(o2.utcString()).getTDBSeconds());
      } catch (SpiceErrorException e) {
        logger.error(e.getLocalizedMessage(), e);
      }
      return 0;
    });

    List<String> lines =
        FileUtils.readLines(new File(cl.getOptionValue("sumFile")), Charset.defaultCharset());
    for (String line : lines) {
      if (line.strip().startsWith("#")) continue;
      String[] parts = line.strip().split("\\s+");
      String filename = parts[0].trim();
      sumFiles.put(SumFile.fromFile(new File(filename)), FilenameUtils.getBaseName(filename));
    }

    CKFromSumFile app = new CKFromSumFile(appConfig, sumFiles);
    TDBTime begin = new TDBTime(sumFiles.firstKey().utcString());
    TDBTime end = new TDBTime(sumFiles.lastKey().utcString());
    String picture = "YYYY_DOY";
    String command = app.writeMSOPCKFiles(
        String.format("ck_%s_%s", begin.toString(picture), end.toString(picture)),
        new ArrayList<>());

    logger.info("To generate the CK, run:\n\t{}", command);

    logger.info("Finished.");
  }
}
567
src/main/java/terrasaur/apps/ColorSpots.java
Normal file
@@ -0,0 +1,567 @@
package terrasaur.apps;

import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Reader;
import java.util.*;
import java.util.concurrent.TimeUnit;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.apache.commons.math3.util.Pair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.spi.StandardLevel;
import picante.math.vectorspace.VectorIJK;
import terrasaur.smallBodyModel.SmallBodyModel;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.*;
import terrasaur.utils.math.MathConversions;
import terrasaur.utils.mesh.TriangularFacet;
import vtk.vtkCell;
import vtk.vtkIdList;
import vtk.vtkObject;
import vtk.vtkPoints;
import vtk.vtkPolyData;

/**
 * Read a file containing (x, y, z, value) or (lat, lon, value) records along with an OBJ shape
 * model, and write out statistics (mean value and standard deviation) for each facet.
 *
 * @author nairah1
 */
public class ColorSpots implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  private ColorSpots() {}

  @Override
  public String shortDescription() {
    return "Assign values to facets in a shape model from an input dataset.";
  }

  @Override
  public String fullDescription(Options options) {

    String header = "";
    String footer =
        """

        This program reads an OBJ file along with a CSV file containing locations and values and writes the \
        mean value and standard deviation for each facet within a specified distance of an input point to standard \
        out. Latitude and longitude are specified in degrees. Longitude is east longitude. Units of x, y, z, and \
        radius are the same as the units in the supplied OBJ file.
        """;
    return TerrasaurTool.super.fullDescription(options, header, footer);
  }

  private enum FORMAT {
    LL,
    LLR,
    XYZ
  }

  private enum FIELD {
    MIN,
    MAX,
    MEDIAN,
    N,
    RMS,
    SUM,
    STD,
    VARIANCE
  }

  private vtkPolyData polyData;
  private SmallBodyModel smallBodyModel;

  public ColorSpots(vtkPolyData polyData) {
    this.polyData = polyData;
    this.smallBodyModel = new SmallBodyModel(polyData);
  }

  private long getXYZ(double lat, double lon, double[] pt) {
    double[] origin = {0., 0., 0.};
    Vector3D lookDir = new Vector3D(lon, lat);

    return smallBodyModel.computeRayIntersection(origin, lookDir.toArray(), pt);
  }
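
  /**
   * Illustrative sketch only: Commons Math's two-argument {@link Vector3D#Vector3D(double,
   * double)} constructor used in {@link #getXYZ} builds a unit vector from azimuth (east
   * longitude) and elevation (latitude), both in radians, which is why the (lon, lat) argument
   * order above is correct.
   */
  private static void lookDirDemo() {
    Vector3D lookDir = new Vector3D(Math.toRadians(90), Math.toRadians(0));
    logger.debug("{}", lookDir); // approximately {0; 1; 0}: +Y for 90 degrees east longitude
  }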

  private ArrayList<double[]> readCSV(String filename, FORMAT format) {

    ArrayList<double[]> returnArray = new ArrayList<>();

    try (Reader in = new FileReader(filename)) {
      Iterable<CSVRecord> records = CSVFormat.DEFAULT.parse(in);
      for (CSVRecord record : records) {
        double[] values = new double[4];
        values[3] = Double.NaN;
        if (format == FORMAT.LL) {
          double lon = Math.toRadians(Double.parseDouble(record.get(0).trim()));
          double lat = Math.toRadians(Double.parseDouble(record.get(1).trim()));

          if (getXYZ(lat, lon, values) < 0) continue;
          try {
            values[3] = Double.parseDouble(record.get(2));
          } catch (NumberFormatException e) {
            continue;
          }
        } else {
          if (format == FORMAT.LLR) {
            double lon = Math.toRadians(Double.parseDouble(record.get(0).trim()));
            double lat = Math.toRadians(Double.parseDouble(record.get(1).trim()));
            double rad = Double.parseDouble(record.get(2).trim());
            Vector3D xyz = new Vector3D(lon, lat).scalarMultiply(rad);
            values[0] = xyz.getX();
            values[1] = xyz.getY();
            values[2] = xyz.getZ();
          } else if (format == FORMAT.XYZ) {
            values[0] = Double.parseDouble(record.get(0).trim());
            values[1] = Double.parseDouble(record.get(1).trim());
            values[2] = Double.parseDouble(record.get(2).trim());
          }
          smallBodyModel.findClosestCell(values);
          try {
            values[3] = Double.parseDouble(record.get(3).trim());
          } catch (NumberFormatException e) {
            continue;
          }
        }
        returnArray.add(values);
      }
    } catch (IOException e) {
      logger.error(e.getLocalizedMessage(), e);
    }

    return returnArray;
  }

  public TreeMap<Long, DescriptiveStatistics> getStatsFast(
      ArrayList<double[]> valuesList, double radius, boolean weight, boolean atVertices) {
    return atVertices
        ? getStatsVertex(valuesList, radius, weight)
        : getStatsFacet(valuesList, radius, weight);
  }

  private TreeMap<Long, DescriptiveStatistics> getStatsVertex(
      ArrayList<double[]> valuesList, double radius, boolean weight) {

    TreeMap<Long, DescriptiveStatistics> statMap = new TreeMap<>();
    for (long i = 0; i < smallBodyModel.getSmallBodyPolyData().GetNumberOfPoints(); i++) {
      DescriptiveStatistics stats = new DescriptiveStatistics();
      statMap.put(i, stats);
    }

    for (double[] values : valuesList) {
      Vector3D xyz = new Vector3D(values[0], values[1], values[2]);
      double value = values[3];

      vtkIdList pointIDs = new vtkIdList();
      smallBodyModel.getPointLocator().FindPointsWithinRadius(radius, xyz.toArray(), pointIDs);

      for (int i = 0; i < pointIDs.GetNumberOfIds(); i++) {
        long pointID = pointIDs.GetId(i);
        DescriptiveStatistics stats = statMap.get(pointID);

        Vector3D p = new Vector3D(smallBodyModel.getSmallBodyPolyData().GetPoint(pointID));
        double dist = p.distance(xyz);

        // only include points closer than radius
        if (dist < radius) {
          double thisValue = value;
          if (weight) thisValue *= (1 - dist / radius);
          stats.addValue(thisValue);
        }
      } // point loop
    } // values loop

    return statMap;
  }
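
  /**
   * Illustrative sketch only: the optional weighting in {@link #getStatsVertex} and
   * {@link #getStatsFacet} tapers each sample linearly from full weight at zero distance to zero
   * weight at the search radius.
   */
  private static void weightDemo() {
    double radius = 2.0;
    double value = 10.0;
    for (double dist = 0; dist < radius; dist += 0.5) {
      double weighted = value * (1 - dist / radius);
      logger.debug("dist {} -> {}", dist, weighted); // 10.0, 7.5, 5.0, 2.5
    }
  }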

  private TreeMap<Long, DescriptiveStatistics> getStatsFacet(
      ArrayList<double[]> valuesList, double radius, boolean weight) {

    TreeMap<Long, DescriptiveStatistics> statMap = new TreeMap<>();
    for (long i = 0; i < smallBodyModel.getSmallBodyPolyData().GetNumberOfCells(); i++) {
      DescriptiveStatistics stats = new DescriptiveStatistics();
      statMap.put(i, stats);
    }

    for (double[] values : valuesList) {
      Vector3D xyz = new Vector3D(values[0], values[1], values[2]);
      double value = values[3];

      Set<Long> cellIDs = smallBodyModel.findClosestCellsWithinRadius(xyz.toArray(), radius);
      // cellIDs.add(smallBodyModel.findClosestCell(xyz));

      for (Long cellID : cellIDs) {
        DescriptiveStatistics stats = statMap.get(cellID);

        TriangularFacet tf = PolyDataUtil.getFacet(polyData, cellID);
        Vector3D p = MathConversions.toVector3D(tf.getCenter());
        double dist = p.distance(xyz);

        // the cell center can be farther than radius as long as one of its points is closer
        // than radius
        if (dist < radius) {
          double thisValue = value;
          if (weight) thisValue *= (1 - dist / radius);
          stats.addValue(thisValue);
        }
      } // cell loop
    } // values loop

    return statMap;
  }

  public TreeMap<Integer, DescriptiveStatistics> getStats(
      ArrayList<double[]> valuesList, double radius) {

    // for each value, store indices of closest cells and distances
    TreeMap<Integer, ArrayList<Pair<Long, Double>>> closestCells = new TreeMap<>();
    for (int i = 0; i < valuesList.size(); i++) {
      double[] values = valuesList.get(i);
      Vector3D xyz = new Vector3D(values[0], values[1], values[2]);

      TreeSet<Long> sortedCellIDs =
          new TreeSet<>(smallBodyModel.findClosestCellsWithinRadius(values, radius));
      sortedCellIDs.add(smallBodyModel.findClosestCell(values));

      ArrayList<Pair<Long, Double>> distances = new ArrayList<>();
      for (long cellID : sortedCellIDs) {
        TriangularFacet tf = PolyDataUtil.getFacet(polyData, cellID);
        Vector3D p = MathConversions.toVector3D(tf.getCenter());
        double dist = p.distance(xyz);
        distances.add(Pair.create(cellID, dist));
      }
      closestCells.put(i, distances);
    }

    TreeMap<Integer, DescriptiveStatistics> statMap = new TreeMap<>();
    for (int cellID = 0; cellID < polyData.GetNumberOfCells(); cellID++) {
      DescriptiveStatistics stats = statMap.get(cellID);
      if (stats == null) {
        stats = new DescriptiveStatistics();
        statMap.put(cellID, stats);
      }

      for (int i = 0; i < valuesList.size(); i++) {
        double[] values = valuesList.get(i);

        ArrayList<Pair<Long, Double>> distances = closestCells.get(i);
        for (Pair<Long, Double> pair : distances) {

          if (pair.getFirst().intValue() < cellID) continue;

          if (pair.getFirst().intValue() > cellID) break;

          if (pair.getFirst().intValue() == cellID) {
            double dist = pair.getSecond();
            if (dist < radius) {
              double thisValue = (1 - dist / radius) * values[3];
              stats.addValue(thisValue);
            }
          }
        }
      }
    } // cell loop

    return statMap;
  }

  public static void main(String[] args) {
    // run the VTK garbage collector every 30 seconds
    vtkObject.JAVA_OBJECT_MANAGER.getAutoGarbageCollector().SetScheduleTime(30, TimeUnit.SECONDS);
    vtkObject.JAVA_OBJECT_MANAGER.getAutoGarbageCollector().SetAutoGarbageCollection(true);
    // vtkObject.JAVA_OBJECT_MANAGER.getAutoGarbageCollector().SetDebug(true);

    try {
      ColorSpotsMain(args);
    } catch (Exception e) {
      logger.error(e.getLocalizedMessage(), e);
    }

    vtkObject.JAVA_OBJECT_MANAGER.getAutoGarbageCollector().SetAutoGarbageCollection(false);
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("additionalFields")
            .hasArg()
            .desc(
                "Specify additional fields to write out. Allowed values are min, max, median, n, rms, sum, std, variance. "
                    + "More than one field may be specified in a comma separated list (e.g. "
                    + "-additionalFields sum,median,rms). Additional fields will be written out after the mean and std columns.")
            .build());
    options.addOption(
        Option.builder("allFacets")
            .desc(
                "Report values for all facets in the OBJ shape model, even if a facet is not within searchRadius "
                    + "of any points. Prints NaN if a facet is not within searchRadius. Default is to only "
                    + "print facets which have contributions from input points.")
            .build());
    options.addOption(
        Option.builder("info")
            .required()
            .hasArg()
            .desc(
                "Required. Name of CSV file containing values to plot."
                    + " Default format is lon, lat, radius, value. See -xyz and -llOnly options for alternate formats.")
            .build());
    options.addOption(
        Option.builder("llOnly").desc("Format of -info file is lon, lat, value.").build());
    options.addOption(
        Option.builder("logFile")
            .hasArg()
            .desc("If present, save screen output to log file.")
            .build());
    StringBuilder sb = new StringBuilder();
    for (StandardLevel l : StandardLevel.values()) sb.append(String.format("%s ", l.name()));
    options.addOption(
        Option.builder("logLevel")
            .hasArg()
            .desc(
                "If present, print messages above selected priority. Valid values are "
                    + sb.toString().trim()
                    + ". Default is INFO.")
            .build());
    options.addOption(
        Option.builder("normalize")
            .desc(
                "Report values per unit area (divide by total area of facets within search ellipse).")
            .build());
    options.addOption(
        Option.builder("noWeight")
            .desc("Do not weight points by distance from facet/vertex.")
            .build());
    options.addOption(
        Option.builder("obj")
            .required()
            .hasArg()
            .desc("Required. Name of shape model to read.")
            .build());
    options.addOption(
        Option.builder("outFile")
            .hasArg()
            .desc("Name of file to store the output. Default is to write to standard output.")
            .build());
    options.addOption(
        Option.builder("searchRadius")
            .hasArg()
            .desc(
                "Each facet will be colored using a weighted average of all points within searchRadius of the facet/vertex. "
                    + "If not present, set to sqrt(2)/2 * mean facet edge length.")
            .build());
    options.addOption(
        Option.builder("writeVertices")
            .desc(
                "Convert output from a per facet to a per vertex format. Each line will be of the form"
                    + " x, y, z, value, sigma where x, y, z are the vector components of the vertex."
                    + " Default is to only report facetID, facet_value, facet_sigma.")
            .build());
    options.addOption(
        Option.builder("xyz").desc("Format of -info file is x, y, z, value.").build());
    return options;
  }

  public static void ColorSpotsMain(String[] args) throws Exception {

    TerrasaurTool defaultOBJ = new ColorSpots();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    NativeLibraryLoader.loadVtkLibraries();

    final boolean writeVerts = cl.hasOption("writeVertices");
    final boolean allFacets = cl.hasOption("allFacets");
    final boolean normalize = cl.hasOption("normalize") && !writeVerts;
    final boolean weight = !cl.hasOption("noWeight");
    FORMAT format = FORMAT.LLR;
    for (Option option : cl.getOptions()) {
      if (option.getOpt().equals("xyz")) {
        format = FORMAT.XYZ;
      }
      if (option.getOpt().equals("llOnly")) {
        format = FORMAT.LL;
      }
    }

    vtkPolyData polyData = PolyDataUtil.loadShapeModelAndComputeNormals(cl.getOptionValue("obj"));

    double radius;
    if (cl.hasOption("searchRadius")) {
      radius = Double.parseDouble(cl.getOptionValue("searchRadius"));
    } else {
      PolyDataStatistics stats = new PolyDataStatistics(polyData);
      radius = stats.getMeanEdgeLength() * Math.sqrt(2) / 2;
      logger.info("Using search radius of " + radius);
    }

    ColorSpots cs = new ColorSpots(polyData);
    ArrayList<double[]> infoValues = cs.readCSV(cl.getOptionValue("info"), format);
    TreeMap<Long, DescriptiveStatistics> statMap =
        cs.getStatsFast(infoValues, radius, weight, writeVerts);

    double totalArea = 0;
    if (normalize) {
      for (int facet = 0; facet < polyData.GetNumberOfCells(); facet++) {
        vtkCell cell = polyData.GetCell(facet);
        vtkPoints points = cell.GetPoints();
        double[] pt0 = points.GetPoint(0);
        double[] pt1 = points.GetPoint(1);
        double[] pt2 = points.GetPoint(2);

        TriangularFacet tf =
            new TriangularFacet(new VectorIJK(pt0), new VectorIJK(pt1), new VectorIJK(pt2));
        double area = tf.getArea();

        totalArea += area;
        points.Delete();
        cell.Delete();
      }
    }

    ArrayList<FIELD> fields = new ArrayList<>();
    if (cl.hasOption("additionalFields")) {
      for (String s : cl.getOptionValue("additionalFields").trim().toUpperCase().split(",")) {
        for (FIELD f : FIELD.values()) {
          if (f.name().equalsIgnoreCase(s)) fields.add(f);
        }
      }
    }

    TreeMap<Long, ArrayList<Double>> map = new TreeMap<>();
    long numPoints = (writeVerts ? polyData.GetNumberOfPoints() : polyData.GetNumberOfCells());
    for (long index = 0; index < numPoints; index++) {
      DescriptiveStatistics stats = statMap.get(index);
      ArrayList<Double> values = new ArrayList<>();
      if (stats != null) {
        values.add(stats.getMean());
        values.add(stats.getStandardDeviation());
        for (FIELD f : fields) {
          if (f == FIELD.MIN) values.add(stats.getMin());
          if (f == FIELD.MAX) values.add(stats.getMax());
          if (f == FIELD.MEDIAN) values.add(stats.getPercentile(50));
          if (f == FIELD.N) values.add((double) stats.getN());
          if (f == FIELD.RMS) values.add(Math.sqrt(stats.getSumsq() / stats.getN()));
          if (f == FIELD.STD) values.add(stats.getStandardDeviation());
          if (f == FIELD.SUM) values.add(stats.getSum());
          if (f == FIELD.VARIANCE) values.add(stats.getVariance());
        }
      } else {
        values.add(Double.NaN);
        values.add(Double.NaN);
        for (FIELD f : fields) values.add(Double.NaN);
      }
      map.put(index, values);
    }

    ArrayList<String> returnList;

    if (writeVerts) {
      returnList = writeVertices(map, polyData, allFacets);
    } else {
      returnList = writeFacets(map, allFacets, normalize, totalArea);
    }

    if (cl.hasOption("outFile")) {
      try (PrintWriter pw = new PrintWriter(cl.getOptionValue("outFile"))) {
        for (String s : returnList) pw.println(s);
      }
    } else {
      for (String string : returnList) System.out.println(string);
    }
  }

  private static ArrayList<String> writeFacets(
      TreeMap<Long, ArrayList<Double>> map,
      boolean allFacets,
      boolean normalize,
      double totalArea) {

    ArrayList<String> returnList = new ArrayList<>();

    for (Long facet : map.keySet()) {
      ArrayList<Double> values = map.get(facet);
      Double value = values.get(0);
      Double sigma = values.get(1);
      if (allFacets || !value.isNaN()) {
        if (normalize) {
          value /= totalArea;
          sigma /= totalArea;
        }
        StringBuilder sb = new StringBuilder(String.format("%d, %e, %e", facet, value, sigma));
        for (int i = 2; i < values.size(); i++) {
          value = values.get(i);
          if (normalize) value /= totalArea;
          sb.append(String.format(", %e", value));
        }
        returnList.add(sb.toString());
      }
    }
    return returnList;
  }

  private static ArrayList<String> writeVertices(
      TreeMap<Long, ArrayList<Double>> map, vtkPolyData polyData, boolean allFacets) {

    ArrayList<String> returnList = new ArrayList<>();

    double[] thisPt = new double[3];
    for (Long vertex : map.keySet()) {
      ArrayList<Double> values = map.get(vertex);
      Double value = values.get(0);
      Double sigma = values.get(1);
      if (allFacets || !value.isNaN()) {
        // get vertex x,y,z values
        polyData.GetPoint(vertex, thisPt);
        StringBuilder sb =
            new StringBuilder(
                String.format(
                    "%e, %e, %e, %e, %e", thisPt[0], thisPt[1], thisPt[2], value, sigma));
        for (int i = 2; i < values.size(); i++) {
          value = values.get(i);
          sb.append(String.format(", %e", value));
        }
        returnList.add(sb.toString());
      }
    }
    return returnList;
  }
}
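
A minimal sketch of driving ColorSpots from Java rather than the command line (a hedged illustration only: the shape model file name and sample point are hypothetical, and the utility class package locations are assumed from the imports in the source above):

    import java.util.ArrayList;
    import java.util.TreeMap;
    import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
    import terrasaur.apps.ColorSpots;
    import terrasaur.utils.NativeLibraryLoader;
    import terrasaur.utils.PolyDataUtil;
    import vtk.vtkPolyData;

    public class ColorSpotsUsage {
      public static void main(String[] args) throws Exception {
        NativeLibraryLoader.loadVtkLibraries();
        vtkPolyData shape = PolyDataUtil.loadShapeModelAndComputeNormals("shape.obj");
        ColorSpots cs = new ColorSpots(shape);

        // each sample is (x, y, z, value)
        ArrayList<double[]> samples = new ArrayList<>();
        samples.add(new double[] {1.0, 0.0, 0.0, 42.0});

        // per-facet statistics, distance weighted, within a search radius of 0.1
        TreeMap<Long, DescriptiveStatistics> stats = cs.getStatsFast(samples, 0.1, true, false);
        stats.forEach((facet, s) -> System.out.println(facet + " " + s.getMean()));
      }
    }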
1012
src/main/java/terrasaur/apps/CompareOBJ.java
Normal file
217
src/main/java/terrasaur/apps/CreateSBMTStructure.java
Normal file
@@ -0,0 +1,217 @@
package terrasaur.apps;

import java.awt.*;
import java.io.File;
import java.io.PrintWriter;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.io.FileUtils;
import org.apache.commons.math3.geometry.euclidean.threed.Rotation;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.apache.commons.math3.geometry.euclidean.twod.Vector2D;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.*;
import terrasaur.utils.math.RotationUtils;
import vtk.vtkPolyData;

public class CreateSBMTStructure implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  /**
   * This doesn't need to be private, or even declared, but you might want it if you have other
   * constructors.
   */
  private CreateSBMTStructure() {}

  @Override
  public String shortDescription() {
    return "Construct ellipses from user-defined points on an image.";
  }

  @Override
  public String fullDescription(Options options) {
    String header = "This tool creates an SBMT ellipse file from a set of points on an image.";
    String footer = "";
    return TerrasaurTool.super.fullDescription(options, header, footer);
  }

  /**
   * Create an Ellipse as an SBMT structure from three points. The first two points define the long
   * axis and the third point defines the short axis.
   *
   * @param id structure id
   * @param name structure name
   * @param p1 First point
   * @param p2 Second point
   * @param p3 Third point
   * @return An SBMT structure describing the ellipse
   */
  private static SBMTEllipseRecord createRecord(
      int id, String name, Vector3D p1, Vector3D p2, Vector3D p3) {
    // Create a local coordinate system where the X axis contains the long axis and the Y axis
    // contains the short axis

    Vector3D origin = p1.add(p2).scalarMultiply(0.5);
    Vector3D X = p1.subtract(p2).normalize();
    Vector3D Y = p3.subtract(origin).normalize();

    // Create a rotation matrix to go from the body fixed frame to this local coordinate system
    Rotation globalToLocal = RotationUtils.IprimaryJsecondary(X, Y);

    // All of these vectors should have a Z coordinate of zero
    Vector3D p1Local = globalToLocal.applyTo(p1);
    Vector3D p2Local = globalToLocal.applyTo(p2);
    Vector3D p3Local = globalToLocal.applyTo(p3);

    // fit an ellipse to the three points on the plane
    Vector2D a = new Vector2D(p1Local.getX(), p1Local.getY());
    Vector2D b = new Vector2D(p2Local.getX(), p2Local.getY());
    Vector2D c = new Vector2D(p3Local.getX(), p3Local.getY());

    Vector2D center = a.add(b).scalarMultiply(0.5);
    double majorAxis = a.subtract(b).getNorm();
    double minorAxis = 2 * c.subtract(center).getNorm();

    double rotation = Math.atan2(b.getY() - a.getY(), b.getX() - a.getX());
    double flattening = (majorAxis - minorAxis) / majorAxis;

    ImmutableSBMTEllipseRecord.Builder record =
        ImmutableSBMTEllipseRecord.builder()
            .id(id)
            .name(name)
            .x(origin.getX())
            .y(origin.getY())
            .z(origin.getZ())
            .lat(origin.getDelta())
            .lon(origin.getAlpha())
            .radius(origin.getNorm())
            .slope(0)
            .elevation(0)
            .acceleration(0)
            .potential(0)
            .diameter(majorAxis)
            .flattening(flattening)
            .angle(rotation)
            .color(Color.BLACK)
            .dummy("")
            .label("");
    return record.build();
  }
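
  /**
   * Illustrative sketch only: once the three points are rotated into the ellipse plane, the fit
   * in {@link #createRecord} reduces to 2D. The center is the midpoint of the long-axis
   * endpoints, the major axis is their separation, and the minor axis is twice the third point's
   * distance from the center. The points below are made up.
   */
  private static void ellipseFitDemo() {
    Vector2D a = new Vector2D(-2, 0); // long axis endpoint
    Vector2D b = new Vector2D(2, 0); // long axis endpoint
    Vector2D c = new Vector2D(0, 1); // point on the short axis

    Vector2D center = a.add(b).scalarMultiply(0.5);
    double majorAxis = a.subtract(b).getNorm(); // 4.0
    double minorAxis = 2 * c.subtract(center).getNorm(); // 2.0
    double flattening = (majorAxis - minorAxis) / majorAxis; // 0.5
    logger.debug("major {} minor {} flattening {}", majorAxis, minorAxis, flattening);
  }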

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("input")
            .required()
            .hasArg()
            .desc(
                """
                Required. Name of input file. This is a text file with a pair of pixel coordinates per line. The pixel
                coordinates are offsets from the image center. For example:

                # My test file

                627.51274 876.11775
                630.53612 883.55992
                626.3499 881.46681

                Empty lines or lines beginning with # are ignored.

                Each set of three points is used to create an SBMT structure. The first two points define the long
                axis and the third is a location on the semi-minor axis.""")
            .build());
    options.addOption(
        Option.builder("objFile")
            .required()
            .hasArg()
            .desc("Required. Name of OBJ shape file.")
            .build());
    options.addOption(
        Option.builder("output")
            .required()
            .hasArg()
            .desc("Required. Name of output file.")
            .build());
    options.addOption(
        Option.builder("sumFile")
            .required()
            .hasArg()
            .desc("Required. Name of sum file to read.")
            .build());
    return options;
  }

  public static void main(String[] args) {
    TerrasaurTool defaultOBJ = new CreateSBMTStructure();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    NativeLibraryLoader.loadSpiceLibraries();
    NativeLibraryLoader.loadVtkLibraries();

    SumFile sumFile = SumFile.fromFile(new File(cl.getOptionValue("sumFile")));

    try {
      String objFile = cl.getOptionValue("objFile");
      vtkPolyData polyData = PolyDataUtil.loadShapeModel(objFile);
      if (polyData == null) {
        logger.error("Cannot read shape model {}!", objFile);
        System.exit(0);
      }
      RangeFromSumFile rfsf = new RangeFromSumFile(sumFile, polyData);

      List<Vector3D> intercepts = new ArrayList<>();
      List<String> lines =
          FileUtils.readLines(new File(cl.getOptionValue("input")), Charset.defaultCharset());
      for (String line :
          lines.stream().filter(s -> !(s.isBlank() || s.strip().startsWith("#"))).toList()) {
        String[] parts = line.split("\\s+");
        int ix = (int) Math.round(Double.parseDouble(parts[0]));
        int iy = (int) Math.round(Double.parseDouble(parts[1]));

        Map.Entry<Long, Vector3D> entry = rfsf.findIntercept(ix, iy);
        long cellID = entry.getKey();
        if (cellID > -1) intercepts.add(entry.getValue());
      }

      logger.info("Found {} sets of points", intercepts.size() / 3);

      List<SBMTEllipseRecord> records = new ArrayList<>();
      for (int i = 0; i < intercepts.size(); i += 3) {

        // p1 and p2 define the long axis of the ellipse
        Vector3D p1 = intercepts.get(i);
        Vector3D p2 = intercepts.get(i + 1);

        // p3 lies on the short axis
        Vector3D p3 = intercepts.get(i + 2);

        SBMTEllipseRecord record =
            createRecord(i / 3, String.format("Ellipse %d", i / 3), p1, p2, p3);
        records.add(record);
      }

      try (PrintWriter pw = new PrintWriter(cl.getOptionValue("output"))) {
        for (SBMTEllipseRecord record : records) pw.println(record.toString());
      }
      logger.info("Wrote {}", cl.getOptionValue("output"));
    } catch (Exception e) {
      logger.error(e.getLocalizedMessage(), e);
      throw new RuntimeException(e);
    }

    logger.info("Finished");
  }
}
196
src/main/java/terrasaur/apps/DSK2OBJ.java
Normal file
@@ -0,0 +1,196 @@
package terrasaur.apps;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.spi.StandardLevel;
import spice.basic.Body;
import spice.basic.CSPICE;
import spice.basic.DLADescriptor;
import spice.basic.DSK;
import spice.basic.DSKDescriptor;
import spice.basic.SpiceException;
import spice.basic.Surface;
import terrasaur.templates.TerrasaurTool;

public class DSK2OBJ implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  @Override
  public String shortDescription() {
    return "Create an OBJ from a DSK.";
  }

  @Override
  public String fullDescription(Options options) {
    String header = "";
    String footer = "\nCreate an OBJ from a DSK.\n";
    return TerrasaurTool.super.fullDescription(options, header, footer);
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("body")
            .hasArg()
            .desc(
                "If present, convert shape for named body. Default is to use the first body in the DSK.")
            .build());
    options.addOption(
        Option.builder("dsk").hasArg().required().desc("Required. Name of input DSK.").build());
    options.addOption(
        Option.builder("logFile")
            .hasArg()
            .desc("If present, save screen output to log file.")
            .build());
    StringBuilder sb = new StringBuilder();
    for (StandardLevel l : StandardLevel.values()) sb.append(String.format("%s ", l.name()));
    options.addOption(
        Option.builder("logLevel")
            .hasArg()
            .desc(
                "If present, print messages above selected priority. Valid values are "
                    + sb.toString().trim()
                    + ". Default is INFO.")
            .build());
    options.addOption(Option.builder("obj").hasArg().desc("Name of output OBJ.").build());
    options.addOption(
        Option.builder("printBodies")
            .desc("If present, print bodies and surface ids in DSK.")
            .build());
    options.addOption(
        Option.builder("surface")
            .hasArg()
            .desc(
                "If present, use specified surface id. Default is to use the first surface id for the body.")
            .build());
    return options;
  }

  public static void main(String[] args) {
    TerrasaurTool defaultOBJ = new DSK2OBJ();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    System.loadLibrary("JNISpice");

    String dskName = cl.getOptionValue("dsk");
    File dskFile = new File(dskName);
    if (!dskFile.exists()) {
      logger.warn("Input DSK " + dskName + " does not exist!");
      System.exit(0);
    }

    try {
      DSK dsk = DSK.openForRead(dskName);
      Body[] bodies = dsk.getBodies();

      if (cl.hasOption("printBodies")) {
        logger.info("found bodies and surface ids:");
        for (int i = 0; i < bodies.length; i++) {
          Body b = bodies[i];
          Surface[] surfaces = dsk.getSurfaces(b);
          StringBuilder sb = new StringBuilder();
          sb.append(String.format("%d) %s", i, b.getName()));
          for (Surface s : surfaces) sb.append(String.format(" %d", s.getIDCode()));
          logger.info(sb.toString());
        }
      }

      Body b = cl.hasOption("body") ? new Body(cl.getOptionValue("body")) : bodies[0];
      boolean missingBody = true;
      for (Body body : bodies) {
        if (b.equals(body)) {
          missingBody = false;
          break;
        }
      }
      if (missingBody) {
        logger.warn(String.format("Body %s not found in DSK! Valid bodies are:", b.getName()));
        for (Body body : bodies) logger.warn(body.getName());
        System.exit(0);
      }

      Surface[] surfaces = dsk.getSurfaces(b);
      Surface s =
          cl.hasOption("surface")
              ? new Surface(Integer.parseInt(cl.getOptionValue("surface")), b)
              : surfaces[0];
      boolean missingSurface = true;
      for (Surface surface : surfaces) {
        if (s.equals(surface)) {
          missingSurface = false;
          break;
        }
      }
      if (missingSurface) {
        logger.warn(
            String.format(
                "Surface %d for body %s not found in DSK! Valid surfaces are:",
                s.getIDCode(), b.getName()));
        for (Surface surface : surfaces) logger.warn(Integer.toString(surface.getIDCode()));
        System.exit(0);
      }

      DLADescriptor dladsc = dsk.beginBackwardSearch();
      boolean found = true;
      while (found) {

        DSKDescriptor dskdsc = dsk.getDSKDescriptor(dladsc);

        if (b.getIDCode() == dskdsc.getCenterID() && s.getIDCode() == dskdsc.getSurfaceID()) {

          // number of plates and vertices
          int[] np = new int[1];
          int[] nv = new int[1];
          CSPICE.dskz02(dsk.getHandle(), dladsc.toArray(), nv, np);

          double[][] vertices = CSPICE.dskv02(dsk.getHandle(), dladsc.toArray(), 1, nv[0]);
          int[][] plates = CSPICE.dskp02(dsk.getHandle(), dladsc.toArray(), 1, np[0]);

          if (cl.hasOption("obj")) {
            try (PrintWriter pw = new PrintWriter(cl.getOptionValue("obj"))) {
              for (double[] v : vertices) {
                pw.printf("v %20.16f %20.16f %20.16f\r\n", v[0], v[1], v[2]);
              }
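              // DSK type 2 plate records hold 1-based vertex indices, matching OBJ's
              // 1-based "f" references, so they can be written out unchanged.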
              for (int[] p : plates) {
                pw.printf("f %d %d %d\r\n", p[0], p[1], p[2]);
              }
              logger.info(
                  String.format(
                      "Wrote %d vertices and %d plates to %s for body %d surface %d",
                      nv[0],
                      np[0],
                      cl.getOptionValue("obj"),
                      dskdsc.getCenterID(),
                      dskdsc.getSurfaceID()));
            } catch (FileNotFoundException e) {
              logger.warn(e.getLocalizedMessage());
            }
          }
        }

        found = dsk.hasPrevious(dladsc);
        if (found) {
          dladsc = dsk.getPrevious(dladsc);
        }
      }

    } catch (SpiceException e) {
      logger.warn(e.getLocalizedMessage());
    }
  }
}
995
src/main/java/terrasaur/apps/DifferentialVolumeEstimator.java
Normal file
@@ -0,0 +1,995 @@
package terrasaur.apps;

import java.awt.geom.Path2D;
import java.awt.geom.Point2D;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.nio.charset.Charset;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.math3.geometry.euclidean.threed.Rotation;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.apache.commons.math3.util.Pair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import picante.math.vectorspace.RotationMatrixIJK;
import terrasaur.smallBodyModel.SBMTStructure;
import terrasaur.smallBodyModel.SmallBodyModel;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.*;
import terrasaur.utils.math.MathConversions;
import vtk.vtkCellArray;
import vtk.vtkDoubleArray;
import vtk.vtkIdList;
import vtk.vtkPoints;
import vtk.vtkPolyData;
import vtk.vtkPolyDataWriter;

/**
 * Given a reference surface (either from a shape model or a set of points), find a best fit
 * reference plane. Find the height above this plane for the reference surface and an input shape
 * model. Report the differential height and volume between these two surfaces on a uniform grid.
 *
 * <p>This class uses three coordinate systems:
 *
 * <ul>
 *   <li>The global coordinate system. This is the coordinate system of the input data.
 *   <li>The local coordinate system. This has the X and Y axes in the best fit plane to the
 *       reference data. The origin is optionally set by the user.
 *   <li>The native coordinate system. This is not seen by the user. The XY plane is the same as
 *       the local coordinate system, but the origin may be translated and there may be a rotation
 *       applied about the Z axis.
 * </ul>
 *
 * @author Hari.Nair@jhuapl.edu
 */
public class DifferentialVolumeEstimator implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  private DifferentialVolumeEstimator() {}

  @Override
  public String shortDescription() {
    return "Find volume difference between two shape models.";
  }

  // degree of polynomial used to fit surface
  private final int POLYNOMIAL_DEGREE = 2;

  @Override
  public String fullDescription(Options options) {
    String header = "";
    String footer =
        "\nThis program finds the volume difference between a shape model and a reference surface. "
            + "The reference surface can either be another shape model or a degree "
            + POLYNOMIAL_DEGREE
            + " fit to a set of supplied points. "
            + "A local coordinate system is derived from the reference surface. The heights of "
            + "the shape and reference at each grid point are reported.";
    return TerrasaurTool.super.fullDescription(options, header, footer);
  }

  /** input shape model */
  private vtkPolyData globalPolyData;

  /** shape model in native coordinates */
  private SmallBodyModel nativeSBM;

  /** number of radial profiles */
  private Integer numProfiles;

  public void setNumProfiles(Integer numProfiles) {
    this.numProfiles = numProfiles;
  }

  /** true if local +Z is aligned with the center radial direction */
  private boolean radialUp;

  public void setRadialUp(boolean radialUp) {
    this.radialUp = radialUp;
  }

  /** reference points in global coordinates */
  private List<Vector3D> referencePoints;

  public void setReferencePoints(List<Vector3D> referencePoints) {
    this.referencePoints = referencePoints;
  }

  /** reference model in global coordinates */
  private vtkPolyData referencePolyData;

  public void setReferencePolyData(vtkPolyData referencePolyData) {
    this.referencePolyData = referencePolyData;
  }

  /** Reference shape in native coordinates */
  private SmallBodyModel referenceSBM;

  /** reference surface in native coordinates */
  private FitSurface referenceSurface;

  private double gridSpacing;
  private double gridHalfExtent;

  /** global coordinates of the highest point of the shape model */
  private Vector3D highPoint;

  /** global coordinates of the lowest point of the shape model */
  private Vector3D lowPoint;

  /** this plane converts coordinates from native to global and back */
  private FitPlane plane;

  /** Inner edge of the ROI */
  private Path2D.Double roiInner;

  /** Outer edge of the ROI */
  private Path2D.Double roiOuter;

  // local grid is in the same plane as native grid but is translated and rotated
  private Entry<Rotation, Vector3D> nativeToLocal;

  // the origin of the local coordinate system, in global coordinates
  private enum ORIGIN {
    MIN_HEIGHT, MAX_HEIGHT, CUSTOM, DEFAULT
  }

  public Vector3D nativeToLocal(Vector3D nativeIJK) {
    return nativeToLocal.getKey().applyTo(nativeIJK.subtract(nativeToLocal.getValue()));
  }

  public Vector3D localToNative(Vector3D local) {
    return nativeToLocal.getKey().applyInverseTo(local).add(nativeToLocal.getValue());
  }
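
  /**
   * Illustrative sketch only: {@link #nativeToLocal(Vector3D)} rotates the translated point and
   * {@link #localToNative(Vector3D)} applies the inverse, so once {@link #createReference} has
   * defined the transform, a point round-trips to itself up to rounding.
   */
  private void roundTripDemo() {
    Vector3D nativeIJK = new Vector3D(4, 5, 6);
    Vector3D back = localToNative(nativeToLocal(nativeIJK));
    logger.debug("round trip error: {}", back.distance(nativeIJK)); // ~0
  }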

  /**
   * Set the inner boundary of the ROI
   *
   * @param filename file containing points in global coordinates
   */
  public void setInnerROI(String filename) {
    List<Vector3D> points = readPointsFromFile(filename);
    roiInner = createOutline(points);
  }

  /**
   * Set the outer boundary of the ROI
   *
   * @param filename file containing points in global coordinates
   */
  public void setOuterROI(String filename) {
    List<Vector3D> points = readPointsFromFile(filename);
    roiOuter = createOutline(points);
  }

  /**
   * Construct an outline on the local grid from a list of points in global coordinates.
   *
   * @param points points in global coordinates
   * @return outline on local grid
   */
  private Path2D.Double createOutline(List<Vector3D> points) {

    Path2D.Double outline = new Path2D.Double();
    for (int i = 0; i < points.size(); i++) {
      Vector3D nativeIJK = plane.globalToLocal(points.get(i));
      Vector3D localIJK =
          nativeToLocal.getKey().applyTo(nativeIJK.subtract(nativeToLocal.getValue()));
      if (i == 0) {
        outline.moveTo(localIJK.getX(), localIJK.getY());
      } else {
        outline.lineTo(localIJK.getX(), localIJK.getY());
      }
    }
    outline.closePath();

    return outline;
  }

  public DifferentialVolumeEstimator(vtkPolyData polyData) {
    this.globalPolyData = polyData;
    this.referencePolyData = null;
    this.referencePoints = null;
    this.numProfiles = 0;
    this.radialUp = false;
  }

  /**
   * Get the height of the shape model above the reference plane
   *
   * @param x in native coordinates
   * @param y in native coordinates
   * @return height, or {@link Double#NaN} if no intersection found
   */
  public double getHeight(double x, double y) {

    double height = Double.NaN;
    double[] origin = {x, y, 0};
    double[] direction = {0, 0, 1};
    double[] intersect = new double[3];

    long cellID = nativeSBM.computeRayIntersection(origin, direction, intersect);
    if (cellID < 0) {
      direction[2] = -1;
      cellID = nativeSBM.computeRayIntersection(origin, direction, intersect);
    }
    if (cellID >= 0)
      height = direction[2] * new Vector3D(origin).distance(new Vector3D(intersect));

    return height;
  }

  /**
   * Get the height of the reference surface above the reference plane.
   *
   * @param x in native coordinates
   * @param y in native coordinates
   * @return height of surface at (x,y) above reference plane
   */
  public double getRefHeight(double x, double y) {

    double[] origin = {x, y, 0};
    double[] direction = {0, 0, 1};
    double[] intersect;

    double refHeight = Double.NaN;
    if (referenceSBM != null) {
      intersect = new double[3];

      long cellID = referenceSBM.computeRayIntersection(origin, direction, intersect);
      if (cellID < 0) {
        direction[2] = -1;
        cellID = referenceSBM.computeRayIntersection(origin, direction, intersect);
      }
      if (cellID >= 0) {
        refHeight = direction[2] * new Vector3D(origin).distance(new Vector3D(intersect));
      }
    } else {
      refHeight = referenceSurface.value(x, y);
    }

    return refHeight;
  }

  /**
   * Create an array of grid points with heights
   *
   * @param gridHalfExtent half-size of grid
   * @param gridSpacing spacing between points
   * @return grid points sorted by x coordinate, then y
   */
  private NavigableSet<GridPoint> createGrid(double gridHalfExtent, double gridSpacing) {
    Set<Vector3D> localGrid = new HashSet<>();

    for (double x = 0; x <= gridHalfExtent; x += gridSpacing) {
      for (double y = 0; y <= gridHalfExtent; y += gridSpacing) {
        localGrid.add(new Vector3D(x, y, 0));
        if (y != 0) localGrid.add(new Vector3D(x, -y, 0));
        if (x != 0) localGrid.add(new Vector3D(-x, y, 0));
        if (x != 0 && y != 0) localGrid.add(new Vector3D(-x, -y, 0));
      }
    }
    this.gridSpacing = gridSpacing;
    this.gridHalfExtent = gridHalfExtent;

    GridPoint highGridPoint = null;
    GridPoint lowGridPoint = null;
    NavigableSet<GridPoint> gridPoints = new TreeSet<>();
    for (Vector3D localPoint : localGrid) {
      GridPoint gp = new GridPoint(localPoint);
      gridPoints.add(gp);
      if (Double.isFinite(gp.height)) {
        if (highGridPoint == null || highGridPoint.height < gp.height) highGridPoint = gp;
        if (lowGridPoint == null || lowGridPoint.height > gp.height) lowGridPoint = gp;
      }
    }

    if (highGridPoint != null) highPoint = highGridPoint.globalIJK;
    if (lowGridPoint != null) lowPoint = lowGridPoint.globalIJK;

    return gridPoints;
  }
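
  /**
   * Illustrative sketch only: {@link #createGrid} generates one quadrant and mirrors it across
   * both axes, so a half-extent E with spacing s yields (2 * floor(E / s) + 1) squared points
   * centered on the origin. For E = 2 and s = 1 that is 25 points.
   */
  private static void gridCountDemo() {
    Set<Vector3D> grid = new HashSet<>();
    double halfExtent = 2.0, spacing = 1.0;
    for (double x = 0; x <= halfExtent; x += spacing) {
      for (double y = 0; y <= halfExtent; y += spacing) {
        grid.add(new Vector3D(x, y, 0));
        if (y != 0) grid.add(new Vector3D(x, -y, 0));
        if (x != 0) grid.add(new Vector3D(-x, y, 0));
        if (x != 0 && y != 0) grid.add(new Vector3D(-x, -y, 0));
      }
    }
    logger.debug("{} grid points", grid.size()); // 25
  }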

  /**
   * Create the reference surface, either from a fit to a set of points or from an input shape
   * model.
   *
   * @param localOriginInGlobalCoordinates local origin in global coordinates
   */
  public void createReference(Vector3D localOriginInGlobalCoordinates) {

    double[] pt = new double[3];

    if (referencePolyData != null) {
      if (referencePoints != null) {
        logger.warn(
            "Both -referenceList and -referenceShape were specified. Reference surface will be set to argument of -referenceShape.");
      }
      referencePoints = new ArrayList<>();
      for (int i = 0; i < referencePolyData.GetNumberOfPoints(); i++) {
        referencePolyData.GetPoint(i, pt);
        referencePoints.add(new Vector3D(pt));
      }
    }

    // this is the best fit plane to the reference points. It can convert points in input
    // (global) coordinates to native coordinates and vice versa
    plane = new FitPlane(referencePoints);

    // set the +Z direction for the local plane
    Vector3D referenceNormal = radialUp ? plane.getTransform().getValue() : Vector3D.PLUS_K;

    // check if the plane normal is pointing in the same direction as the reference normal.
    // If not, flip the plane
    Pair<Rotation, Vector3D> transform = plane.getTransform();
    Vector3D planeNormal = transform.getKey().applyInverseTo(referenceNormal);
    if (planeNormal.dotProduct(referenceNormal) < 0) plane = plane.reverseNormal();

    // create the SmallBodyModel for the shape to evaluate
    vtkPolyData nativePolyData = new vtkPolyData();
    nativePolyData.DeepCopy(globalPolyData);
    vtkPoints points = nativePolyData.GetPoints();
    for (int i = 0; i < points.GetNumberOfPoints(); i++) {
      points.GetPoint(i, pt);
      Vector3D nativePoint = plane.globalToLocal(new Vector3D(pt));
      double[] data = nativePoint.toArray();
      points.SetPoint(i, data);
    }
    nativeSBM = new SmallBodyModel(nativePolyData);

    // now define the reference shape/surface
    if (referencePolyData != null) {
      // create the SmallBodyModel for the reference shape
      nativePolyData = new vtkPolyData();
      nativePolyData.DeepCopy(referencePolyData);
      points = nativePolyData.GetPoints();
      for (int i = 0; i < points.GetNumberOfPoints(); i++) {
        points.GetPoint(i, pt);
        Vector3D nativePoint = plane.globalToLocal(new Vector3D(pt));
        double[] data = nativePoint.toArray();
        points.SetPoint(i, data);
      }

      referenceSBM = new SmallBodyModel(nativePolyData);
    } else {
      // create the reference surface
      List<Vector3D> nativePoints = new ArrayList<>();
      for (Vector3D v : referencePoints) {
        Vector3D nativePoint = plane.globalToLocal(v);
        nativePoints.add(nativePoint);
      }

      referenceSurface = new FitSurface(nativePoints, POLYNOMIAL_DEGREE);
    }

    // create a rotation matrix to go from native to local (where the Z axis is the same for both
    // and the X axis is aligned in the same direction as the global X axis)
    Pair<Rotation, Vector3D> globalToLocalTransform = plane.getTransform();
    Vector3D kRow = Vector3D.PLUS_K;
    Vector3D iRow = globalToLocalTransform.getKey().applyTo(Vector3D.PLUS_I);
    Vector3D jRow = Vector3D.crossProduct(kRow, iRow).normalize();
    kRow = Vector3D.crossProduct(iRow, jRow).normalize();
    iRow = iRow.normalize();

    Vector3D translateNativeToLocal = Vector3D.ZERO;
    if (localOriginInGlobalCoordinates.getNorm() > 0) {
      // translation to go from native to local (where localOriginInGlobalCoordinates defines
      // 0,0 in the local frame)
      Vector3D nativeOriginInGlobalCoordinates = plane.localToGlobal(Vector3D.ZERO);
      Vector3D translateNativeToLocalInGlobalCoordinates =
          localOriginInGlobalCoordinates.subtract(nativeOriginInGlobalCoordinates);
      // TODO: check that the Z component is zero (it should be?)
      translateNativeToLocal =
          globalToLocalTransform.getKey().applyTo(translateNativeToLocalInGlobalCoordinates);
    }

    Rotation rotateNativeToLocal =
        MathConversions.toRotation(
            new RotationMatrixIJK(
                iRow.getX(), jRow.getX(), kRow.getX(),
                iRow.getY(), jRow.getY(), kRow.getY(),
                iRow.getZ(), jRow.getZ(), kRow.getZ()));

    this.nativeToLocal = new AbstractMap.SimpleEntry<>(rotateNativeToLocal, translateNativeToLocal);
  }

  /**
   * The header for grid and profile CSV files. Each line begins with a #
   *
   * @param header string at beginning of header
   * @return complete header
   */
  public static String getHeader(String header) {
    StringBuilder sb = new StringBuilder();
    sb.append(header);
    sb.append("# Local X and Y are grid coordinates in the local reference frame\n");
    sb.append("# Angle is measured from the local X axis, in degrees\n");
    sb.append("# ROI flag is 1 if point is in the region of interest, 0 if not\n");
    sb.append("# Global X, Y, and Z are the local grid points in the global "
        + "(input) reference system\n");
    sb.append("# Reference Height is the height of the reference model (or fit surface) above "
        + "the local grid plane\n");
    sb.append("# Model Height is the height of the shape model above the local grid plane. "
        + "NaN means there is no model intersection at this grid point.\n");
    sb.append("# Bin volume is the grid cell area times the model - reference height\n");
    sb.append("#\n");
    sb.append(String.format("%s, ", "Local X"));
    sb.append(String.format("%s, ", "Local Y"));
    sb.append(String.format("%s, ", "Angle"));
    sb.append(String.format("%s, ", "ROI Flag"));
    sb.append(String.format("%s, ", "Global X"));
    sb.append(String.format("%s, ", "Global Y"));
    sb.append(String.format("%s, ", "Global Z"));
    sb.append(String.format("%s, ", "Reference Height"));
    sb.append(String.format("%s, ", "Model Height"));
    sb.append(String.format("%s, ", "Model - Reference"));
    sb.append(String.format("%s", "Bin Volume"));
    return sb.toString();
  }

  /**
   * The header for the sector CSV file. Each line begins with a #
   *
   * @param header string at beginning of header
   * @return complete header
   */
  public static String getSectorHeader(String header) {
    StringBuilder sb = new StringBuilder();
    sb.append(header);
    sb.append("# Angle is measured from the local X axis, in degrees\n");
    sb.append(
        "# Sector volume is the grid cell area times the model - reference height summed over all grid cells in the ROI.\n");
    sb.append("#\n");
    sb.append(String.format("%s, ", "Index"));
    sb.append(String.format("%s, ", "Start angle (degrees)"));
    sb.append(String.format("%s, ", "Stop angle (degrees)"));
    sb.append(String.format("%s, ", "Sector Volume above reference surface"));
    sb.append(String.format("%s, ", "Sector Volume below reference surface"));
    sb.append(String.format("%s", "Total Sector Volume"));

    return sb.toString();
  }

  private String toCSV(GridPoint gp) {
    StringBuilder sb = new StringBuilder();

    sb.append(String.format("%f, ", gp.localIJK.getX()));
    sb.append(String.format("%f, ", gp.localIJK.getY()));

    double angle = Math.toDegrees(Math.atan2(gp.localIJK.getY(), gp.localIJK.getX()));
    if (angle < 0) angle += 360;
    sb.append(String.format("%f, ", angle));
    sb.append(String.format("%d, ", isInsideROI(gp.localIJK) ? 1 : 0));

    sb.append(String.format("%f, ", gp.globalIJK.getX()));
    sb.append(String.format("%f, ", gp.globalIJK.getY()));
    sb.append(String.format("%f, ", gp.globalIJK.getZ()));

    sb.append(String.format("%g, ", gp.referenceHeight));
    sb.append(String.format("%g, ", gp.height));
    sb.append(String.format("%g, ", gp.differentialHeight));
    sb.append(String.format("%g", gridSpacing * gridSpacing * gp.differentialHeight));

    return sb.toString();
  }
|
||||
|
||||
/**
|
||||
*
|
||||
* @param localIJ Point on the local grid. The Z coordinate is ignored
|
||||
* @return true if the point is inside the outer boundary and outside the inner boundary. If the
|
||||
* outer boundary is null then all points are considered to be inside the outer boundary. If the
|
||||
* inner boundary is null all points are considered to be outside the inner boundary.
|
||||
*/
|
||||
private boolean isInsideROI(Vector3D localIJ) {
|
||||
Point2D thisPoint = new Point2D.Double(localIJ.getX(), localIJ.getY());
|
||||
boolean insideROI = roiOuter == null || roiOuter.contains(thisPoint);
|
||||
if (roiInner != null) {
|
||||
if (insideROI && roiInner.contains(thisPoint))
|
||||
insideROI = false;
|
||||
}
|
||||
return insideROI;
|
||||
}
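  /*
   * Editor's sketch (not part of the original import): the ROI test above treats the
   * region of interest as an annulus. A self-contained analogue using hypothetical
   * java.awt.geom boundaries:
   *
   *   import java.awt.geom.*;
   *   Shape outer = new Ellipse2D.Double(-10, -10, 20, 20); // hypothetical outer boundary
   *   Shape inner = new Ellipse2D.Double(-2, -2, 4, 4);     // hypothetical inner boundary
   *   Point2D p = new Point2D.Double(5, 0);
   *   boolean inROI = (outer == null || outer.contains(p))
   *       && !(inner != null && inner.contains(p));         // true for this p
   */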

  /**
   * Write out a VTK file with the local grid points. Useful for a sanity check.
   *
   * @param gridPointsList grid points
   * @param profilesMap grid points along each profile
   * @param sectorsMap grid points within each sector
   * @param vtkFile file to write
   */
  private void writeReferenceVTK(Collection<GridPoint> gridPointsList,
      Map<Integer, Collection<GridPoint>> profilesMap,
      Map<Integer, Collection<GridPoint>> sectorsMap, String vtkFile) {

    Map<Vector3D, Boolean> roiMap = new HashMap<>();
    Map<Vector3D, Integer> profileMap = new HashMap<>();
    Map<Vector3D, Integer> sectorMap = new HashMap<>();

    for (GridPoint gp : gridPointsList) {
      Vector3D localIJK = gp.localIJK;
      Vector3D nativeIJK =
          nativeToLocal.getKey().applyInverseTo(localIJK).add(nativeToLocal.getValue());
      Vector3D globalIJK = plane.localToGlobal(nativeIJK);
      roiMap.put(globalIJK, isInsideROI(gp.localIJK));
      profileMap.put(globalIJK, 0);
      sectorMap.put(globalIJK, 0);
    }

    for (int i : profilesMap.keySet()) {
      for (GridPoint gp : profilesMap.get(i)) {
        Vector3D localIJK = gp.localIJK;
        Vector3D nativeIJK =
            nativeToLocal.getKey().applyInverseTo(localIJK).add(nativeToLocal.getValue());
        Vector3D globalIJK = plane.localToGlobal(nativeIJK);
        profileMap.put(globalIJK, i + 1);
      }
    }

    for (int i : sectorsMap.keySet()) {
      for (GridPoint gp : sectorsMap.get(i)) {
        Vector3D localIJK = gp.localIJK;
        Vector3D nativeIJK =
            nativeToLocal.getKey().applyInverseTo(localIJK).add(nativeToLocal.getValue());
        Vector3D globalIJK = plane.localToGlobal(nativeIJK);
        sectorMap.put(globalIJK, i + 1);
      }
    }

    vtkDoubleArray insideROI = new vtkDoubleArray();
    insideROI.SetName("Inside ROI");

    vtkDoubleArray profiles = new vtkDoubleArray();
    profiles.SetName("Profiles");

    vtkDoubleArray sectors = new vtkDoubleArray();
    sectors.SetName("Sectors");

    vtkPoints pointsXYZ = new vtkPoints();
    for (Vector3D point : roiMap.keySet()) {
      double[] array = point.toArray();
      pointsXYZ.InsertNextPoint(array);
      insideROI.InsertNextValue(roiMap.get(point) ? 1 : 0);
      profiles.InsertNextValue(profileMap.get(point));
      sectors.InsertNextValue(sectorMap.get(point));
    }

    vtkPolyData polyData = new vtkPolyData();
    polyData.SetPoints(pointsXYZ);
    polyData.GetPointData().AddArray(insideROI);
    polyData.GetPointData().AddArray(profiles);
    polyData.GetPointData().AddArray(sectors);

    vtkCellArray cells = new vtkCellArray();
    polyData.SetPolys(cells);

    for (int i = 0; i < pointsXYZ.GetNumberOfPoints(); i++) {
      vtkIdList idList = new vtkIdList();
      idList.InsertNextId(i);
      cells.InsertNextCell(idList);
    }

    vtkPolyDataWriter writer = new vtkPolyDataWriter();
    writer.SetInputData(polyData);
    writer.SetFileName(vtkFile);
    writer.SetFileTypeToBinary();
    writer.Update();
  }

  /**
   * Write the local grid out to a file
   *
   * @param gridPoints grid points
   * @param header file header
   * @param outputBasename basename for the CSV file to write
   */
  private void writeGridCSV(Collection<GridPoint> gridPoints, String header, String outputBasename) {
    String csvFile = outputBasename + "_grid.csv";

    try (PrintWriter pw = new PrintWriter(csvFile)) {
      pw.println(getHeader(header));
      for (GridPoint gp : gridPoints)
        pw.println(toCSV(gp));
    } catch (FileNotFoundException e) {
      logger.warn("Can't write {}", csvFile);
      logger.warn(e.getLocalizedMessage());
    }
  }

  /**
   * Write profiles to file
   *
   * @param gridPoints grid points
   * @param header file header
   * @param outputBasename basename for the CSV files to write
   * @return grid points along each profile
   */
  private Map<Integer, Collection<GridPoint>> writeProfileCSV(Collection<GridPoint> gridPoints,
      String header, String outputBasename) {
    Map<Integer, Collection<GridPoint>> profileMap = new HashMap<>();

    if (numProfiles == 0)
      return profileMap;

    // sort grid points into radial bins
    NavigableMap<Integer, Set<GridPoint>> radialMap = new TreeMap<>();
    for (GridPoint gp : gridPoints) {
      double radius = gp.localIJK.getNorm() / gridSpacing;
      int key = (int) radius;
      Set<GridPoint> set = radialMap.computeIfAbsent(key, k -> new HashSet<>());
      set.add(gp);
    }

    final double deltaAngle = 2 * Math.PI / numProfiles;
    for (int i = 0; i < numProfiles; i++) {

      Collection<GridPoint> profileGridPoints = new HashSet<>();
      profileMap.put(i, profileGridPoints);

      double angle = deltaAngle * i;
      String csvFile = String.format("%s_profile_%03d.csv", outputBasename,
          (int) Math.round(Math.toDegrees(angle)));

      try (PrintWriter pw = new PrintWriter(csvFile)) {
        pw.println(getHeader(header));
        for (int bin : radialMap.keySet()) {

          // stop profile at grid edge
          double thisX = Math.abs(Math.cos(angle) * bin) * gridSpacing;
          if (thisX > gridHalfExtent) continue;
          double thisY = Math.abs(Math.sin(angle) * bin) * gridSpacing;
          if (thisY > gridHalfExtent) continue;

          // sort points in this radial bin by angular distance from profile angle
          NavigableSet<GridPoint> sortedByAngle = new TreeSet<>((o1, o2) -> {
            double angle1 = Math.atan2(o1.localIJK.getY(), o1.localIJK.getX());
            if (angle1 < 0)
              angle1 += 2 * Math.PI;
            double angle2 = Math.atan2(o2.localIJK.getY(), o2.localIJK.getX());
            if (angle2 < 0)
              angle2 += 2 * Math.PI;
            // compare angular distances, accounting for wraparound at 2*pi so a point
            // just below 360 degrees is considered close to a profile at 0 degrees
            double d1 = Math.abs(angle1 - angle);
            if (d1 > Math.PI) d1 = 2 * Math.PI - d1;
            double d2 = Math.abs(angle2 - angle);
            if (d2 > Math.PI) d2 = 2 * Math.PI - d2;
            return Double.compare(d1, d2);
          });

          sortedByAngle.addAll(radialMap.get(bin));

          GridPoint thisPoint = sortedByAngle.first();
          pw.println(toCSV(thisPoint));
          if (Double.isFinite(thisPoint.differentialHeight))
            profileGridPoints.add(thisPoint);
        }
      } catch (FileNotFoundException e) {
        logger.warn("Can't write {}", csvFile);
        logger.warn(e.getLocalizedMessage());
      }

    }

    return profileMap;
  }
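  /*
   * Editor's sketch (not in the original source): the radial binning above assigns each
   * grid point to bin (int) (|p| / gridSpacing), and each profile then keeps the point in
   * each bin whose azimuth is closest to the profile angle. For example, with
   * gridSpacing = 0.5 a point at local (1.2, 0.9) has |p| = 1.5 and lands in bin
   * (int) (1.5 / 0.5) = 3, at an azimuth of atan2(0.9, 1.2) ~ 36.9 degrees.
   */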

  /**
   * Write sector volumes to a file
   *
   * @param gridPoints grid points
   * @param header file header
   * @param outputBasename basename for the CSV file to write
   * @return grid points within each sector
   */
  private Map<Integer, Collection<GridPoint>> writeSectorCSV(Collection<GridPoint> gridPoints,
      String header, String outputBasename) {

    // grid points in each sector
    Map<Integer, Collection<GridPoint>> sectorMap = new HashMap<>();

    if (numProfiles == 0)
      return sectorMap;

    String csvFile = outputBasename + "_sector.csv";

    NavigableMap<Double, Double> aboveMap = new TreeMap<>();
    NavigableMap<Double, Double> belowMap = new TreeMap<>();
    final double deltaAngle = 2 * Math.PI / numProfiles;
    for (int i = 0; i < numProfiles; i++) {
      aboveMap.put(i * deltaAngle, 0.);
      belowMap.put(i * deltaAngle, 0.);
    }

    // run through all the grid points and put them in the appropriate sector
    double gridCellArea = gridSpacing * gridSpacing;
    for (GridPoint gp : gridPoints) {
      Vector3D localIJK = gp.localIJK;
      double azimuth = Math.atan2(localIJK.getY(), localIJK.getX());
      if (azimuth < 0)
        azimuth += 2 * Math.PI;
      double key = aboveMap.floorKey(azimuth);

      int sector = (int) (key / deltaAngle);
      Collection<GridPoint> sectorGridPoints = sectorMap.computeIfAbsent(sector, k -> new HashSet<>());

      if (isInsideROI(gp.localIJK)) {
        double dv = gridCellArea * gp.differentialHeight;
        if (Double.isFinite(dv)) {
          if (dv > 0) {
            aboveMap.compute(key, (k, value) -> value + dv);
          } else {
            belowMap.compute(key, (k, value) -> value + dv);
          }
          sectorGridPoints.add(gp);
        }
      }
    }

    try (PrintWriter pw = new PrintWriter(csvFile)) {
      pw.println(getSectorHeader(header));
      for (double azimuth : aboveMap.keySet()) {
        StringBuilder sb = new StringBuilder();
        sb.append(String.format("%d, ", (int) (azimuth / deltaAngle)));
        sb.append(String.format("%.2f, ", Math.toDegrees(azimuth)));
        sb.append(String.format("%.2f, ", Math.toDegrees(azimuth + deltaAngle)));
        sb.append(String.format("%e, ", aboveMap.get(azimuth)));
        sb.append(String.format("%e, ", belowMap.get(azimuth)));
        sb.append(String.format("%e", aboveMap.get(azimuth) + belowMap.get(azimuth)));
        pw.println(sb);
      }

    } catch (FileNotFoundException e) {
      logger.warn("Can't write {}", csvFile);
      logger.warn(e.getLocalizedMessage());
    }

    return sectorMap;

  }
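  /*
   * Editor's sketch (not in the original source): with numProfiles sectors of width
   * deltaAngle = 2*pi/numProfiles, a point's sector key is found via floorKey(azimuth),
   * so the sector index is (int) (floorKey / deltaAngle). E.g. numProfiles = 8 gives
   * 45-degree sectors; an azimuth of 100 degrees has floorKey 90 degrees and falls in
   * sector 2 (90-135 degrees).
   */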

  private class GridPoint implements Comparable<GridPoint> {
    Vector3D localIJK;
    Vector3D globalIJK;
    double referenceHeight;
    double height;
    double differentialHeight;

    /**
     * Create a grid point from an input location in local coordinates
     *
     * @param xy point in local coordinates. Z value is ignored.
     */
    public GridPoint(Vector3D xy) {
      this.localIJK = xy;
      Vector3D nativeIJK =
          nativeToLocal.getKey().applyInverseTo(localIJK).add(nativeToLocal.getValue());
      globalIJK = plane.localToGlobal(nativeIJK);
      referenceHeight = getRefHeight(nativeIJK.getX(), nativeIJK.getY());
      height = getHeight(nativeIJK.getX(), nativeIJK.getY());
      differentialHeight = height - referenceHeight;
    }

    /**
     * Sort by the x coordinate on the local grid, then by the y coordinate.
     */
    @Override
    public int compareTo(GridPoint o) {
      int compare = Double.compare(localIJK.getX(), o.localIJK.getX());
      if (compare == 0)
        compare = Double.compare(localIJK.getY(), o.localIJK.getY());
      return compare;
    }
  }

  private static List<Vector3D> readPointsFromFile(String filename) {
    List<Vector3D> points = new ArrayList<>();

    try {
      if (FilenameUtils.getExtension(filename).equalsIgnoreCase("vtk")) {

        vtkPolyData polydata = PolyDataUtil.loadShapeModel(filename);
        double[] pt = new double[3];
        for (int i = 0; i < polydata.GetNumberOfPoints(); i++) {
          polydata.GetPoint(i, pt);
          points.add(new Vector3D(pt));
        }
      } else {
        List<String> lines = FileUtils.readLines(new File(filename), Charset.defaultCharset());
        for (String line : lines) {
          if (line.trim().isEmpty() || line.trim().startsWith("#"))
            continue;
          SBMTStructure structure = SBMTStructure.fromString(line);
          points.add(structure.centerXYZ());
        }
      }
    } catch (Exception e) {
      logger.warn(e.getLocalizedMessage());
    }
    return points;
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(Option.builder("gridExtent").required().hasArg().desc(
        "Required. Size of local grid, in same units as shape model and reference surface. Grid is assumed to be square.")
        .build());
    options.addOption(Option.builder("gridSpacing").required().hasArg()
        .desc(
            "Required. Spacing of local grid, in same units as shape model and reference surface.")
        .build());
    options.addOption(Option.builder("logFile").hasArg()
        .desc("If present, save screen output to log file.").build());
    options.addOption(Option.builder("logLevel").hasArg()
        .desc("If present, print messages above selected priority. Valid values are "
            + "ALL, OFF, SEVERE, WARNING, INFO, CONFIG, FINE, FINER, or FINEST. Default is INFO.")
        .build());
    options.addOption(Option.builder("numProfiles").hasArg().desc(
        "Number of radial profiles to create. Profiles are evenly spaced in degrees and evaluated "
            + "at intervals of gridSpacing in the radial direction.")
        .build());
    options.addOption(Option.builder("origin").hasArg()
        .desc("If present, set origin of local coordinate system. "
            + "Options are MAX_HEIGHT (set to maximum elevation of the shape model), "
            + "MIN_HEIGHT (set to minimum elevation of the shape model), "
            + "or a three element vector specifying the desired origin, comma separated, no spaces (e.g. 11.45,-45.34,0.932).")
        .build());
    options.addOption(Option.builder("output").hasArg().required().desc(
        "Basename of output files. Files will be named ${output}_grid.csv for the grid, ${output}_sector.csv for the sectors, "
            + "and ${output}_profile_${degrees}.csv for profiles.")
        .build());
    options.addOption(Option.builder("radialUp")
        .desc("Specify +Z direction of local coordinate system to be in the radial "
            + "direction. Default is to align local +Z along global +Z.")
        .build());
    options.addOption(Option.builder("referenceList").hasArg().desc(
        "File containing reference points. If the file extension is .vtk it is read as a VTK file, "
            + "otherwise it is assumed to be an SBMT structure file.")
        .build());
    options.addOption(Option.builder("referenceShape").hasArg().desc("Reference shape.").build());
    options.addOption(Option.builder("referenceVTK").hasArg()
        .desc("If present, write out a VTK file with the reference surface at each grid point. "
            + "If an ROI is defined, color points inside/outside the boundaries.")
        .build());
    options.addOption(Option.builder("roiInner").hasArg().desc(
        "Flag points closer to the origin than this boundary as outside the ROI. Supported formats are the same as referenceList.")
        .build());
    options.addOption(Option.builder("roiOuter").hasArg().desc(
        "Flag points farther from the origin than this boundary as outside the ROI. Supported formats are the same as referenceList.")
        .build());
    options.addOption(Option.builder("shapeModel").hasArg().required()
        .desc("Shape model for volume computation.").build());
    return options;
  }

  public static void main(String[] args) {
    TerrasaurTool defaultOBJ = new DifferentialVolumeEstimator();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    StringBuilder header = new StringBuilder();
    header.append("# ").append(new Date()).append("\n");
    header.append("# ").append(defaultOBJ.getClass().getSimpleName()).append(" [").append(AppVersion.getVersionString()).append("]\n");
    header.append("# ").append(startupMessages.get(MessageLabel.ARGUMENTS)).append("\n");

    NativeLibraryLoader.loadVtkLibraries();

    double gridHalfExtent = Double.parseDouble(cl.getOptionValue("gridExtent")) / 2;
    double gridSpacing = Double.parseDouble(cl.getOptionValue("gridSpacing"));

    String outputBasename = cl.getOptionValue("output");
    String dirName = FilenameUtils.getFullPath(outputBasename);
    if (!dirName.trim().isEmpty()) {
      File dir = new File(dirName);
      if (!dir.exists())
        dir.mkdirs();
    }

    vtkPolyData polyData = null;
    try {
      polyData = PolyDataUtil.loadShapeModel(cl.getOptionValue("shapeModel"));
    } catch (Exception e) {
      logger.error("Cannot load shape model!");
      logger.error(e.getLocalizedMessage(), e);
    }

    ORIGIN originType = ORIGIN.DEFAULT;
    Vector3D localOrigin = Vector3D.ZERO;
    if (cl.hasOption("origin")) {
      String originString = cl.getOptionValue("origin");
      if (originString.contains(",")) {
        String[] parts = originString.split(",");
        if (parts.length == 3) {
          localOrigin = new Vector3D(Double.parseDouble(parts[0].trim()),
              Double.parseDouble(parts[1].trim()), Double.parseDouble(parts[2].trim()));
          originType = ORIGIN.CUSTOM;
        }
      } else {
        originType = ORIGIN.valueOf(originString.toUpperCase());
      }
    }
    DifferentialVolumeEstimator app = new DifferentialVolumeEstimator(polyData);

    if (cl.hasOption("numProfiles"))
      app.setNumProfiles(Integer.parseInt(cl.getOptionValue("numProfiles")));

    if (cl.hasOption("radialUp"))
      app.setRadialUp(true);

    if (cl.hasOption("referenceShape")) {
      try {
        app.setReferencePolyData(PolyDataUtil.loadShapeModel(cl.getOptionValue("referenceShape")));
      } catch (Exception e) {
        logger.error("Cannot load reference shape model!");
        logger.error(e.getLocalizedMessage(), e);
      }
    }
    if (cl.hasOption("referenceList"))
      app.setReferencePoints(readPointsFromFile(cl.getOptionValue("referenceList")));

    app.createReference(localOrigin);

    // Shift the origin if needed
    switch (originType) {
      case CUSTOM:
      case DEFAULT:
        break;
      case MAX_HEIGHT:
        app.createGrid(gridHalfExtent, 0.1 * gridSpacing);
        localOrigin = app.highPoint;
        app.createReference(localOrigin);
        break;
      case MIN_HEIGHT:
        app.createGrid(gridHalfExtent, 0.1 * gridSpacing);
        localOrigin = app.lowPoint;
        app.createReference(localOrigin);
        break;
    }

    if (cl.hasOption("roiInner"))
      app.setInnerROI(cl.getOptionValue("roiInner"));

    if (cl.hasOption("roiOuter"))
      app.setOuterROI(cl.getOptionValue("roiOuter"));

    NavigableSet<GridPoint> gridPoints = app.createGrid(gridHalfExtent, gridSpacing);

    app.writeGridCSV(gridPoints, header.toString(), outputBasename);
    Map<Integer, Collection<GridPoint>> profileMap =
        app.writeProfileCSV(gridPoints, header.toString(), outputBasename);
    Map<Integer, Collection<GridPoint>> sectorMap =
        app.writeSectorCSV(gridPoints, header.toString(), outputBasename);

    if (cl.hasOption("referenceVTK")) {
      app.writeReferenceVTK(gridPoints, profileMap, sectorMap, cl.getOptionValue("referenceVTK"));
    }

    logger.info("Finished.");
  }
}
76
src/main/java/terrasaur/apps/DumpConfig.java
Normal file
@@ -0,0 +1,76 @@
package terrasaur.apps;

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Paths;
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.Locale;
import org.apache.commons.cli.Options;
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.PropertiesConfigurationLayout;
import org.apache.commons.configuration2.ex.ConfigurationException;
import org.apache.commons.text.WordUtils;
import terrasaur.config.ConfigBlock;
import terrasaur.config.ConfigBlockFactory;
import terrasaur.config.TerrasaurConfig;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.AppVersion;

public class DumpConfig implements TerrasaurTool {

  @Override
  public String shortDescription() {
    return "Write a sample configuration file to use with Terrasaur, using defaults for DART.";
  }

  @Override
  public String fullDescription(Options options) {
    return WordUtils.wrap(
        "This program writes out a sample configuration file to be used with Terrasaur. "
            + "It takes a single argument, which is the path of the configuration file "
            + "to be written.",
        80);
  }

  public static void main(String[] args) {
    // if no arguments, print the usage and exit
    if (args.length == 0) {
      System.out.println(new DumpConfig().fullDescription(null));
      System.exit(0);
    }

    // if -shortDescription is specified, print short description and exit.
    for (String arg : args) {
      if (arg.equals("-shortDescription")) {
        System.out.println(new DumpConfig().shortDescription());
        System.exit(0);
      }
    }

    File path = Paths.get(args[0]).toFile();

    ConfigBlock configBlock = TerrasaurConfig.getTemplate();
    try (PrintWriter pw = new PrintWriter(path)) {
      PropertiesConfiguration config = new ConfigBlockFactory().toConfig(configBlock);
      PropertiesConfigurationLayout layout = config.getLayout();

      String now =
          DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
              .withLocale(Locale.getDefault())
              .withZone(ZoneOffset.UTC)
              .format(Instant.now());
      layout.setHeaderComment(
          String.format(
              "Configuration file for %s\nCreated %s UTC", AppVersion.getVersionString(), now));

      config.write(pw);
    } catch (ConfigurationException | IOException e) {
      throw new RuntimeException(e);
    }

    System.out.println("Wrote config file to " + path.getAbsolutePath());
  }
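  /*
   * Editor's note (an assumption, not in the original source): commons-configuration2
   * writes the layout header comment as "# "-prefixed lines, so the top of the generated
   * file should look roughly like (version string and timestamp will differ):
   *
   *   # Configuration file for <version>
   *   # Created 2024-11-26 00:00:00 UTC
   */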
}
626
src/main/java/terrasaur/apps/GetSpots.java
Normal file
@@ -0,0 +1,626 @@
package terrasaur.apps;

import java.io.File;
import java.io.PrintStream;
import java.nio.charset.Charset;
import java.util.*;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import spice.basic.AberrationCorrection;
import spice.basic.Body;
import spice.basic.CSPICE;
import spice.basic.FOV;
import spice.basic.Instrument;
import spice.basic.LatitudinalCoordinates;
import spice.basic.Matrix33;
import spice.basic.Plane;
import spice.basic.PositionVector;
import spice.basic.ReferenceFrame;
import spice.basic.SCLK;
import spice.basic.SCLKTime;
import spice.basic.SpiceException;
import spice.basic.StateRecord;
import spice.basic.TDBTime;
import spice.basic.Vector3;
import terrasaur.smallBodyModel.SmallBodyModel;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.CellInfo;
import terrasaur.utils.NativeLibraryLoader;
import terrasaur.utils.PolyDataUtil;
import terrasaur.utils.SPICEUtil;
import terrasaur.utils.math.MathConversions;
import vtk.vtkIdList;
import vtk.vtkPolyData;

/**
 *
 * <h2>NAME</h2>
 *
 * GetSpots - find relevant OSIRIS-REx data for assigning values to facets in an OSIRIS-REx map
 * interchange (OBJ) file
 *
 * <h2>SYNOPSIS</h2>
 *
 * <code>GetSpots --spice <em>spicemetakernel</em> --obj <em>objfile</em> --instype
 * <em>instrumenttype</em> --sclk <em>sclkfile</em> --maxdist <em>distance</em></code>
 *
 * <h2>DESCRIPTION</h2>
 *
 * GetSpots identifies those times, listed in <em>sclkfile</em>, when the boresight of
 * <em>instrumenttype</em> intersects the surface of Bennu less than <em>distance</em> milliradians
 * from the center of individual facets in the OBJ file described in <em>objfile</em>. Needed spice
 * files for this calculation are listed in <em>spicemetakernel</em>.
 *
 * <p>At the time a three-dimensional map is made, a designated DSK file will be used to create an
 * OBJ file that acts as a framework on which a map will be made. (This is done with the utility
 * {@link DSK2OBJ}.) The OBJ file identifies a set of vertices (in body centered Cartesian
 * coordinates) and groups these vertices to identify facets. Facet numbers correspond to their
 * ordinal position in the list of facet identifications. An ancillary file locates the center of
 * each facet in latitude (deg), longitude (deg), and range (km) from center of figure.
 *
 * <p>Mapmakers may wish to assign a value to a facet based on an algorithmic combination of one or
 * more observations that are in the proximity of the facet. In order to do this, they need to
 * understand which observations are candidates for their analysis. Knowing the distance and
 * position angle of the observation relative to the center of the facet is the discriminator for
 * determining which observations are candidates, and GetSpots provides this information.
 *
 * <p>For all observations taken by OSIRIS-REx, the unique value of a counter on the spacecraft when
 * the observation was made is known. This is called the sclk ("sklock") value. SPICE files provide
 * spacecraft ephemeris and orientation, Bennu position, orientation, and shape, and instrument
 * boresight and orientations. When combined with the sclk value, the intersection of the boresight
 * on the surface of Bennu can be calculated.
 *
 * <p>A simple text file of sclk values (one per line) acts as input to GetSpots. GetSpots
 * determines which boresight intersections corresponding to the sclk values fall within the
 * maximum distance of each facet, along with the position angle of the spot and the fraction of
 * the facet covered by the spot. The position angle ranges from 0 to 359 degrees, with North at 0
 * degrees and East at 90 degrees. Spots that do not completely intersect the surface of Bennu are
 * flagged. Any additional information on the input line following the sclk value is echoed to the
 * output unchanged, allowing other data useful in the subsequent analysis to be carried along in
 * the output.
 *
 * <h2>INPUTS</h2>
 *
 * <em>spicemetakernel</em> is a spice metakernel listing all required spice files (DSK, SPK, PCK,
 * etc.) needed to perform the analysis. Only files needed for this analysis are in the metakernel
 * (e.g. no ambiguity about which file to use.) Required contents of this metakernel are TBD.
 *
 * <p><em>instrumenttype</em> is a code that specifies the boresight and field of view of the
 * instrument to use in the analysis. This must be one of "OLA_LOW", "OLA_HIGH", "OTES",
 * "OVIRS_SCI", "REXIS", "REXIS_SXM", "POLYCAM", "MAPCAM", "SAMCAM", or "NAVCAM".
 *
 * <p><em>distance</em> is the maximum distance of the instrument boresight from the center of the
 * facet expressed in milliradians.
 *
 * <p><em>sclkfile</em> is a text file that contains the sclk values for one or more times of
 * observation by an instrument. Leading whitespace is ignored. One or more whitespace characters
 * must separate the sclkvalue from the rest of the line. The format is as follows:<br>
 * <code>
 * BEGIN<br>
 * sclkvalue [otherdata]<br>
 * sclkvalue [otherdata]<br>
 * .<br>
 * .<br>
 * END<br>
 * </code> Where:<br>
 * <code>sclkvalue</code> is a string (format TBD)<br>
 * <code>[otherdata]</code> includes all additional data (including whitespace) in the line up
 * to, but not including, the linefeed character.
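 *
 * <p>For example (an editor's illustration; the sclk value is the sample used in the
 * {@code -sclk} option documentation below, and the trailing label is hypothetical):<br>
 * <code>
 * BEGIN<br>
 * 3/605862045.24157 map_spot_001<br>
 * END<br>
 * </code>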
 *
 * <h2>OUTPUT</h2>
 *
 * All output is written to standard output.
 *
 * <p>Output is a text file, with each line terminated by linefeeds.<br>
 * <code>
 * F1<br>
 * sclkvalue dist pos frac flag inc ems phs [otherdata]<br>
 * sclkvalue dist pos frac flag inc ems phs [otherdata]<br>
 * ..<br>
 * F2<br>
 * sclkvalue dist pos frac flag inc ems phs [otherdata]<br>
 * sclkvalue dist pos frac flag inc ems phs [otherdata]<br>
 * F3<br>
 * F4<br>
 * ..<br>
 * FN<br>
 * END<br>
 * </code> Where:<br>
 * F<em>N</em> is a facet identifier that identifies facet number <em>N</em>. Each sclkvalue
 * that meets the distance criteria for facet number <em>N</em> is listed sequentially after the
 * facet identifier. If no sclk values meet the distance criteria, then no values are listed, and
 * the next facet identifier follows on the next line. <code>sclkvalue</code> is the sclk
 * exactly as it appeared in the input file.<br>
 * <code>dist</code> is a real number that describes the distance of the spot from the center
 * of the facet in units of milliradians.<br>
 * <code>pos</code> is the position angle of the center of the instrument boresight
 * intersection on the surface measured in degrees from North, with East being 90 degrees.<br>
 * <code>frac</code> is a real number greater than or equal to 0.0 and less than or equal to
 * 100.0 that describes the fraction of the facet covered by the spot. If the value is 0.0, then the
 * spot does not cover any portion of the facet. If the value is 100.0 then the facet is entirely
 * covered by the spot.<br>
 * <code>flag</code> is 1 if any portion of the spot does not intersect the surface of Bennu,
 * otherwise it is 0.<br>
 * <code>inc</code> is the incidence angle in degrees<br>
 * <code>ems</code> is the emission angle in degrees<br>
 * <code>phs</code> is the phase angle in degrees<br>
 * <code>[otherdata]</code> is all textual data on the line following the sclk value, up to
 * the linefeed, unchanged.
 *
 * @author nairah1
 */
public class GetSpots implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  private GetSpots() {}

  @Override
  public String shortDescription() {
    return "Find relevant OSIRIS-REx data for assigning values to facets in an OBJ file.";
  }

  @Override
  public String fullDescription(Options options) {
    String header =
        """
        This program identifies those times when the boresight of instrumenttype intersects the surface
        of Bennu less than a specified distance from the center of individual facets in the shape model.
        """;
    String footer =
        """
        All output is written to standard output in the following format:
        F1
        sclkvalue dist pos frac flag inc ems phs [otherdata]
        sclkvalue dist pos frac flag inc ems phs [otherdata]
        ..
        F2
        sclkvalue dist pos frac flag inc ems phs [otherdata]
        sclkvalue dist pos frac flag inc ems phs [otherdata]
        ..
        F3
        F4
        ..
        FN
        END
        Where:
        FN is a facet identifier that identifies facet number N. Each sclkvalue that meets the distance criteria for facet number N is listed sequentially\
        after the facet identifier. If no sclk values meet the distance criteria, then no values are listed, and the next facet identifier follows on the \
        next line.
        sclkvalue is the sclk exactly as it appeared in the input file.
        dist is a real number that describes the distance of the spot from the center of the facet in units of milliradians.
        pos is the position angle of the center of the instrument boresight intersection on the surface measured in degrees from North, with East being 90 degrees.
        frac is a real number greater than or equal to 0.0 and less than or equal to 100.0 that describes the fraction of the facet covered by the spot. \
        If the value is 0.0, then the spot does not cover any portion of the facet. If the value is 100.0 then the facet is entirely covered by the spot.
        flag is 1 if any portion of the spot does not intersect the surface of Bennu, otherwise it is 0.\s
        inc is the incidence angle in degrees
        ems is the emission angle in degrees
        phs is the phase angle in degrees
        [otherdata] is all textual data on the line following the sclk value, up to the linefeed, unchanged.
        """;
    return TerrasaurTool.super.fullDescription(options, header, footer);
  }

  private String spicemetakernel;
  private String objfile;
  private String instrument;
  private String sclkfile;
  private int debugLevel;
  private double maxdist;
  private vtkPolyData polydata;
  private SmallBodyModel smallBodyModel;
  private HashMap<Integer, String> coverageMap;

  private int SC_ID;
  private String SC_ID_String;
  private ReferenceFrame BodyFixed;
  private String TARGET;
  private final Vector3 NORTH = new Vector3(0, 0, 1e6);
  private int instID;

  private PrintStream outputStream;

  public GetSpots(
      String spicemetakernel,
      String objfile,
      String instrument,
      String sclkfile,
      double maxdist,
      int debugLevel) {
    this.spicemetakernel = spicemetakernel;
    this.objfile = objfile;
    this.instrument = instrument;
    this.sclkfile = sclkfile;
    this.maxdist = maxdist;
    this.debugLevel = debugLevel;

    coverageMap = new HashMap<>();

    outputStream = System.out;
  }

  /**
   * Find facets covered by the FOV of the instrument. For each facet, find the distance and
   * position angle of the instrument boresight and the fraction of the facet covered by the FOV.
   *
   * @param line String where the first "word" is an sclk time
   * @throws SpiceException if a SPICE call fails
   */
  private void findCoverage(String line) throws SpiceException {
    String[] parts = line.split(" ");
    if (parts.length == 0) return;

    SCLKTime sclkTime = new SCLKTime(new SCLK(SC_ID), parts[0]);
    TDBTime tdbTime = new TDBTime(sclkTime.getTDBSeconds());

    Instrument instrument = new Instrument(instID);
    FOV fov = new FOV(instrument);
    Matrix33 instrToBodyFixed =
        fov.getReferenceFrame().getPositionTransformation(BodyFixed, tdbTime);
    Vector3 bsightBodyFixed = instrToBodyFixed.mxv(fov.getBoresight());

    StateRecord sr =
        new StateRecord(
            new Body(SC_ID_String),
            tdbTime,
            BodyFixed,
            new AberrationCorrection("LT+S"),
            new Body(TARGET));
    Vector3 scposBodyFixed = sr.getPosition();

    PositionVector sunPos =
        new StateRecord(
                new Body("SUN"),
                tdbTime,
                BodyFixed,
                new AberrationCorrection("LT+S"),
                new Body(TARGET))
            .getPosition();

    double[] double3 = new double[3];
    long cellID =
        smallBodyModel.computeRayIntersection(
            scposBodyFixed.toArray(), bsightBodyFixed.hat().toArray(), double3);
    if (cellID == -1) return; // no boresight intersection

    Vector3 bsightIntersectVector = new Vector3(double3);

    if (debugLevel > 1) {
      LatitudinalCoordinates lc = new LatitudinalCoordinates(bsightIntersectVector);
      System.out.printf(
          "# %s %f %f %s\n",
          sclkTime,
          Math.toDegrees(lc.getLatitude()),
          Math.toDegrees(lc.getLongitude()),
          bsightIntersectVector);
    }

    // flag is 1 if any portion of the spot does not intersect the surface
    int flag = 0;
    Vector<Vector3> boundaryBodyFixed = new Vector<>();
    if (fov.getShape().equals("RECTANGLE") || fov.getShape().equals("POLYGON")) {
      for (Vector3 boundary : fov.getBoundary()) {
        boundaryBodyFixed.add(instrToBodyFixed.mxv(boundary));
      }
    } else if (fov.getShape().equals("CIRCLE")) {
      // bounds contains a single vector parallel to a ray that lies in the cone
      // that makes up the boundary of the FOV
      Vector3[] bounds = fov.getBoundary();

      for (int i = 0; i < 8; i++) {
        // not ideal, but check every 45 degrees along the perimeter of the circle
        // for intersection with the surface
        Matrix33 rotateAlongPerimeter = new Matrix33(fov.getBoresight(), i * Math.toRadians(45));
        Vector3 perimeterVector = rotateAlongPerimeter.mxv(bounds[0]);
        boundaryBodyFixed.add(instrToBodyFixed.mxv(perimeterVector));
      }
    } else {
      // TODO: add ELLIPSE
      System.err.printf(
          "Instrument %s: Unsupported FOV shape %s\n", instrument.getName(), fov.getShape());
      System.exit(0);
    }

    // check all of the boundary vectors for surface intersections
    for (Vector3 vector : boundaryBodyFixed) {
      cellID =
          smallBodyModel.computeRayIntersection(
              scposBodyFixed.toArray(), vector.hat().toArray(), double3);
      if (cellID == -1) {
        flag = 1;
        break;
      }
    }

    vtkIdList idList = new vtkIdList();
    for (int i = 0; i < polydata.GetNumberOfCells(); ++i) {

      polydata.GetCellPoints(i, idList);
      double[] pt0 = polydata.GetPoint(idList.GetId(0));
      double[] pt1 = polydata.GetPoint(idList.GetId(1));
      double[] pt2 = polydata.GetPoint(idList.GetId(2));

      CellInfo ci = CellInfo.getCellInfo(polydata, i, idList);
      Vector3 facetNormal = MathConversions.toVector3(ci.normal());
      Vector3 facetCenter = MathConversions.toVector3(ci.center());

      // check that facet faces the observer
      Vector3 facetToSC = scposBodyFixed.sub(facetCenter);
      double emission = facetToSC.sep(facetNormal);
      if (emission > Math.PI / 2) continue;

      double dist =
          findDist(scposBodyFixed, bsightIntersectVector, facetCenter) * 1e3; // milliradians
      if (dist < maxdist) {

        Vector3 facetToSun = sunPos.sub(facetCenter);
        double incidence = facetToSun.sep(facetNormal);
        double phase = facetToSun.sep(facetToSC);

        Vector3 pt0v = new Vector3(pt0);
        Vector3 pt1v = new Vector3(pt1);
        Vector3 pt2v = new Vector3(pt2);

        Vector3 span1 = pt1v.sub(pt0v);
        Vector3 span2 = pt2v.sub(pt0v);

        Plane facetPlane = new Plane(pt0v, span1, span2);
        Vector3 localNorth = facetPlane.project(NORTH).sub(facetCenter);
        Vector3 bsightIntersectProjection =
            facetPlane.project(bsightIntersectVector).sub(facetCenter);

        // 0 = North, 90 = East
        double pos =
            Math.toDegrees(Math.acos(localNorth.hat().dot(bsightIntersectProjection.hat())));
        if (localNorth.cross(bsightIntersectProjection).dot(facetNormal) > 0) pos = 360 - pos;

        int nCovered = 0;
        if (SPICEUtil.isInFOV(fov, instrToBodyFixed.mtxv(pt0v.sub(scposBodyFixed)))) nCovered++;
        if (SPICEUtil.isInFOV(fov, instrToBodyFixed.mtxv(pt1v.sub(scposBodyFixed)))) nCovered++;
        if (SPICEUtil.isInFOV(fov, instrToBodyFixed.mtxv(pt2v.sub(scposBodyFixed)))) nCovered++;
        double frac;
        if (nCovered == 3) {
          frac = 1;
        } else {
          final double sep012 = span1.negate().sep(pt2v.sub(pt1v)); // angle at vertex 1
          final double sep021 = span2.negate().sep(pt1v.sub(pt2v)); // angle at vertex 2

          // check ~0.5 * nPts^2 sample points to see if they fall in the FOV
          int nPts = 50;
          Vector<Vector3> pointsInFacet = new Vector<>();
          for (int ii = 0; ii < nPts; ii++) {
            Vector3 x = pt0v.add(span1.scale(ii / (nPts - 1.)));
            for (int jj = 0; jj < nPts; jj++) {
              Vector3 y = x.add(span2.scale(jj / (nPts - 1.)));

              // if outside the facet, angle 01y will be larger than angle 012
              if (span1.negate().sep(y.sub(pt1v)) > sep012) continue;
              // if outside the facet, angle 02y will be larger than angle 021
              if (span2.negate().sep(y.sub(pt2v)) > sep021) continue;
              pointsInFacet.add(instrToBodyFixed.mtxv(y.sub(scposBodyFixed)));
            }
          }

          if (pointsInFacet.isEmpty()) {
            frac = 0;
          } else {
            List<Boolean> isInFOV = SPICEUtil.isInFOV(fov, pointsInFacet);
            nCovered = 0;
            for (boolean b : isInFOV) if (b) nCovered++;
            frac = ((double) nCovered) / pointsInFacet.size();
          }
        }

        StringBuilder output =
            new StringBuilder(
                String.format(
                    "%s %.4f %5.1f %.1f %d %.1f %.1f %.1f",
                    sclkTime,
                    dist,
                    pos,
                    frac * 100,
                    flag,
                    Math.toDegrees(incidence),
                    Math.toDegrees(emission),
                    Math.toDegrees(phase)));
        for (int j = 1; j < parts.length; j++) output.append(String.format(" %s", parts[j]));
        output.append("\n");
        // append this observation to any coverage already recorded for this facet
        coverageMap.merge(i, output.toString(), String::concat);
      }
    }
  }
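  /*
   * Editor's sketch (not in the original source): the position angle above is computed as
   * pos = acos(nHat . bHat) between the projected local North direction and the projected
   * boresight intersection in the facet plane, which only yields 0-180 degrees; the sign
   * of (n x b) . facetNormal then disambiguates the two half-planes, mapping one of them
   * to 360 - pos so the result spans the full 0-360 degree range.
   */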

  /**
   * Find the angular distance between pt1 and pt2 as seen from scPos. All coordinates are in the
   * body fixed frame.
   *
   * @param scPos Spacecraft position
   * @param pt1 Point 1
   * @param pt2 Point 2
   * @return distance between pt1 and pt2 in radians.
   */
  private double findDist(Vector3 scPos, Vector3 pt1, Vector3 pt2) {
    Vector3 scToPt1 = pt1.sub(scPos).hat();
    Vector3 scToPt2 = pt2.sub(scPos).hat();

    return Math.acos(scToPt1.dot(scToPt2));
  }
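  /*
   * Editor's sketch (not in the original source): findDist is simply the angular
   * separation of the two unit vectors from the spacecraft. For example, with scPos at
   * the origin, pt1 = (1, 0, 0) and pt2 = (1, 0.001, 0) give a dot product of about
   * 0.9999995, and acos of that is roughly 0.001 radians, i.e. 1 milliradian.
   */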

  public void printMap() {
    if (debugLevel > 0) {
      for (int i = 0; i < polydata.GetNumberOfCells(); ++i) {
        outputStream.printf("F%d\n", i + 1);
        String output = coverageMap.get(i);
        if (output != null) outputStream.print(output);
      }
    } else {
      List<Integer> list = new ArrayList<>(coverageMap.keySet());
      Collections.sort(list);
      for (Integer i : list) {
        outputStream.printf("F%d\n", i + 1);
        outputStream.print(coverageMap.get(i));
      }
    }
    outputStream.println("END");
  }

  public void process() throws Exception {
    boolean useNEAR = false;
    if (instrument.equalsIgnoreCase("OLA_LOW")) {
      // instID = -64400; // ORX_OLA_BASE
      // instID = -64401; // ORX_OLA_ART
      instID = -64403; // ORX_OLA_LOW
    } else if (instrument.equalsIgnoreCase("OLA_HIGH")) {
      instID = -64402; // ORX_OLA_HIGH
    } else if (instrument.equalsIgnoreCase("OTES")) {
      instID = -64310; // ORX_OTES
    } else if (instrument.equalsIgnoreCase("OVIRS_SCI")) {
      // instID = -64320; // ORX_OVIRS <- no instrument kernel for this
      instID = -64321; // ORX_OVIRS_SCI
      // instID = -64322; // ORX_OVIRS_SUN
    } else if (instrument.equalsIgnoreCase("REXIS")) {
      instID = -64330; // ORX_REXIS
    } else if (instrument.equalsIgnoreCase("REXIS_SXM")) {
      instID = -64340; // ORX_REXIS_SXM
    } else if (instrument.equalsIgnoreCase("POLYCAM")) {
      instID = -64360; // ORX_OCAMS_POLYCAM
    } else if (instrument.equalsIgnoreCase("MAPCAM")) {
      instID = -64361; // ORX_OCAMS_MAPCAM
    } else if (instrument.equalsIgnoreCase("SAMCAM")) {
      instID = -64362; // ORX_OCAMS_SAMCAM
    } else if (instrument.equalsIgnoreCase("NAVCAM")) {
      // instID = -64070; // ORX_NAVCAM <- no frame kernel for this
      // instID = -64081; // ORX_NAVCAM1 <- no instrument kernel for this
      instID = -64082; // ORX_NAVCAM2 <- no instrument kernel for this
    } else if (instrument.equalsIgnoreCase("NIS_RECT")) {
      useNEAR = true;
      // instID = -93021;
      instID = -93023; // relative to NEAR_NIS_BASE
    } else if (instrument.equalsIgnoreCase("NIS_SQUARE")) {
      useNEAR = true;
      // instID = -93022;
      instID = -93024; // relative to NEAR_NIS_BASE
    }

    NativeLibraryLoader.loadVtkLibraries();
    polydata = PolyDataUtil.loadShapeModelAndComputeNormals(objfile);
    smallBodyModel = new SmallBodyModel(polydata);

    NativeLibraryLoader.loadSpiceLibraries();
    CSPICE.furnsh(spicemetakernel);

    if (useNEAR) {
      SC_ID = -93;
      SC_ID_String = "-93"; // "NEAR";
      TARGET = "2000433";
      BodyFixed = new ReferenceFrame("IAU_EROS");
    } else {
      SC_ID = -64;
      SC_ID_String = "-64"; // "ORX_SPACECRAFT";
      TARGET = "2101955";
      BodyFixed = new ReferenceFrame("IAU_BENNU");
    }

    List<String> sclkLines = FileUtils.readLines(new File(sclkfile), Charset.defaultCharset());
    boolean foundBegin = false;
    for (String line : sclkLines) {
      String trimLine = line.trim();
      if (trimLine.startsWith("#")) continue;
      if (trimLine.startsWith("BEGIN")) {
        foundBegin = true;
        continue;
      }
      if (foundBegin && !trimLine.startsWith("END")) {
        findCoverage(trimLine);
      }
    }
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(Option.builder("spice").required().hasArg().desc("SPICE metakernel").build());
    options.addOption(Option.builder("obj").required().hasArg().desc("Shape file").build());
    options.addOption(
        Option.builder("instype")
            .required()
            .hasArg()
            .desc(
                "one of OLA_LOW, OLA_HIGH, OTES, OVIRS_SCI, REXIS, REXIS_SXM, POLYCAM, MAPCAM, SAMCAM, or NAVCAM")
            .build());
    options.addOption(
        Option.builder("sclk")
            .required()
            .hasArg()
            .desc(
                """
                file containing sclk values for instrument observation times. All values between the strings BEGIN and END will be processed.
                For example:
                BEGIN
                3/605862045.24157
                END""")
            .build());
    options.addOption(
        Option.builder("maxdist")
            .required()
            .hasArg()
            .desc("maximum distance of boresight from facet center in milliradians")
            .build());
    options.addOption(
        Option.builder("all-facets")
            .desc(
                "Optional. If present, entries for all facets will be output, even if there is no intersection.")
            .build());
    options.addOption(
        Option.builder("verbose")
            .hasArg()
            .desc(
                "Optional. A level of 1 is equivalent to -all-facets. A level of 2 or higher will print out the boresight intersection position at each sclk.")
            .build());
    return options;
  }

  public static void main(String[] args) {

    TerrasaurTool defaultOBJ = new GetSpots();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    String spicemetakernel = cl.getOptionValue("spice");
    String objfile = cl.getOptionValue("obj");
    String instrumenttype = cl.getOptionValue("instype");
    String sclkfile = cl.getOptionValue("sclk");
    double distance = Double.parseDouble(cl.getOptionValue("maxdist"));
    int debugLevel = Integer.parseInt(cl.getOptionValue("verbose", "0"));
    if (cl.hasOption("all-facets")) debugLevel = debugLevel == 0 ? 1 : debugLevel + 1;

    GetSpots gs =
        new GetSpots(spicemetakernel, objfile, instrumenttype, sclkfile, distance, debugLevel);
    try {
      gs.process();
      gs.printMap();
    } catch (Exception e) {
      logger.error(e.getLocalizedMessage(), e);
    }
  }
}
764
src/main/java/terrasaur/apps/ImpactLocator.java
Normal file
@@ -0,0 +1,764 @@
package terrasaur.apps;

import java.io.File;
import java.io.PrintWriter;
import java.util.*;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.spi.StandardLevel;
import spice.basic.*;
import terrasaur.smallBodyModel.SmallBodyModel;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.*;
import terrasaur.utils.math.MathConversions;
import vtk.vtkCellArray;
import vtk.vtkLine;
import vtk.vtkPoints;
import vtk.vtkPolyData;
import vtk.vtkPolyDataWriter;

/**
 * Find the impact point and time given an initial position and velocity.
 *
 * @author nairah1
 */
public class ImpactLocator implements UnivariateFunction, TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  @Override
  public String shortDescription() {
    return "Calculate impact time and position from a sumFile.";
  }

  @Override
  public String fullDescription(Options options) {
    String header = "";
    String footer =
        """
        Given a sum file, shape model, and spacecraft velocity in the J2000 frame,
        this program will calculate an impact time and position.

        NOTE: Spacecraft position is assumed to be in kilometers. If not, use the
        -distanceScale option to convert to km.

        NOTE: Do not include a "pinpoint" or impact SPK in the kernels to
        load.""";
    return TerrasaurTool.super.fullDescription(options, header, footer);
  }

  private ReferenceFrame J2000;
  private ReferenceFrame bodyFixed;
  private SmallBodyModel sbm;
  private Double finalHeight;
  private Double finalStep;
  private TDBTime t0;
  private StateVector initialObserverJ2000;
  private StateVector initialTargetJ2000;

  private Vector3 observerAccelerationJ2000;
  private Vector3 targetAccelerationJ2000;

  private StateVector lastState;

  private vtkPolyData rayBundlePolyData;
  private vtkCellArray rayBundleCells;
  private vtkPoints rayBundlePoints;

  private ImpactLocator() {}

  public ImpactLocator(
      ReferenceFrame J2000,
      ReferenceFrame bodyFixed,
      SmallBodyModel sbm,
      Double finalHeight,
      Double finalStep,
      TDBTime t0,
      StateVector initialObserverJ2000,
      StateVector initialTargetJ2000,
      TDBTime t1,
      StateVector finalObserverJ2000,
      StateVector finalTargetJ2000)
      throws SpiceErrorException {
    this.J2000 = J2000;
    this.bodyFixed = bodyFixed;
    this.sbm = sbm;
    this.finalHeight = finalHeight;
    this.finalStep = finalStep;
    this.t0 = t0;
    this.initialObserverJ2000 = initialObserverJ2000;
    this.initialTargetJ2000 = initialTargetJ2000;

    if (t1 == null) {
      observerAccelerationJ2000 = new Vector3();
      targetAccelerationJ2000 = new Vector3();
    } else {
      double duration = t1.getTDBSeconds() - t0.getTDBSeconds();
      observerAccelerationJ2000 =
          finalObserverJ2000
              .getVelocity()
              .sub(initialObserverJ2000.getVelocity())
              .scale(1. / duration);
      targetAccelerationJ2000 =
          finalTargetJ2000.getVelocity().sub(initialTargetJ2000.getVelocity()).scale(1. / duration);
    }
  }

  /**
   * Find the spacecraft state in the body-fixed frame at time et. Assumes constant acceleration in
   * the J2000 frame.
   *
   * @param et ephemeris time
   * @return spacecraft state in the body-fixed frame at time et
   */
  public StateVector getStateBodyFixed(TDBTime et) {

    try {
      double delta = et.sub(t0).getMeasure();

      Vector3 observerPosJ2000 =
          initialObserverJ2000
              .getPosition()
              .add(initialObserverJ2000.getVelocity().scale(delta))
              .add(observerAccelerationJ2000.scale(0.5 * delta * delta));

      Vector3 targetPosJ2000 =
          initialTargetJ2000
              .getPosition()
              .add(initialTargetJ2000.getVelocity().scale(delta))
              .add(targetAccelerationJ2000.scale(0.5 * delta * delta));

      Vector3 scPosJ2000 = observerPosJ2000.sub(targetPosJ2000);
      Vector3 observerVelJ2000 =
          initialObserverJ2000.getVelocity().add(observerAccelerationJ2000.scale(delta));
      Vector3 targetVelJ2000 =
          initialTargetJ2000.getVelocity().add(targetAccelerationJ2000.scale(delta));
      Vector3 scVelJ2000 = observerVelJ2000.sub(targetVelJ2000);

      StateVector scStateJ2000 = new StateVector(scPosJ2000, scVelJ2000);
      StateVector scStateBodyFixed =
          new StateVector(J2000.getStateTransformation(bodyFixed, et).mxv(scStateJ2000));

      if (lastState == null) {
        lastState = scStateBodyFixed;
        rayBundlePolyData = new vtkPolyData();
        rayBundleCells = new vtkCellArray();
        rayBundlePoints = new vtkPoints();

        rayBundlePolyData.SetPoints(rayBundlePoints);
        rayBundlePolyData.SetLines(rayBundleCells);
      }
      long id0 = rayBundlePoints.InsertNextPoint(lastState.getPosition().toArray());
      long id1 = rayBundlePoints.InsertNextPoint(scStateBodyFixed.getPosition().toArray());
      lastState = scStateBodyFixed;

      vtkLine line = new vtkLine();
      line.GetPointIds().SetId(0, id0);
      line.GetPointIds().SetId(1, id1);

      rayBundleCells.InsertNextCell(line);

      return scStateBodyFixed;

    } catch (SpiceException e) {
      logger.error(e.getLocalizedMessage(), e);
    }
    return null;
  }
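  /*
   * Editor's sketch (not in the original source): the propagation above is constant
   * acceleration kinematics in J2000, applied independently to observer and target:
   *
   *   p(t0 + dt) = p0 + v0 * dt + 0.5 * a * dt^2
   *   v(t0 + dt) = v0 + a * dt
   *
   * with a estimated as (v1 - v0) / (t1 - t0) from the two bracketing states; the
   * relative (observer - target) state is then rotated into the body-fixed frame.
   */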

  /** return range from surface at time t */
  @Override
  public double value(double t) {

    TDBTime thisTime = new TDBTime(t);

    StateVector scStateBodyFixed = getStateBodyFixed(thisTime);

    Vector3 closestPoint =
        new Vector3(sbm.findClosestPoint(scStateBodyFixed.getPosition().toArray()));

    Vector3 toSurface = scStateBodyFixed.getPosition().sub(closestPoint);
    return toSurface.norm();
  }
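  /*
   * Editor's sketch (an assumption, not shown in this excerpt): because value(t) returns
   * the distance to the surface, an impact time could be bracketed and refined with a
   * standard commons-math3 solver, e.g.
   *
   *   import org.apache.commons.math3.analysis.solvers.BisectionSolver;
   *   double tImpact = new BisectionSolver(1e-3)
   *       .solve(1000, t -> locator.value(t) - targetHeight, tStart, tEnd);
   *
   * where locator, targetHeight, tStart, and tEnd are hypothetical names for an
   * ImpactLocator instance, the height treated as "impact", and bracketing times.
   */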

  private NavigableSet<ImpactRecord> findTrajectory() {
    NavigableSet<ImpactRecord> records = new TreeSet<>();
    try {

      lastState = null;

      TDBTime et = t0;

      StateVector scStateBodyFixed = getStateBodyFixed(et);

      Vector3 closestPoint =
          new Vector3(sbm.findClosestPoint(scStateBodyFixed.getPosition().toArray()));
      Vector3 toSurface = scStateBodyFixed.getPosition().sub(closestPoint);
      double altitude = toSurface.norm();

      // time it takes to get halfway to the surface
      TDBDuration delta = new TDBDuration(altitude / (2 * scStateBodyFixed.getVelocity().norm()));
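      // Stepping by half the current time-to-surface roughly halves the remaining
      // altitude on each pass, so the loop below closes in on the surface without
      // overshooting; once within finalHeight, fixed steps of finalStep are used.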

      boolean keepGoing = true;
      while (keepGoing) {
        LatitudinalCoordinates lc = new LatitudinalCoordinates(scStateBodyFixed.getPosition());
        records.add(
            new ImpactRecord(
                et,
                scStateBodyFixed,
                new LatitudinalCoordinates(altitude, lc.getLongitude(), lc.getLatitude())));

        et = et.add(delta);

        scStateBodyFixed = getStateBodyFixed(et);

        closestPoint = new Vector3(sbm.findClosestPoint(scStateBodyFixed.getPosition().toArray()));
        toSurface = scStateBodyFixed.getPosition().sub(closestPoint);
        altitude = toSurface.norm();

        // check that we're still moving towards the target
        if (scStateBodyFixed.getPosition().dot(scStateBodyFixed.getVelocity()) > 0) {
          logger.warn(
              "Stopping at {}; passed closest approach to the body center.",
              et.toUTCString("ISOC", 3));
          keepGoing = false;
        }

        if (altitude > finalHeight) {
          delta = new TDBDuration(toSurface.norm() / (2 * scStateBodyFixed.getVelocity().norm()));
        } else if (altitude > finalStep) {
          delta = new TDBDuration(finalStep / scStateBodyFixed.getVelocity().norm());
        } else {
          keepGoing = false;
        }
      }

      LatitudinalCoordinates lc = new LatitudinalCoordinates(scStateBodyFixed.getPosition());
      records.add(
          new ImpactRecord(
              et,
              scStateBodyFixed,
              new LatitudinalCoordinates(altitude, lc.getLongitude(), lc.getLatitude())));

    } catch (SpiceException e) {
      logger.error(e.getLocalizedMessage(), e);
    }
    return records;
  }

  static class ImpactRecord implements Comparable<ImpactRecord> {
    TDBTime et;
    StateVector scStateBodyFixed;
    LatitudinalCoordinates lc;

    private ImpactRecord(TDBTime et, StateVector scStateBodyFixed, LatitudinalCoordinates lc) {
      this.et = et;
      this.scStateBodyFixed = scStateBodyFixed;
      this.lc = lc;
    }

    @Override
    public int compareTo(ImpactRecord o) {
      try {
        return Double.compare(et.getTDBSeconds(), o.et.getTDBSeconds());
      } catch (SpiceErrorException e) {
        // completely unnecessary exception
        return 0;
      }
    }
  }
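
  /**
   * Estimate the geometric (uncorrected) position corresponding to a light time plus stellar
   * aberration (LT+S) corrected position.
   */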
  private static Vector3 correctForAberration(
      Vector3 targetLTS, Body observer, Body target, TDBTime t) throws SpiceException {
    RemoveAberration ra = new RemoveAberration(target, observer);

    return ra.getGeometricPosition(t, targetLTS);
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("date")
            .hasArgs()
            .desc("Initial UTC date. Required if -sumFile is not used.")
            .build());
    options.addOption(
        Option.builder("finalHeight")
            .hasArg()
            .desc("Height above surface in meters to consider \"impact\". Default is 1 meter.")
            .build());
    options.addOption(
        Option.builder("finalStep")
            .hasArg()
            .desc(
                "Continue printing output below finalHeight in increments of approximately finalStep "
                    + "(in meters) until zero. Default is to stop at finalHeight.")
            .build());
    options.addOption(
        Option.builder("frame")
            .required()
            .hasArg()
            .desc("Required. Name of body fixed frame.")
            .build());
    options.addOption(
        Option.builder("instrumentFrame")
            .hasArg()
            .desc(
                "SPICE ID for the camera reference frame. Required if -outputTransform "
                    + "AND -sumFile are used.")
            .build());
    options.addOption(
        Option.builder("logFile")
            .hasArg()
            .desc("If present, save screen output to log file.")
            .build());
    StringBuilder sb = new StringBuilder();
    for (StandardLevel l : StandardLevel.values()) sb.append(String.format("%s ", l.name()));
    options.addOption(
        Option.builder("logLevel")
            .hasArg()
            .desc(
                "If present, print messages above selected priority. Valid values are "
                    + sb.toString().trim()
                    + ". Default is INFO.")
            .build());
    options.addOption(
        Option.builder("objFile")
            .required()
            .hasArg()
            .desc("Required. Name of OBJ shape file.")
            .build());
    options.addOption(
        Option.builder("observer")
            .required()
            .hasArg()
            .desc("Required. SPICE ID for the impactor.")
            .build());
    options.addOption(
        Option.builder("observerFrame")
            .hasArg()
            .desc(
                "SPICE ID for the impactor's reference frame. Required if -outputTransform is used.")
            .build());
    options.addOption(
        Option.builder("outputTransform")
            .hasArg()
            .desc(
                "If present, write out a transform file that can be used by TransformShape to place "
                    + "coordinates in the spacecraft frame in the body fixed frame. The rotation "
                    + "is evaluated at the sumfile time. The translation is evaluated at the impact time. "
                    + "Requires -observerFrame option.")
            .build());
    options.addOption(
        Option.builder("position")
            .hasArg()
            .desc(
                "Spacecraft to body vector in body fixed coordinates. Units are km. "
                    + "Spacecraft is at the origin to be consistent with sumFile convention.")
            .build());
    options.addOption(
        Option.builder("spice")
            .required()
            .hasArgs()
            .desc(
                "Required. SPICE metakernel file containing body fixed frame and spacecraft kernels. "
                    + "Can specify more than one kernel, separated by whitespace.")
            .build());
    options.addOption(
        Option.builder("sumFile")
            .hasArg()
            .desc(
                "Name of sum file to read. Coordinate system is assumed to be in the body "
                    + "fixed frame with the spacecraft at the origin.")
            .build());
    options.addOption(
        Option.builder("target")
            .required()
            .hasArg()
            .desc("Required. SPICE ID for the target.")
            .build());
    options.addOption(
        Option.builder("trajectory")
            .hasArg()
            .desc(
                "If present, name of output VTK file containing trajectory in body fixed coordinates.")
            .build());
    options.addOption(
        Option.builder("verbosity")
            .hasArg()
            .desc("This option does nothing! Use -logLevel instead.")
            .build());
    options.addOption(
        Option.builder("velocity")
            .hasArg()
            .desc(
                "Spacecraft velocity in J2000 relative to the body. Units are km/s. "
                    + "If not specified, velocity is calculated using SPICE.")
            .build());
    return options;
  }

  public static void main(String[] args) throws Exception {
    TerrasaurTool defaultOBJ = new ImpactLocator();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    NativeLibraryLoader.loadVtkLibraries();
    NativeLibraryLoader.loadSpiceLibraries();

    String objFile = cl.getOptionValue("objFile");
    SmallBodyModel sbm = new SmallBodyModel(PolyDataUtil.loadShapeModel(objFile));

    for (String kernel : cl.getOptionValues("spice")) KernelDatabase.load(kernel);
    ReferenceFrame J2000 = new ReferenceFrame("J2000");
    ReferenceFrame bodyFixed = new ReferenceFrame(cl.getOptionValue("frame"));
    Body observer = new Body(cl.getOptionValue("observer"));
    Body target = new Body(cl.getOptionValue("target"));
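
    // Note: finalHeight and finalStep are given in meters on the command line but
    // carried in kilometers internally, hence the division by 1e3 below.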
    final double finalHeight =
        cl.hasOption("finalHeight")
            ? Double.parseDouble(cl.getOptionValue("finalHeight")) / 1e3
            : 1e-3;
    if (finalHeight <= 0) {
      logger.warn("Argument to -finalHeight must be positive!");
      System.exit(0);
    }

    final double finalStep =
        cl.hasOption("finalStep")
            ? Double.parseDouble(cl.getOptionValue("finalStep")) / 1e3
            : Double.MAX_VALUE;
    if (finalStep <= 0) {
      logger.warn("Argument to -finalStep must be positive!");
      System.exit(0);
    }

    // initial spacecraft position relative to target body
    Vector3 initialPos = new Vector3();
    TDBTime et = null;
    SumFile sumFile = null;
    if (cl.hasOption("sumFile")) {
      sumFile = SumFile.fromFile(new File(cl.getOptionValue("sumFile")));

      et = new TDBTime(sumFile.utcString());

      Matrix33 bodyFixedToJ2000 = bodyFixed.getPositionTransformation(J2000, et);
      Vector3 scObjJ2000 = bodyFixedToJ2000.mxv(MathConversions.toVector3(sumFile.scobj()));
      initialPos = correctForAberration(scObjJ2000, observer, target, et);
      initialPos = bodyFixedToJ2000.mtxv(initialPos).negate();

    } else if (cl.hasOption("date")) {
      String[] parts = cl.getOptionValues("date");
      StringBuilder sb = new StringBuilder();
      for (String part : parts) sb.append(part).append(" ");
      et = new TDBTime(sb.toString());
    } else {
      logger.warn("Either -sumFile or -date must be specified.");
      System.exit(0);
    }
    TDBTime et0 = et;
    AberrationCorrection abCorrNone = new AberrationCorrection("NONE");

    // target's state relative to observer
    StateRecord sr = new StateRecord(target, et, bodyFixed, abCorrNone, observer);

    /*-
    // aberration test
    sr = new StateRecord(target, et, J2000, abCorrNone, observer);
    StateRecord srLTS =
        new StateRecord(target, et, J2000, new AberrationCorrection("LT+S"), observer);
    RemoveAberration ra = new RemoveAberration(target, observer);
    Vector3 estimatedGeometric = ra.getGeometricPosition(et, srLTS.getPosition());

    System.out.printf("LT+S position: %s\n", new Vector3(srLTS.getPosition()));
    System.out.printf("geometric position: %s\n", new Vector3(sr.getPosition()));
    Vector3 difference = sr.getPosition().sub(srLTS.getPosition());
    System.out.printf("difference: %s %f\n", difference, difference.norm());
    System.out.printf("aberration angle: %.3e\n", srLTS.getPosition().sep(sr.getPosition()));
    System.out.printf("estimated geometric: %s\n", estimatedGeometric);
    difference = sr.getPosition().sub(estimatedGeometric);
    System.out.printf("difference: %s %f\n", difference, difference.norm());
    System.out.printf("angle: %.3e\n", estimatedGeometric.sep(sr.getPosition()));
    System.out.println();
    System.exit(0);
    */

    // this is only true with aberration correction NONE!
    Vector3 scPosBodyFixed = sr.getPosition().negate();

    if (cl.hasOption("position")) {
      String[] parts = cl.getOptionValue("position").split(",");
      double[] tmp = new double[3];
      for (int i = 0; i < 3; i++) tmp[i] = Double.parseDouble(parts[i].trim());
      initialPos.assign(tmp);
    } else if (!cl.hasOption("sumFile")) {
      // use position calculated by SPICE
      initialPos = scPosBodyFixed;
    }

    if (Math.abs(scPosBodyFixed.sub(initialPos).norm()) > 0) {
      logger.warn(
          String.format(
              "Warning! Spacecraft position relative to target from SPICE is %s while input position is %s",
              new Vector3(scPosBodyFixed), initialPos.toString()));
      logger.warn(String.format("Difference is %e km", initialPos.sub(scPosBodyFixed).norm()));
      logger.warn("Continuing with input position");
    }

    Vector3 initialPosJ2000 = bodyFixed.getPositionTransformation(J2000, et).mxv(initialPos);

    // relative to solar system barycenter in J2000
    StateVector initialTargetJ2000 =
        new StateRecord(target, et, J2000, abCorrNone, new Body(0)).getStateVector();
    StateVector initialObserverJ2000 =
        new StateVector(
            initialPosJ2000.add(initialTargetJ2000.getPosition()),
            new StateRecord(observer, et, J2000, abCorrNone, new Body(0)).getVelocity());

    if (cl.hasOption("velocity")) {
      String[] parts = cl.getOptionValue("velocity").split(",");
      double[] tmp = new double[3];
      for (int i = 0; i < 3; i++) tmp[i] = Double.parseDouble(parts[i].trim());
      Vector3 scVelJ2000 = new Vector3(tmp);

      initialObserverJ2000 =
          new StateVector(
              initialObserverJ2000.getPosition(), scVelJ2000.add(initialTargetJ2000.getVelocity()));

      StateRecord obs = new StateRecord(observer, et, J2000, abCorrNone, new Body(0));
      logger.info(
          String.format(
              "spacecraft velocity relative to target from SPICE at %s is %s",
              et.toUTCString("ISOC", 3), obs.getVelocity().sub(initialTargetJ2000.getVelocity())));
      logger.info(String.format("Specified velocity is %s", scVelJ2000));
    }

    ImpactLocator ifsf =
        new ImpactLocator(
            J2000,
            bodyFixed,
            sbm,
            finalHeight,
            finalStep,
            et,
            initialObserverJ2000,
            initialTargetJ2000,
            null,
            null,
            null);
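
    // First pass: propagate with zero acceleration (the trailing null arguments) to
    // get an initial estimate of the impact time. The code below then estimates
    // accelerations over that interval and repeats the search.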

    NavigableSet<ImpactRecord> records = ifsf.findTrajectory();
    TDBTime first = records.first().et;

    StateVector scStateBodyFixed = ifsf.getStateBodyFixed(first);
    StateVector scStateJ2000 =
        new StateVector(bodyFixed.getStateTransformation(J2000, first).mxv(scStateBodyFixed));

    System.out.printf("T0: %s%n", first.toUTCString("ISOC", 3));
    System.out.printf("Frame %s: %s%n", bodyFixed.getName(), scStateBodyFixed);
    System.out.printf("Frame J2000: %s%n", scStateJ2000);
    System.out.printf(
        "%s: Observer velocity relative to SSB (J2000): %s%n",
        first.toUTCString("ISOC", 3), initialObserverJ2000.getVelocity());

    // find coverage of observer and target
    int numSPK = KernelDatabase.ktotal("SPK");
    double lastTarget = -Double.MAX_VALUE;
    double lastObserver = -Double.MAX_VALUE;
    for (int i = 0; i < numSPK; i++) {
      String filename = KernelDatabase.getFileName(i, "SPK");
      SPK thisSPK = SPK.openForRead(filename);
      SpiceWindow coverage = thisSPK.getCoverage(target.getIDCode());
      if (coverage.card() > 0) {
        double[] lastInterval = coverage.getInterval(coverage.card() - 1);
        lastTarget = Math.max(lastTarget, lastInterval[1]);
        logger.debug(
            "SPK {}: body {}, last time is {}",
            filename,
            target.getName(),
            new TDBTime(lastTarget).toUTCString("ISOC", 3));
      }
      coverage = thisSPK.getCoverage(observer.getIDCode());
      if (coverage.card() > 0) {
        double[] lastInterval = coverage.getInterval(coverage.card() - 1);
        lastObserver = Math.max(lastObserver, lastInterval[1]);
        logger.debug(
            "SPK {}: body {}, last time is {}",
            filename,
            observer.getName(),
            new TDBTime(lastObserver).toUTCString("ISOC", 3));
      }
    }

    double lastET = Math.min(records.last().et.getTDBSeconds(), lastTarget);
    lastET = Math.min(lastET, lastObserver);
    TDBTime last = new TDBTime(lastET);
    StateRecord finalObserverJ2000 =
        new StateRecord(observer, last, J2000, abCorrNone, new Body(0));
    StateRecord finalTargetJ2000 = new StateRecord(target, last, J2000, abCorrNone, new Body(0));
    System.out.printf(
        "%s: Observer velocity relative to SSB (J2000): %s%n",
        last.toUTCString("ISOC", 3), finalObserverJ2000.getVelocity());

    double duration = last.getTDBSeconds() - first.getTDBSeconds();
    Vector3 observerAccelerationJ2000 =
        finalObserverJ2000
            .getVelocity()
            .sub(initialObserverJ2000.getVelocity())
            .scale(1. / duration);
    Vector3 targetAccelerationJ2000 =
        finalTargetJ2000.getVelocity().sub(initialTargetJ2000.getVelocity()).scale(1. / duration);

    System.out.printf("Estimated time of impact %s\n", last.toUTCString("ISOC", 6));
    System.out.printf("Estimated time to impact %.6f seconds\n", duration);
    System.out.printf("Estimated observer acceleration (J2000): %s\n", observerAccelerationJ2000);
    System.out.printf("Estimated target acceleration (J2000): %s\n", targetAccelerationJ2000);
    System.out.printf(
        "observer acceleration relative to target: %s\n",
        observerAccelerationJ2000.sub(targetAccelerationJ2000));

    System.out.println();

    // Run again with constant accelerations for target and observer
    ifsf =
        new ImpactLocator(
            J2000,
            bodyFixed,
            sbm,
            finalHeight,
            finalStep,
            first,
            initialObserverJ2000,
            initialTargetJ2000,
            last,
            finalObserverJ2000,
            finalTargetJ2000);
    records = ifsf.findTrajectory();

    System.out.printf(
        "%26s, %13s, %13s, %13s, %13s, %13s, %13s, %12s, %12s, %12s",
        "UTC",
        "X (km)",
        "Y (km)",
        "Z (km)",
        "VX (km/s)",
        "VY (km/s)",
        "VZ (km/s)",
        "Lat (deg)",
        "Lon (deg)",
        "Alt (m)\n");
    for (ImpactRecord record : records) {
      PositionVector p = record.scStateBodyFixed.getPosition();
      VelocityVector v = record.scStateBodyFixed.getVelocity();
      System.out.printf(
          String.format(
              "%26s, %13.6e, %13.6e, %13.6e, %13.6e, %13.6e, %13.6e, %12.4f, %12.4f, %12.4f\n",
              record.et.toUTCString("ISOC", 6),
              p.getElt(0),
              p.getElt(1),
              p.getElt(2),
              v.getElt(0),
              v.getElt(1),
              v.getElt(2),
              Math.toDegrees(record.lc.getLatitude()),
              Math.toDegrees(record.lc.getLongitude()),
              record.lc.getRadius() * 1e3));
    }

    if (cl.hasOption("trajectory")) {

      File trajectoryFile = new File(cl.getOptionValue("trajectory"));
      File parent = trajectoryFile.getParentFile();
      if (parent != null && !parent.exists()) parent.mkdirs();

      vtkPolyDataWriter writer = new vtkPolyDataWriter();
      writer.SetInputData(ifsf.rayBundlePolyData);
      writer.SetFileName(cl.getOptionValue("trajectory"));
      writer.SetFileTypeToBinary();
      writer.Update();
    }

    if (cl.hasOption("outputTransform")) {
      if (cl.hasOption("observerFrame")) {
        // evaluate rotation at -date or -sumFile time

        File transformFile = new File(cl.getOptionValue("outputTransform"));
        File parent = transformFile.getParentFile();
        if (parent != null && !parent.exists()) parent.mkdirs();

        ReferenceFrame scFrame =
            new ReferenceFrame(cl.getOptionValue("observerFrame").toUpperCase());
        Matrix33 scToBodyFixed;

        if (sumFile != null) {

          // scToBodyFixed = scFrame.getPositionTransformation(bodyFixed, et0);
          // logger.info("scToBody (SPICE):\n" + scToBodyFixed);

          ReferenceFrame instrFrame = null;
          if (cl.hasOption("instrumentFrame"))
            instrFrame = new ReferenceFrame(cl.getOptionValue("instrumentFrame").toUpperCase());
          if (instrFrame == null) {
            logger.error("-instrumentFrame needed for -outputTransform. Exiting.");
            System.exit(0);
          }
          Matrix33 scToCamera = scFrame.getPositionTransformation(instrFrame, et0);

          // DART SPECIFIC!!!!!! TODO: create a Terrasaur config file with
          // project-specific defaults, like spice kernel, camera flips, etc.

          // flip -1, 2, -3
          Vector3 row0 = MathConversions.toVector3(sumFile.cx().negate());
          Vector3 row1 = MathConversions.toVector3(sumFile.cy());
          Vector3 row2 = MathConversions.toVector3(sumFile.cz().negate());

          Matrix33 bodyFixedToCamera = new Matrix33(row0, row1, row2);

          scToBodyFixed = bodyFixedToCamera.mtxm(scToCamera);

          // logger.info("scToBody (SUMFILE):\n" + scToBodyFixed);
        } else {
          scToBodyFixed = scFrame.getPositionTransformation(bodyFixed, et0);
        }
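
        // The transform file holds a 4x4 homogeneous matrix: each of the first three
        // rows is a row of the scToBodyFixed rotation followed by the corresponding
        // component of the impact-point translation, with a final row of "0 0 0 1".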
        PositionVector p = records.last().scStateBodyFixed.getPosition();
        try (PrintWriter pw = new PrintWriter(transformFile)) {

          List<String> transform = new ArrayList<>();
          for (int i = 0; i < 3; i++) {
            StringBuilder sb = new StringBuilder();
            for (int j = 0; j < 3; j++) sb.append(String.format("%f ", scToBodyFixed.getElt(i, j)));
            sb.append(String.format("%f ", p.getElt(i)));
            transform.add(sb.toString());
          }
          transform.add("0 0 0 1");
          StringBuilder sb = new StringBuilder();
          for (String s : transform) sb.append(String.format("%s\n", s));

          pw.println(sb);
        }
        /*-
        Pair<Vector3, Matrix33> transform =
            CommandLineOptionsUtil.getTransformation(cl.getOptionValue("outputTransform"));
        System.out.printf("translate\n\t%s\n\t%s\n", p.toString(),
            transform.getFirst().toString());
        System.out.printf("rotate\n\t%s\n\t%s\n", scToBodyFixed.toString(),
            transform.getSecond().toString());
        */
      } else {
        logger.warn("-observerFrame needed for -outputTransform");
      }
    }
  }
}
865
src/main/java/terrasaur/apps/Maplet2FITS.java
Normal file
@@ -0,0 +1,865 @@
package terrasaur.apps;

import com.google.common.io.LittleEndianDataInputStream;
import java.io.BufferedInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import nom.tam.fits.FitsException;
import nom.tam.fits.HeaderCard;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import picante.math.coords.CoordConverters;
import picante.math.coords.LatitudinalVector;
import picante.math.vectorspace.UnwritableVectorIJK;
import terrasaur.altwg.pipeline.NameConvention;
import terrasaur.altwg.pipeline.NamingFactory;
import terrasaur.altwg.pipeline.ProductNamer;
import terrasaur.enums.AltwgDataType;
import terrasaur.enums.FitsHeaderType;
import terrasaur.enums.PlaneInfo;
import terrasaur.enums.SrcProductType;
import terrasaur.fits.FitsData;
import terrasaur.fits.FitsData.FitsDataBuilder;
import terrasaur.fits.FitsHdr;
import terrasaur.fits.FitsHdr.FitsHdrBuilder;
import terrasaur.fits.HeaderTag;
import terrasaur.fits.ProductFits;
import terrasaur.fits.UnitDir;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.Binary16;
import terrasaur.utils.BinaryUtils;
import terrasaur.utils.xml.AsciiFile;

/**
 * Maplet2FITS program. See the usage string for more information about this program.
 *
 * @author Eli Kahn
 * @version 1.0
 */
public class Maplet2FITS implements TerrasaurTool {
  private static final Logger logger = LogManager.getLogger();

  @Override
  public String shortDescription() {
    return "Convert a Gaskell maplet to FITS format.";
  }

  @Override
  public String fullDescription(Options options) {
    String header =
        """
        This program converts a maplet file in Gaskell maplet format to a FITS file.
        The program assumes the Gaskell scale is in units of km.
        By default the generated FITS cube will contain these 10 planes:
        1. latitude
        2. longitude
        3. radius
        4. x position
        5. y position
        6. z position
        7. height
        8. albedo
        9. sigma
        10. quality
        If the --exclude-position option is provided, then only the height, albedo, sigma
        and quality planes are saved out.""";
    String footer = "";
    return TerrasaurTool.super.fullDescription(options, header, footer);
  }

  public static class HazardParams {

    public final boolean noHazard;
    public final double initialValue;

    private HazardParams(boolean noHazard, double initialValue) {
      this.noHazard = noHazard;
      this.initialValue = initialValue;
    }
  }

  /**
   * Generate HazardParams object that contains attributes needed for generating the Hazard plane.
   *
   * @param noHazard if true, do not include a Hazard plane in the output
   * @param initialVal initial value used to fill the Hazard plane
   * @return a new HazardParams instance
   */
  public static HazardParams getHazardParam(boolean noHazard, double initialVal) {
    return new HazardParams(noHazard, initialVal);
  }

  /**
   * Self-contained function call to generate ALTWG FITS files from a given maplet file. Note the
   * boolean that determines whether to use the outfile string "as-is" or replace the filename with
   * one using the ALTWG naming convention.
   *
   * @param mapletFile input maplet file
   * @param outfile output FITS file; must be a full path to a file (not a directory) if no naming
   *     convention is used
   * @param productType ALTWG product type
   * @param excludePosition if true, only save the height, albedo, sigma, quality (and optional
   *     hazard) planes
   * @param fitsConfigFile optional FITS header configuration file; may be null
   * @param sigmasFile optional binary sigmas file; may be null (sigmas default to 0)
   * @param sigsumFile optional ASCII sigma summary file; may be null
   * @param qualityFile optional binary quality file; may be null (quality defaults to 0)
   * @param namingConvention naming convention for the output file; empty to use outfile as-is
   * @param swapBytes if true, read the sigma and quality files as little-endian
   * @param scalFactor scale factor used to convert the maplet scale to ground sample distance
   * @param sigmaScale scale factor applied to values read from sigmasFile
   * @param mapName value for the MAP_NAME FITS keyword
   * @param hazParam hazard plane parameters
   * @throws IOException
   * @throws FitsException
   */
  public static void run(
      String mapletFile,
      String outfile,
      AltwgDataType productType,
      boolean excludePosition,
      String fitsConfigFile,
      String sigmasFile,
      String sigsumFile,
      String qualityFile,
      String namingConvention,
      boolean swapBytes,
      double scalFactor,
      double sigmaScale,
      String mapName,
      HazardParams hazParam)
      throws IOException, FitsException {

    // sanity check. If no naming convention specified then outfile should be fully qualified path
    // to an output file, NOT a directory
    if (namingConvention.isEmpty()) {
      File outFname = new File(outfile);
      if (outFname.isDirectory()) {
        String errMesg =
            "ERROR! No naming convention specified but output file:"
                + outfile
                + " is a directory! Must be a full path to an output file!";
        throw new RuntimeException(errMesg);
      }
    }

    DataInputStream is =
        new DataInputStream(new BufferedInputStream(new FileInputStream(mapletFile)));
    DataInput sigmaInput = null;
    BufferedInputStream sigmabis = null;
    if (sigmasFile != null) {
      System.out.println("Parsing " + sigmasFile + " for sigma values.");
      Path filePath = Paths.get(sigmasFile);
      if (!Files.exists(filePath)) {
        System.out.println(
            "WARNING! sigmas file:"
                + filePath.toAbsolutePath()
                + " not found! Sigmas will default to 0!");
      } else {
        sigmabis =
            new BufferedInputStream(new FileInputStream(filePath.toAbsolutePath().toString()));
        sigmaInput =
            swapBytes ? new LittleEndianDataInputStream(sigmabis) : new DataInputStream(sigmabis);
      }
    }
    DataInput qualityInput = null;
    BufferedInputStream qualbis = null;
    if (qualityFile != null) {
      System.out.println("Parsing " + qualityFile + " for quality values.");
      Path filePath = Paths.get(qualityFile);
      if (!Files.exists(filePath)) {
        System.out.println(
            "WARNING! quality file:"
                + filePath.toAbsolutePath()
                + " not found! Quality values will default to 0!");
      } else {
        qualbis =
            new BufferedInputStream(new FileInputStream(filePath.toAbsolutePath().toString()));
        qualityInput =
            swapBytes ? new LittleEndianDataInputStream(qualbis) : new DataInputStream(qualbis);
      }
    }

    double[] V = new double[3];
    double[] ux = new double[3];
    double[] uy = new double[3];
    double[] uz = new double[3];

    /*
     * Copied from Bob Gaskell READ_MAP.f code:
     *
     * bytes 1-2: height/hscale (integer*2 msb). byte 3: relative "albedo" (1-199) (byte).
     *
     * If there is missing data at any point, both height and albedo are set to zero.
     *
     * The map array is read row by row from the upper left (i,j = -qsz). Rows are increasing in
     * the Uy direction with spacing = scale. Columns are increasing in the Ux direction with
     * spacing = scale. Heights are positive in the Uz direction with units = scale.
     */

    // use the first 4 bytes of the maplet header to store intensity min & dynamic range.
    float intensityMin = Binary16.toFloat(BinaryUtils.swap(is.readShort()));
    float intensityRange = Binary16.toFloat(BinaryUtils.swap(is.readShort()));

    // advancing byte pointer past some headers. Unused, per Bob's WRITE_MAP.f
    is.readByte();
    is.readByte();

    float scale = is.readFloat();
    short halfsize = BinaryUtils.swap(is.readShort());

    // x,y,z position uncertainty unit vector * 255.
    // per Bob's WRITE_MAP.f
    is.readByte();
    is.readByte();
    is.readByte();

    V[0] = is.readFloat();
    V[1] = is.readFloat();
    V[2] = is.readFloat();
    ux[0] = is.readFloat();
    ux[1] = is.readFloat();
    ux[2] = is.readFloat();
    uy[0] = is.readFloat();
    uy[1] = is.readFloat();
    uy[2] = is.readFloat();
    uz[0] = is.readFloat();
    uz[1] = is.readFloat();
    uz[2] = is.readFloat();
    float hscale = is.readFloat();

    // magnitude of position uncertainty
    is.readFloat();

    // byte 72 of the maplet header is the version number. OLA uses version numbers < 0 and SPC
    // maplets have version numbers > 0. A version number of 0 is Bob Gaskell's original maplet
    // format.
    byte b = is.readByte();
    logger.info("byte 72 is: " + b);
    // boolean isOLAMaplet = (is.readByte() < 0);
    boolean isOLAMaplet = (b < 0);
    if (isOLAMaplet) {
      logger.info("byte72 of maplet header indicates this is an OLA maplet.");
    } else {
      logger.info("byte72 of maplet header indicates this is an SPC maplet.");
    }

    logger.info(String.format("V : [%f %f %f]", V[0], V[1], V[2]));
    logger.info(String.format("ux: [%f %f %f]", ux[0], ux[1], ux[2]));
    logger.info(String.format("uy: [%f %f %f]", uy[0], uy[1], uy[2]));
    logger.info(String.format("uz: [%f %f %f]", uz[0], uz[1], uz[2]));
    logger.info("halfsize: " + halfsize);
    logger.info("scale: " + scale);
    logger.info("hscale: " + hscale);
    logger.info("AltwgProductType:" + productType.toString());

    int totalsize = 2 * halfsize + 1;
    int numPlanes = 10;
    if (excludePosition) {
      numPlanes = 4;
      if (!hazParam.noHazard) {
        numPlanes = numPlanes + 1;
      }
    }
    double[][][] data = new double[numPlanes][totalsize][totalsize];
    double[][][] llrData = new double[numPlanes][totalsize][totalsize];

    for (int i = -halfsize; i <= halfsize; ++i)
      for (int j = -halfsize; j <= halfsize; ++j) {
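
        // per READ_MAP.f, the stored integer*2 is height/hscale in units of scale,
        // so multiplying by hscale * scale recovers the height in km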
        double h = is.readShort() * hscale * scale;

        int n = 0;
        int llrIndex = 0;
        double[] p = {
          V[0] + i * scale * ux[0] + j * scale * uy[0] + h * uz[0],
          V[1] + i * scale * ux[1] + j * scale * uy[1] + h * uz[1],
          V[2] + i * scale * ux[2] + j * scale * uy[2] + h * uz[2]
        };
        LatitudinalVector lv = CoordConverters.convertToLatitudinal(new UnwritableVectorIJK(p));
        if (!excludePosition) {
          data[n++][i + halfsize][j + halfsize] = Math.toDegrees(lv.getLatitude());
          data[n++][i + halfsize][j + halfsize] = Math.toDegrees(lv.getLongitude());
          data[n++][i + halfsize][j + halfsize] = lv.getRadius();
          data[n++][i + halfsize][j + halfsize] = p[0];
          data[n++][i + halfsize][j + halfsize] = p[1];
          data[n++][i + halfsize][j + halfsize] = p[2];
        } else {
          llrData[llrIndex++][i + halfsize][j + halfsize] = Math.toDegrees(lv.getLatitude());
          llrData[llrIndex++][i + halfsize][j + halfsize] = Math.toDegrees(lv.getLongitude());
          llrData[llrIndex++][i + halfsize][j + halfsize] = lv.getRadius();
        }

        data[n++][i + halfsize][j + halfsize] = h;

        double albedo = is.readUnsignedByte();

        if (isOLAMaplet) {
          albedo = albedo / 199. * intensityRange + intensityMin;
        } else {
          albedo = albedo / 100.0D;
        }
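        // (OLA maplets store a scaled intensity, mapped back through the intensity
        // range from the header above; SPC maplets store 100x the relative albedo.)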
        data[n++][i + halfsize][j + halfsize] = albedo;

        // sigmas default to 0 unless a SIGMAS file was specified
        float sigmaVal = getSigma(sigmaInput, sigmaScale);

        data[n++][i + halfsize][j + halfsize] = sigmaVal;

        // quality defaults to 0 unless a quality file was specified
        float qualVal = getQuality(qualityInput);

        data[n++][i + halfsize][j + halfsize] = qualVal;

        if ((excludePosition) && (!hazParam.noHazard)) {
          // NFT MLN; includes Hazard plane, initialized to initial value
          data[n++][i + halfsize][j + halfsize] = hazParam.initialValue;
        }
      }

    is.close();
    if (sigmabis != null) sigmabis.close();
    if (qualbis != null) qualbis.close();

    String sigmaSum = null;
    if (sigsumFile != null) {
      sigmaSum = parseSigsumFile(sigsumFile);
    }

    // Map<String, HeaderCard> headerValues = new LinkedHashMap<String, HeaderCard>();

    // create new fits header builder
    FitsHdrBuilder hdrBuilder = FitsHdr.getBuilder();

    if (fitsConfigFile != null) {
      // initialize header cards with values from configfile
      hdrBuilder = FitsHdr.configHdrBuilder(fitsConfigFile, hdrBuilder);
    }

    if (sigmaSum != null) {
      // able to parse sigma summary value. Show this in header builder
      String hdrTag = HeaderTag.SIGMA.toString();
      // update sigma summary value
      hdrBuilder.setVCbyHeaderTag(HeaderTag.SIGMA, sigmaSum, HeaderTag.SIGMA.comment());

      // headerValues.put(HeaderTag.SIGMA.toString(),
      // new HeaderCard(HeaderTag.SIGMA.toString(), sigmaSum, HeaderTag.SIGMA.comment()));

      // define this SIGMA as a measurement of height uncertainty

      hdrBuilder.setVCbyHeaderTag(
          HeaderTag.SIG_DEF, "height uncertainty", HeaderTag.SIG_DEF.comment());

      // headerValues.put(HeaderTag.SIG_DEF.toString(),
      // new HeaderCard(HeaderTag.SIG_DEF.toString(), "Uncertainty", HeaderTag.SIG_DEF.comment()));
      //// "Definition of the SIGMA uncertainty metric"));

    }

    // set the mapletFile as the DATASRC
    hdrBuilder.setVbyHeaderTag(HeaderTag.DATASRCF, new File(mapletFile).getName());

    // set the MAP_NAME
    hdrBuilder.setVbyHeaderTag(HeaderTag.MAP_NAME, mapName);

    // create list describing the planes in the datacube
    List<PlaneInfo> planeList = new ArrayList<>();

    // determine SrcProductType from header builder
    String dataSource = SrcProductType.UNKNOWN.toString();
    if (hdrBuilder.containsKey(HeaderTag.DATASRC.toString())) {
      HeaderCard srcCard = hdrBuilder.getCard(HeaderTag.DATASRC.toString());
      dataSource = srcCard.getValue();
    }

    // use scalFactor to determine GSD
    double gsd = scale * scalFactor;

    // assume all maplets are local, not global.
    boolean isGlobal = false;

    // create FitsData object. Stores data and relevant information about the data
    FitsDataBuilder fitsDataB = new FitsDataBuilder(data, isGlobal);
    FitsData fitsData =
        fitsDataB
            .setV(V)
            .setAltProdType(productType)
            .setDataSource(dataSource)
            .setU(ux, UnitDir.UX)
            .setU(uy, UnitDir.UY)
            .setU(uz, UnitDir.UZ)
            .setScale(scale)
            .setGSD(gsd)
            .build();

    if (excludePosition) {

      // define SIG_DEF using just "Uncertainty" for NFT
      hdrBuilder.setVCbyHeaderTag(HeaderTag.SIG_DEF, "Uncertainty", HeaderTag.SIG_DEF.comment());

      // change comment for DQUAL_2
      hdrBuilder.setCbyHeaderTag(HeaderTag.DQUAL_2, "Data Quality Metric: mean residual [m]");

      planeList.add(PlaneInfo.HEIGHT);
      // call this plane ALBEDO even if OLA is the source
      planeList.add(PlaneInfo.ALBEDO);
      planeList.add(PlaneInfo.SIGMA);
      planeList.add(PlaneInfo.QUALITY);
      if (!hazParam.noHazard) {
        planeList.add(PlaneInfo.HAZARD);
      }

      // create llrData object. Stores lat,lon,radius information. Needed to fill out fits header
      FitsDataBuilder llrDataB = new FitsDataBuilder(llrData, isGlobal);
      FitsData llrFitsData =
          llrDataB
              .setV(V)
              .setAltProdType(productType)
              .setDataSource(dataSource)
              .setU(ux, UnitDir.UX)
              .setU(uy, UnitDir.UY)
              .setU(uz, UnitDir.UZ)
              .setScale(scale)
              .setGSD(gsd)
              .build();

      // fill out fits header with information in llrFitsData
      hdrBuilder.setByFitsData(llrFitsData);

      saveNFTFits(hdrBuilder, fitsData, planeList, namingConvention, outfile);

    } else {

      planeList.add(PlaneInfo.LAT);
      planeList.add(PlaneInfo.LON);
      planeList.add(PlaneInfo.RAD);
      planeList.add(PlaneInfo.X);
      planeList.add(PlaneInfo.Y);
      planeList.add(PlaneInfo.Z);
      planeList.add(PlaneInfo.HEIGHT);
      planeList.add(PlaneInfo.ALBEDO);
      planeList.add(PlaneInfo.SIGMA);
      planeList.add(PlaneInfo.QUALITY);

      saveDTMFits(
          hdrBuilder, fitsData, planeList, namingConvention, productType, isGlobal, outfile);
    }
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("input-map").required().hasArg().desc("input maplet file").build());
    options.addOption(
        Option.builder("output-fits").required().hasArg().desc("output FITS file").build());
    options.addOption(
        Option.builder("exclude-position")
            .desc(
                "Only save out the height, albedo, sigma, quality, and hazard planes to the output file. Used for creating NFT MLNs.")
            .build());
    options.addOption(
        Option.builder("noHazard")
            .desc(
                "Only used in conjunction with -exclude-position. If present then the NFT MLN will NOT include a Hazard plane initially filled with all ones.")
            .build());
    options.addOption(
        Option.builder("hazardVal")
            .hasArg()
            .desc(
                "Only used in conjunction with -exclude-position. If present then will use the specified value.")
            .build());
    options.addOption(
        Option.builder("lsb")
            .desc(
                """
                By default the sigmas and quality binary files are assumed to be in big-endian floating
                point format. Pass this argument if you know your sigma and quality binary files are
                in little-endian format. For example, products created by SPC executables are OS
                dependent and Intel Linux OSes use little-endian.""")
            .build());
    options.addOption(
        Option.builder("scalFactor")
            .hasArg()
            .desc(
                "Enter scale factor used to convert scale to ground sample distance in mm, i.e. for SPC maplets the scale factor is 1000000 (km to mm). Set to 1.0e6 by default.")
            .build());
    options.addOption(
        Option.builder("sigmas-file")
            .hasArg()
            .desc(
                "Path to binary sigmas file containing sigma values per pixel, in same order as the maplet file. If this option is omitted, the sigma plane is set to all zeros.")
            .build());
    options.addOption(
        Option.builder("sigsum-file")
            .hasArg()
            .desc(
                "Path to ascii sigma summary file, containing the overall sigma value of the maplet.")
            .build());
    options.addOption(
        Option.builder("sigmaScale")
            .hasArg()
            .desc(
                "Scale sigmas from sigmas-file by <value>. Only applicable if -sigmas-file is used. Defaults to 1 if not specified.")
            .build());
    options.addOption(
        Option.builder("mapname")
            .hasArg()
            .desc("Sets the MAP_NAME fits keyword to <mapname>. Default is 'Non-NFT DTM'")
            .build());
    options.addOption(
        Option.builder("quality-file")
            .hasArg()
            .desc(
                "Path to binary quality file containing quality values. If this option is omitted, the quality plane is set to all zeros.")
            .build());
    options.addOption(
        Option.builder("configFile")
            .hasArg()
            .desc(
                """
                Path to fits configuration file that contains keywords and values which should be
                included in the fits header. The fits header will always be fully populated with all
                keywords that are required by the ALTWG SIS. Values that cannot be derived from the
                data itself may be left unpopulated or set to UNK; this configuration file is a way
                to populate those values.""")
            .build());
    options.addOption(
        Option.builder("namingConvention")
            .hasArg()
            .desc(
                """
                Renames the output fits file per the naming convention specified by the string.
                Currently supports 'altproduct', 'dartproduct', and 'altnftmln' conventions.
                NOTE that -exclude-position does not automatically choose the 'altnftmln' naming
                convention! The ALTWG NFT MLN naming convention (altnftmln) MUST BE EXPLICITLY
                SPECIFIED. The renamed file is placed in the path specified by -output-fits.""")
            .build());
    return options;
  }
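
  // A typical invocation (file names here are hypothetical):
  //   Maplet2FITS -input-map EQ0001.MAP -output-fits EQ0001.fits -sigmas-file SIGMAS.DAT -lsb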

  public static void main(String[] args) throws FitsException, IOException {
    TerrasaurTool defaultOBJ = new Maplet2FITS();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    boolean excludePosition = cl.hasOption("exclude-position");
    boolean noHazard = cl.hasOption("noHazard");
    String namingConvention = cl.getOptionValue("namingConvention", "noneused");
    boolean swapBytes = cl.hasOption("lsb");
    String sigmasFile = cl.hasOption("sigmas-file") ? cl.getOptionValue("sigmas-file") : null;
    String sigsumFile = cl.hasOption("sigsum-file") ? cl.getOptionValue("sigsum-file") : null;
    String qualityFile = cl.hasOption("quality-file") ? cl.getOptionValue("quality-file") : null;
    String fitsConfigFile = cl.hasOption("configFile") ? cl.getOptionValue("configFile") : null;
    String mapName = cl.getOptionValue("mapname", "Non-NFT DTM");

    double scalFactor =
        Double.parseDouble(cl.getOptionValue("scalFactor", "1e6").replaceAll("[dD]", "e"));
    double sigmaScale =
        Double.parseDouble(cl.getOptionValue("sigmaScale", "1.0").replaceAll("[dD]", "e"));
    double hazardVal =
        Double.parseDouble(cl.getOptionValue("hazardVal", "1.0").replaceAll("[dD]", "e"));
    AltwgDataType altwgProduct = AltwgDataType.NA;

    if (sigsumFile != null) logger.info("using {} to parse for global uncertainty.", sigsumFile);

    if (!namingConvention.isEmpty()) {
      if (excludePosition) {
        altwgProduct = AltwgDataType.NFTDTM;
      } else {
        altwgProduct = AltwgDataType.DTM;
      }
    }

    HazardParams hazParams = getHazardParam(noHazard, hazardVal);

    String mapletFile = cl.getOptionValue("input-map");
    String outfile = cl.getOptionValue("output-fits");
    logger.info("altwgProductType:{}", altwgProduct.toString());
    run(
        mapletFile,
        outfile,
        altwgProduct,
        excludePosition,
        fitsConfigFile,
        sigmasFile,
        sigsumFile,
        qualityFile,
        namingConvention,
        swapBytes,
        scalFactor,
        sigmaScale,
        mapName,
        hazParams);
  }
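
  /**
   * Parse the overall sigma value from a sigma summary file. Two layouts are handled: the SPC form
   * {@code <maplet filename> <sigma summary> ...}, and a bare-number form with the mean on the
   * first line and, optionally, the median on the second.
   */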
  public static String parseSigsumFile(String sigsumFile) throws IOException {
    if (!Files.exists(Paths.get(sigsumFile))) {
      System.out.println(
          "Warning! Sigmas summary file:"
              + sigsumFile
              + " does not exist! Not "
              + "able to parse sigma summary.");
      return null;
    } else {
      List<String> content = FileUtils.readLines(new File(sigsumFile), Charset.defaultCharset());
      String firstLine = content.get(0);

      if (firstLine != null) {

        // first assume format is <mapletfilename> <sigma summary> <blah>
        // this is the SPC format
        // try to parse a double from this
        String[] array = firstLine.split(" +");
        boolean foundFirst = false;
        if (array.length > 1) {
          System.out.println("Assuming sigma summary file of form:");
          System.out.println("<maplet Filename> <sigma summary> <blah>");
          for (String s : array) {
            if (foundFirst) {
              // second cell after finding the first non-empty cell should be the sigma summary
              // value!
              double thisD = Double.parseDouble(s.replaceAll("[dD]", "e"));
              return Double.isNaN(thisD) ? null : Double.toString(thisD);
            }
            if (!s.isEmpty()) {
              // loop through array until you find first non-empty string.
              // This should be the maplet filename
              foundFirst = true;
            }
          }
        } else {

          // assume format is mean on first line, median on second line
          if (content.size() > 1) {
            System.out.println("Assuming sigma summary file of form:");
            System.out.println("<mean value>");
            System.out.println("<median value>");
            System.out.println("Will use median value");
            return content.get(1).replaceAll("\\s+", "");
          } else {
            // first line is not null but there is only one line
            System.out.println("Assuming first value in first line contains sigma summary!");
            return content.get(0).replaceAll("\\s+", "");
          }
        }

        // did not find first non-empty string or did not find sigma
        System.out.println("WARNING: Could not parse sigma summary file!");
        return null;

        /*
         * firstLine = firstLine.replace(" ", ""); double thisD = StringUtil.parseSafeD(firstLine);
         * if (Double.isNaN(thisD)) { Pattern p = Pattern.compile("^\\s*(\\d+\\.?\\d+)\\s*.*");
         * Matcher m = p.matcher(firstLine); if (m.matches()) { return m.group(1); } else { return
         * null; } } else { return Double.toString(thisD); }
         */
      } else {
        return null;
      }
    }
  }

  /**
   * Given a maplet, return the command call to Maplet2FITS to turn it into a fits file. Allows one
   * to specify filenames of sigma, sigma summary, and quality files. Set string variables to empty
   * string if you do not wish to include them in the command call. Method generates the full set of
   * fits file planes, i.e. NOT an NFT Fits file.
   *
   * @param mapletFile - path to maplet
   * @param outFits - output FITS file
   * @param configFile - configuration file to use
   * @param altwgNaming - if true, add the -altwgNaming flag to the command
   * @param lsb - if true assume sigma and quality files are in little-endian
   * @param sigmaFile - pointer to sigma file. Leave as empty string if no file exists/needed
   *     (defaults to 0).
   * @param sigsumFile - pointer to sigma summary file. Leave as empty string if no file
   *     exists/needed (defaults to 0)
   * @param sigmaScaleF - sigma scale factor to be used with sigma values read from sigma file.
   * @param qualityFile - pointer to quality file. Leave as empty string if no file exists/needed
   *     (defaults to 0).
   * @return command line string that invokes Maplet2FITS
   */
  public static String getCmd(
      File mapletFile,
      String outFits,
      String configFile,
      boolean altwgNaming,
      boolean lsb,
      String sigmaFile,
      String sigsumFile,
      String sigmaScaleF,
      String qualityFile) {

    StringBuilder toolexe = new StringBuilder("Maplet2FITS");
    if (!configFile.isEmpty()) {
      toolexe.append(" -configFile ");
      toolexe.append(configFile);
    }

    if (altwgNaming) {
      toolexe.append(" -altwgNaming");
    }

    if (lsb) {
      toolexe.append(" -lsb");
    }

    Path thisFile;
    if (!sigmaFile.isEmpty()) {
      thisFile = Paths.get(sigmaFile);
      if (Files.exists(thisFile)) {
        toolexe.append(" -sigmas-file ");
        toolexe.append(thisFile.toAbsolutePath());
      } else {
        logger.warn("Could not find sigmas file:{}", sigmaFile);
      }
    }

    if (!sigmaScaleF.isEmpty()) {
      toolexe.append(" -sigmaScale ");
      toolexe.append(sigmaScaleF);
    }

    if (!sigsumFile.isEmpty()) {
      thisFile = Paths.get(sigsumFile);
      if (Files.exists(thisFile)) {
        toolexe.append(" -sigsum-file ");
        toolexe.append(thisFile.toAbsolutePath());
      } else {
        logger.warn("Could not find sigma summary file:{}", sigsumFile);
      }
    }

    if (!qualityFile.isEmpty()) {
      thisFile = Paths.get(qualityFile);
      if (Files.exists(thisFile)) {
        toolexe.append(" -quality-file ");
        toolexe.append(thisFile.toAbsolutePath());
      } else {
        logger.warn("Could not find quality file:{}", qualityFile);
      }
    }

    toolexe.append(" ");
    toolexe.append(mapletFile.getAbsolutePath());
    toolexe.append(" ");
    toolexe.append(outFits);

    return toolexe.toString();
  }

  private static float getSigma(DataInput dataIn, double sigmaScale) throws IOException {
    float floatVal = 0f;
    if (dataIn != null) {
      floatVal = dataIn.readFloat();
    }
    floatVal = floatVal * (float) sigmaScale;
    return floatVal;
  }

  private static float getQuality(DataInput dataIn) throws IOException {
    float floatVal = 0f;
    if (dataIn != null) {
      floatVal = dataIn.readFloat();
    }
    return floatVal;
  }

  private static void saveNFTFits(
      FitsHdrBuilder hdrBuilder,
      FitsData fitsData,
      List<PlaneInfo> planeList,
      String namingConvention,
      String outfile)
      throws FitsException, IOException {

    File crossrefFile = null;

    NameConvention nameConvention = NameConvention.parseNameConvention(namingConvention);
    if (nameConvention != NameConvention.NONEUSED) {

      String outNFTFname = outfile;
      Path outPath = Paths.get(outNFTFname);

      // save old filename
      String oldFilename = outPath.getFileName().toString();

      // hardcode for now. NFT is not a nominal Toolkit product.
      AltwgDataType productType = null;
      boolean isGlobal = false;

      // generate new NFT output filename based on naming convention
      ProductNamer productNamer = NamingFactory.parseNamingConvention(namingConvention);
      String newbaseName = productNamer.productbaseName(hdrBuilder, productType, isGlobal);

      String newFilename = newbaseName + ".fits";

      // replace outfile with new nft filename and write FITS file to it.
      outNFTFname = outNFTFname.replace(oldFilename, newFilename);

      // save new PDS name in cross-reference file, for future reference
      crossrefFile = new File(outNFTFname + ".crf");
      AsciiFile crfFile = new AsciiFile(crossrefFile.getAbsolutePath());
      crfFile.streamSToFile(newbaseName, 0);
      crfFile.closeFile();

      outfile = outNFTFname;
    }

    FitsHeaderType hdrType = FitsHeaderType.NFTMLN;
    ProductFits.saveNftFits(fitsData, planeList, outfile, hdrBuilder, hdrType, crossrefFile);
  }

  private static void saveDTMFits(
      FitsHdrBuilder hdrBuilder,
      FitsData fitsData,
      List<PlaneInfo> planeList,
      String namingConvention,
      AltwgDataType productType,
      boolean isGlobal,
      String outfile)
      throws FitsException, IOException {

    // Use a different static method to create the ALTWG product. This allows me to differentiate
    // between different kinds of fits header types.
    FitsHeaderType hdrType = FitsHeaderType.DTMLOCALALTWG;

    File crossrefFile;
    String outFitsFname = outfile;

    // possible renaming of output file
    NameConvention nameConvention = NameConvention.parseNameConvention(namingConvention);
    File[] outFiles =
        NamingFactory.getBaseNameAndCrossRef(
            nameConvention, hdrBuilder, productType, isGlobal, outfile);

    // check if cross-ref file is not null. If so then output file was renamed to naming convention.
    crossrefFile = outFiles[1];
    if (crossrefFile != null) {

      // rename fitsFile per naming convention and create a cross-reference file.
      String baseOutputName = outFiles[0].toString();

      // determine whether original outfile is a directory
      File outFname = new File(outfile);
      String outputFolder = outFname.getAbsoluteFile().getParent();
      if (outFname.isDirectory()) {
        outputFolder = outfile;

        // cannot create cross-reference file if original outfile was a directory. make it null;
        crossrefFile = null;
      }
      outFitsFname = new File(outputFolder, baseOutputName + ".fits").getAbsolutePath();
    }
    ProductFits.saveDataCubeFits(
        fitsData, planeList, outFitsFname, hdrBuilder, hdrType, crossrefFile);
  }
}
593
src/main/java/terrasaur/apps/OBJ2DSK.java
Normal file
@@ -0,0 +1,593 @@
package terrasaur.apps;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import org.apache.commons.cli.Options;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import terrasaur.fits.HeaderTag;
import terrasaur.fits.ProductFits;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.AppVersion;
import terrasaur.utils.JCommanderUsage;
import terrasaur.utils.NativeLibraryLoader;
import terrasaur.utils.PolyDataUtil;
import terrasaur.utils.ProcessUtils;
import vtk.vtkPolyData;

public class OBJ2DSK implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  @Override
  public String shortDescription() {
    return "Convert an OBJ shape file to SPICE DSK format.";
  }

  @Override
  public String fullDescription(Options options) {
    StringBuilder builder = new StringBuilder();
    Arguments arguments = new Arguments();
    JCommander jcommander = new JCommander(arguments);
    jcommander.setProgramName("OBJ2DSK");

    JCommanderUsage jcUsage = new JCommanderUsage(jcommander);
    jcUsage.setColumnSize(100);
    jcUsage.usage(builder, 4, arguments.commandDescription);
    return builder.toString();
  }

  private enum DSK_KEYS {
    SURF_NAME,
    CENTER_NAME,
    REFFRAME_NAME,
    NAIF_SURFNAME,
    NAIF_SURFCODE,
    NAIF_SURFBODY,
    METAK,
    COMMENTFILE
  }
|
||||
|
||||
private static class Arguments {
|
||||
|
||||
private final String commandDescription = AppVersion.getVersionString()
|
||||
+ "\n\nConverts a triangular plate model in OBJ format to a SPICE DSK file.\n"
|
||||
+ "The SPICE utility application 'mkdsk' must already be present on your PATH.\n";
|
||||
|
||||
@Parameter(names = "-help", help = true)
|
||||
private boolean help;
|
||||
|
||||
@Parameter(names = "--latMin", description = "<latMin> Minimum latitude of OBJ in degrees.",
|
||||
required = false, order = 0)
|
||||
private double latMin = -90D;
|
||||
|
||||
@Parameter(names = "--latMax", description = "<latMax> Maximum latitude of OBJ in degrees.",
|
||||
required = false, order = 1)
|
||||
private double latMax = 90D;
|
||||
|
||||
@Parameter(names = "--lonMin", description = "<lonMin> Minimum longitude of OBJ in degrees.",
|
||||
required = false, order = 2)
|
||||
private double lonMin = 0;
|
||||
|
||||
@Parameter(names = "--lonMax", description = "<lonMax> Maximum longitude of OBJ in degrees.",
|
||||
required = false, order = 3)
|
||||
private double lonMax = 360D;
|
||||
|
||||
@Parameter(names = "--fitsFile",
|
||||
description = "<filename> path to DTM fits file containing lat,lon"
|
||||
+ " information as planes. Assumes PLANE1=latitude, PLANE2=longitude. Use in place of specifying lat/lon min/max values.",
|
||||
required = false, order = 4)
|
||||
private String fitsFile = "";
|
||||
|
||||
@Parameter(names = "--fine-scale",
|
||||
description = "<fine-voxel-scale> Floating point value representing the "
|
||||
+ " ratio of the spatial index's fine voxel edge length to the average plate extent. "
|
||||
+ " The 'extent' of a plate in a given coordinate direction is the difference between the maximum and minimum "
|
||||
+ " values of that coordinate attained on the plate. Only required if mkdsk version is "
|
||||
+ " lower than 66.",
|
||||
required = false, order = 11)
|
||||
double fineVoxScale = Double.NaN;
|
||||
|
||||
@Parameter(names = "--coarse-scale",
|
||||
description = " <coarse-voxel-scale>"
|
||||
+ " Integer value representing the ratio of the edge length of coarse voxels to"
|
||||
+ " fine voxels. The number must be large enough that the total number of coarse"
|
||||
+ " voxels is less than the value of MAXCGR, which is currently 1E5."
|
||||
+ " Only required if mkdsk version is lower than 66.",
|
||||
required = false, order = 12)
|
||||
Integer coarseVoxScale = -999;
|
||||
|
||||
@Parameter(names = "--useSetupFile",
|
||||
description = "<inputSetupFile> Use <inputSetupFile>"
|
||||
+ " instead of the default setup file created by the tool.",
|
||||
required = false, order = 13)
|
||||
String inputSetup = "";
|
||||
|
||||
@Parameter(names = "--writesetupFile",
|
||||
description = "<outputSetupFile> Write the setup file"
|
||||
+ " to the specified path instead of writing it as a temporary file which gets deleted"
|
||||
+ " after execution.",
|
||||
required = false, order = 14)
|
||||
String outsetupFname = "";
|
||||
|
||||
@Parameter(names = "--keepTempFiles",
|
||||
description = "enable this to prevent setup files"
|
||||
+ " from being deleted. Used for debugging purposes to see what is being sent"
|
||||
+ " to mkdsk.",
|
||||
required = false, order = 15)
|
||||
boolean keepTmpFiles = false;
|
||||
|
||||
@Parameter(names = "--mkFile",
|
||||
description = "<spice-meta-kernel-file> path to SPICE meta kernel file."
|
||||
+ " Metakernel only needs to point to leap seconds kernel and a frames kernel that contains"
|
||||
+ " the digital ID to CENTER_NAME and REF_FRAME_NAME lookup table."
|
||||
+ " This argument is REQUIRED if user does NOT supply a setupFile!",
|
||||
required = false, order = 4)
|
||||
String mkFile = "";
|
||||
|
||||
@Parameter(names = "--surfName",
|
||||
description = "<surfaceName> Allows user to modify the "
|
||||
+ " SURFACE_NAME (name of the specific shape data set for the central body)"
|
||||
+ " used in the default setup file created by the tool. This is a required"
|
||||
+ " keyword in the setup file.",
|
||||
required = false, order = 5)
|
||||
String surfaceName = "BENNU";
|
||||
|
||||
@Parameter(names = "--centName", description = "<centerName> Allows user to modify the "
|
||||
+ " CENTER_NAME (central body name) used in the default setup file created by the tool. "
|
||||
+ " Can also be an ID code. This is a required keyword in the setup file.",
|
||||
required = false, order = 6)
|
||||
String centerName = "BENNU";
|
||||
|
||||
@Parameter(names = "--refName", description = "<refFrameName> Allows user to modify the "
|
||||
+ " REF_FRAME_NAME (reference frame name) used in the default setup file created by the tool. "
|
||||
+ " This is a required keyword in the setup file.", required = false, order = 7)
|
||||
String refFrameName = "IAU_BENNU";
|
||||
|
||||
@Parameter(names = "--naif_surfName",
|
||||
description = "<naif surface name> Allows user to add the "
|
||||
+ " NAIF_SURFACE_NAME to the default setup file created by the tool. "
|
||||
+ " This may be needed under certain conditions. Optional keyword. "
|
||||
+ " Default is not to use it.",
|
||||
required = false, order = 8)
|
||||
String naifSurfName = "";
|
||||
|
||||
@Parameter(names = "--naif_surfCode",
|
||||
description = "<integer ID surface code> Allows user to add the "
|
||||
+ " NAIF_SURFACE_CODE to the default setup file created by the tool. "
|
||||
+ " Allows the tool to associate this ID code to the NAIF_SURFACE_NAME. Optional keyword. "
|
||||
+ " Default is not to use it.",
|
||||
required = false, order = 9)
|
||||
String naifSurfCode = "";
|
||||
|
||||
@Parameter(names = "--naif_surfBody",
|
||||
description = "<integer body ID code> Allows user to add the "
|
||||
+ " NAIF_SURFACE_BODY to the default setup file created by the tool. "
|
||||
+ " This may be needed under certain conditions. Optional keyword."
|
||||
+ " Default is not to use it.",
|
||||
required = false, order = 10)
|
||||
String naifSurfBody = "";
|
||||
|
||||
@Parameter(names = "--cmtFile",
|
||||
description = "<commentFile> Specify the comment file"
|
||||
+ " that mkdsk will add to the DSK. Comment file is an ASCII file containing"
|
||||
+ " additional information about the DSK. Default is single space.",
|
||||
required = false, order = 11)
|
||||
String cmtFile = " ";
|
||||
|
||||
@Parameter(names = "-shortDescription", hidden = true)
|
||||
private boolean shortDescription = false;
|
||||
|
||||
@Parameter(
|
||||
description = " Versions of mkdsk that are V066 and higher will automatically calculate the\n"
|
||||
+ " voxel scales which optimize processing time without exceeding maximum array sizes.\n"
|
||||
+ " However, if you are using a version of mkdsk that is below v066 you can specify the\n"
|
||||
+ " FINE_VOXEL_SCALE and COARSE_VOXEL_SCALE via the '--fine-scale' and '--coarse-scale'\n"
|
||||
+ " optional arguments.\n"
|
||||
+ " Run 'mkdsk' by itself and note the 'Toolkit ver' version number. If it is below\n"
|
||||
+ " 066 then you MUST specify the fine and coarse voxel scales.\n"
|
||||
+ " See SPICE documentation notes on recommended values.\n\n"
|
||||
+ "Usage: OBJ2DSK [options] <input-obj-file> <output-dsk-file>\nWhere:\n"
|
||||
+ " <input-obj-file>\n" + " input OBJ file\n" + " <output-dsk-file>\n"
|
||||
+ " output dsk file\n"
|
||||
+ " NOTE: MUST set --metakFile if not supplying a custom setup file!")
|
||||
private List<String> files = new ArrayList<>();
|
||||
|
||||
}
|
||||
|
||||
private final Double fineVoxScale;
|
||||
private final Integer coarseVoxScale;
|
||||
|
||||
public OBJ2DSK(double fineVoxScale, int coarseVoxScale) {
|
||||
this.fineVoxScale = fineVoxScale;
|
||||
this.coarseVoxScale = coarseVoxScale;
|
||||
}
|
||||
|
||||
public OBJ2DSK() {
|
||||
this(Double.NaN, -1);
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
TerrasaurTool defaultObj = new OBJ2DSK();
|
||||
|
||||
int i = 0;
|
||||
Map<String, Double> latLonMinMax = new HashMap<String, Double>();
|
||||
// String fitsFname = "";
|
||||
// String temp;
|
||||
// String inputSetup = "";
|
||||
// boolean keepTmpFiles = false;
|
||||
|
||||
// check for -shortDescription before looking for required arguments
|
||||
for (String arg : args) {
|
||||
if (arg.equals("-shortDescription")) {
|
||||
System.out.println(defaultObj.shortDescription());
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
Arguments arg = new Arguments();
|
||||
|
||||
JCommander command = new JCommander(arg);
|
||||
try {
|
||||
// @Deprecated
|
||||
// command = new JCommander(arg, args);
|
||||
command.parse(args);
|
||||
} catch (ParameterException ex) {
|
||||
System.out.println(defaultObj.fullDescription(null));
|
||||
System.out.println("Error parsing input arguments:");
|
||||
System.out.println(ex.getMessage());
|
||||
command = new JCommander(arg);
|
||||
System.exit(1);
|
||||
|
||||
}
|
||||
|
||||
if ((args.length < 1) || (arg.help)) {
|
||||
System.out.println(defaultObj.fullDescription(null));
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
// This is to avoid java crashing due to inability to connect to an X display
|
||||
System.setProperty("java.awt.headless", "true");
|
||||
|
||||
String spiceFile = "";
|
||||
String inFile = "";
|
||||
String outFile = "";
|
||||
if (arg.files.size() != 2) {
|
||||
String errMesg = "ERROR! Expecting 2 required inputs: input OBJ, output DSK";
|
||||
throw new RuntimeException(errMesg);
|
||||
} else {
|
||||
spiceFile = arg.mkFile;
|
||||
inFile = arg.files.get(0);
|
||||
outFile = arg.files.get(1);
|
||||
}
|
||||
|
||||
boolean useFitsFile = false;
|
||||
boolean latLonSet = false;
|
||||
|
||||
String fitsFname = arg.fitsFile;
|
||||
if (!fitsFname.isEmpty()) {
|
||||
useFitsFile = true;
|
||||
System.out.println("Will use lat,lons from " + fitsFname + " to set lat,lon bounds.");
|
||||
// load the fits header and parse for min, max lat, lon values
|
||||
latLonMinMax = ProductFits.minMaxLLFromFits(new File(fitsFname));
|
||||
|
||||
if (latLonMinMax.size() < 4) {
|
||||
System.out.println("ERROR! Could not parse all min,max lat/lon corners!");
|
||||
System.out.println("Unable to create DSK for " + inFile + "!");
|
||||
System.exit(1);
|
||||
}
|
||||
} else {
|
||||
// parse lat,lon bounds. Some of these may be set to default values.
|
||||
latLonMinMax.put(HeaderTag.MINLAT.toString(), arg.latMin);
|
||||
latLonMinMax.put(HeaderTag.MAXLAT.toString(), arg.latMax);
|
||||
latLonMinMax.put(HeaderTag.MINLON.toString(), arg.lonMin);
|
||||
latLonMinMax.put(HeaderTag.MAXLON.toString(), arg.lonMax);
|
||||
latLonSet = true;
|
||||
}
|
||||
|
||||
System.out.println("Using these lat, lon bounds:");
|
||||
for (String thisKey : latLonMinMax.keySet()) {
|
||||
System.out.println("key:" + thisKey + ", value:" + latLonMinMax.get(thisKey));
|
||||
}
|
||||
|
||||
NativeLibraryLoader.loadVtkLibraries();
|
||||
|
||||
|
||||
OBJ2DSK obj2dsk;
|
||||
if ((Double.isNaN(arg.fineVoxScale)) || (arg.coarseVoxScale < 0)) {
|
||||
obj2dsk = new OBJ2DSK();
|
||||
} else {
|
||||
obj2dsk = new OBJ2DSK(arg.fineVoxScale, arg.coarseVoxScale);
|
||||
}
|
||||
|
||||
// generate setup file if needed
|
||||
File setupFile = null;
|
||||
Map<DSK_KEYS, String> dskParams = new HashMap<DSK_KEYS, String>();
|
||||
dskParams.put(DSK_KEYS.SURF_NAME, arg.surfaceName);
|
||||
dskParams.put(DSK_KEYS.CENTER_NAME, arg.centerName);
|
||||
dskParams.put(DSK_KEYS.REFFRAME_NAME, arg.refFrameName);
|
||||
dskParams.put(DSK_KEYS.NAIF_SURFNAME, arg.naifSurfName);
|
||||
dskParams.put(DSK_KEYS.NAIF_SURFCODE, arg.naifSurfCode);
|
||||
dskParams.put(DSK_KEYS.NAIF_SURFBODY, arg.naifSurfBody);
|
||||
dskParams.put(DSK_KEYS.COMMENTFILE, arg.cmtFile);
|
||||
dskParams.put(DSK_KEYS.METAK, spiceFile);
|
||||
|
||||
String outsetupFname = arg.outsetupFname;
|
||||
String inputSetup = arg.inputSetup;
|
||||
|
||||
boolean keepTmpFiles = arg.keepTmpFiles;
|
||||
if (inputSetup.isEmpty()) {
|
||||
|
||||
if (spiceFile.isEmpty()) {
|
||||
String errMesg = "ERROR! MUST supply path to SPICE metakernel via --mkFile!";
|
||||
throw new RuntimeException(errMesg);
|
||||
}
|
||||
|
||||
System.out.println("Creating default setup file");
|
||||
setupFile = createSetup(latLonMinMax, obj2dsk.fineVoxScale, obj2dsk.coarseVoxScale, dskParams,
|
||||
outsetupFname);
|
||||
if (keepTmpFiles) {
|
||||
System.out.println("setup file created here:" + outsetupFname);
|
||||
} else {
|
||||
setupFile.deleteOnExit();
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
// check that input setup file exists
|
||||
setupFile = new File(inputSetup);
|
||||
if (!setupFile.exists()) {
|
||||
String errMesg = "Custom setup file:" + inputSetup + " not found!";
|
||||
throw new RuntimeException(errMesg);
|
||||
}
|
||||
System.out.println("Using custom setup file:" + inputSetup);
|
||||
|
||||
}
|
||||
obj2dsk.run(inFile, outFile, latLonMinMax, setupFile, outsetupFname, keepTmpFiles);
  }

  public void run(String infile, String outfile, Map<String, Double> latLonMinMax, File setupFile,
      String outsetupFname, boolean keepTmpFiles) throws Exception {

    System.out.println("Running OBJ2DSK.");
    System.out.println("FINE_VOXEL_SCALE = " + Double.toString(fineVoxScale));
    System.out.println("COARSE_VOXEL_SCALE = " + Integer.toString(coarseVoxScale));

    vtkPolyData inpolydata = PolyDataUtil.loadShapeModelAndComputeNormals(infile);

    // We need to save out the OBJ file again in case it contains comment
    // lines, since mkdsk does not support lines beginning with #.
    // The OBJ file is saved to a temporary filename in order to preserve the
    // original OBJ. The temporary file is deleted afterwards.
    File shapeModel = File.createTempFile("shapemodel-", null);
    shapeModel.deleteOnExit();
    PolyDataUtil.saveShapeModelAsOBJ(inpolydata, shapeModel.getAbsolutePath());

    // Delete the DSK file if it already exists, since otherwise mkdsk will complain
    if (new File(outfile).isFile())
      new File(outfile).delete();

    String command = "mkdsk -setup " + setupFile.getAbsolutePath() + " -input "
        + shapeModel.getAbsolutePath() + " -output " + outfile;
    ProcessUtils.runProgramAndWait(command, null, false);
  }

  /**
   * Create the setup file for the mkdsk executable.
   *
   * @param latLonCorners minimum/maximum latitude and longitude values, keyed by HeaderTag name
   * @param fineVoxScale FINE_VOXEL_SCALE value; only written if positive
   * @param coarseVoxScale COARSE_VOXEL_SCALE value; only written if positive
   * @param dskParams DSK keyword values to write to the setup file
   * @param setupFname path to write the setup file to; if empty, a temporary file is created
   * @return the setup file
   */
  private static File createSetup(Map<String, Double> latLonCorners, Double fineVoxScale,
      Integer coarseVoxScale, Map<DSK_KEYS, String> dskParams, String setupFname) {

    // evaluate latlon corners. Exit program if any are NaN.
    evaluateCorners(latLonCorners);

    File setupFile;

    if (setupFname.length() < 1) {
      setupFile = null;
      try {
        setupFile = File.createTempFile("setupfile-", null);
      } catch (IOException e) {
        String errMesg = "ERROR creating temporary setup file!";
        throw new RuntimeException(errMesg);
      }
    } else {
      setupFile = new File(setupFname);
    }
    System.out
        .println("Setup file for mkdsk created here:" + setupFile.getAbsolutePath());

    // Relativize the path to the metakernel file. Do this because mkdsk has a limit on the string
    // length to the metakernel. Get the normalized absolute path to mkFile in case the user enters
    // a relative path string, e.g. ../../SPICE/spice-kernels.mk
    Path currDir = FileSystems.getDefault().getPath("").toAbsolutePath();
    Path mkFile = Paths.get(dskParams.get(DSK_KEYS.METAK)).toAbsolutePath().normalize();

    System.out.println("currDir:" + currDir);
    System.out.println("mkFile:" + mkFile);

    // mkFile path relative to currDir
    Path relPath = currDir.relativize(mkFile);
    String spicefile = relPath.toString();

    if (spicefile.length() > 80) {
      System.out.println("Error: pointer to SPICE metakernel file may not be longer than"
          + " 80 characters.");
      System.out.println("The paths inside the metakernel file can be as long as 255 characters.");
      System.exit(1);
    }

    // create the content of the setup file
    StringBuilder sb = new StringBuilder();
    sb.append("\\begindata\n");
    sb.append("COMMENT_FILE = '" + dskParams.get(DSK_KEYS.COMMENTFILE) + "'\n");
    sb.append("LEAPSECONDS_FILE = '" + spicefile + "'\n");
    sb.append("SURFACE_NAME = '" + dskParams.get(DSK_KEYS.SURF_NAME) + "'\n");
    sb.append("CENTER_NAME = '" + dskParams.get(DSK_KEYS.CENTER_NAME) + "'\n");
    sb.append("REF_FRAME_NAME = '" + dskParams.get(DSK_KEYS.REFFRAME_NAME) + "'\n");
    sb.append("START_TIME = '1950-JAN-1/00:00:00'\n");
    sb.append("STOP_TIME = '2050-JAN-1/00:00:00'\n");
    sb.append("DATA_CLASS = 2\n");
    sb.append("INPUT_DATA_UNITS = ( 'ANGLES = DEGREES'\n");
    sb.append(" 'DISTANCES = KILOMETERS' )\n");
    sb.append("COORDINATE_SYSTEM = 'LATITUDINAL'\n");
    String valueString = String.format("MINIMUM_LATITUDE = %.5f\n",
        latLonCorners.get(HeaderTag.MINLAT.toString()));
    sb.append(valueString);

    valueString = String.format("MAXIMUM_LATITUDE = %.5f\n",
        latLonCorners.get(HeaderTag.MAXLAT.toString()));
    sb.append(valueString);

    valueString = String.format("MINIMUM_LONGITUDE = %.5f\n",
        latLonCorners.get(HeaderTag.MINLON.toString()));
    sb.append(valueString);

    valueString = String.format("MAXIMUM_LONGITUDE = %.5f\n",
        latLonCorners.get(HeaderTag.MAXLON.toString()));
    sb.append(valueString);

    sb.append("DATA_TYPE = 2\n");
    sb.append("PLATE_TYPE = 3\n");

    String val;
    if (fineVoxScale > 0D) {
      val = fineVoxScale.toString().trim();
      sb.append("FINE_VOXEL_SCALE = " + val + "\n");
    }
    if (coarseVoxScale > 0) {
      val = coarseVoxScale.toString().trim();
      sb.append("COARSE_VOXEL_SCALE = " + val + "\n");
    }

    String naifSurf = dskParams.get(DSK_KEYS.NAIF_SURFNAME);
    String naifCode = dskParams.get(DSK_KEYS.NAIF_SURFCODE);
    String naifBody = dskParams.get(DSK_KEYS.NAIF_SURFBODY);
    if ((naifSurf.length() > 0) && (naifCode.length() > 0) && (naifBody.length() > 0)) {
      sb.append("NAIF_SURFACE_NAME += '" + dskParams.get(DSK_KEYS.NAIF_SURFNAME) + "'\n");
      sb.append("NAIF_SURFACE_CODE += " + dskParams.get(DSK_KEYS.NAIF_SURFCODE) + "\n");
      sb.append("NAIF_SURFACE_BODY += " + dskParams.get(DSK_KEYS.NAIF_SURFBODY) + "\n");

    } else {
      System.out.println("optional NAIF body keywords not set. Will not use them in setup file.");
    }

    sb.append("\\begintext\n");

    try {
      FileWriter os = new FileWriter(setupFile);
      BufferedWriter out = new BufferedWriter(os);
      out.write(sb.toString());
      out.close();

    } catch (IOException e) {
      e.printStackTrace();
      System.err.println("ERROR creating setup file for OBJ2DSK! Stopping with error!");
      System.exit(1);
    }
    return setupFile;
  }
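
  // For reference, a sketch of the setup file this method writes when run with the default
  // BENNU arguments, no NAIF surface keywords, and no voxel-scale overrides. The metakernel
  // path is a hypothetical example; the actual value comes from --mkFile.
  //
  //   \begindata
  //   COMMENT_FILE = ' '
  //   LEAPSECONDS_FILE = 'kernels/spice-kernels.mk'
  //   SURFACE_NAME = 'BENNU'
  //   CENTER_NAME = 'BENNU'
  //   REF_FRAME_NAME = 'IAU_BENNU'
  //   START_TIME = '1950-JAN-1/00:00:00'
  //   STOP_TIME = '2050-JAN-1/00:00:00'
  //   DATA_CLASS = 2
  //   INPUT_DATA_UNITS = ( 'ANGLES = DEGREES'
  //    'DISTANCES = KILOMETERS' )
  //   COORDINATE_SYSTEM = 'LATITUDINAL'
  //   MINIMUM_LATITUDE = -90.00000
  //   MAXIMUM_LATITUDE = 90.00000
  //   MINIMUM_LONGITUDE = 0.00000
  //   MAXIMUM_LONGITUDE = 360.00000
  //   DATA_TYPE = 2
  //   PLATE_TYPE = 3
  //   \begintext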

  /**
   * Evaluate the results of string parsing. Exit the program if any value resolved to NaN.
   *
   * @param latLonCorners lat/lon corner values to check
   */
  private static void evaluateCorners(Map<String, Double> latLonCorners) {

    for (String key : latLonCorners.keySet()) {
      // note: a comparison with == Double.NaN is always false, so use Double.isNaN() here
      if (Double.isNaN(latLonCorners.get(key))) {
        System.err.println("ERROR! Value for:" + key + " is NaN! Retry with a valid numeric value.");
        System.err.println("Exiting program!");
        System.exit(1);
      }
    }
  }

}
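
For reference, a minimal sketch of driving OBJ2DSK from another Java program by delegating
to its public `main` entry point. The metakernel and shape model paths below are hypothetical
placeholders, and `mkdsk` must be on the PATH as the usage text above notes.

    package terrasaur.examples;

    /** Convert a global OBJ shape model to a SPICE DSK via OBJ2DSK.main(). */
    public class OBJ2DSKExample {
      public static void main(String[] args) throws Exception {
        terrasaur.apps.OBJ2DSK.main(new String[] {
            "--mkFile", "kernels/spice-kernels.mk", // metakernel pointing to LSK + frames kernel
            "--centName", "BENNU",                  // central body name
            "--refName", "IAU_BENNU",               // body-fixed reference frame
            "global.obj",                           // input OBJ file (hypothetical)
            "global.bds"                            // output DSK file (hypothetical)
        });
      }
    }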
526
src/main/java/terrasaur/apps/PointCloudFormatConverter.java
Normal file
@@ -0,0 +1,526 @@
package terrasaur.apps;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.spi.StandardLevel;
import picante.math.vectorspace.UnwritableVectorIJK;
import spice.basic.Vector3;
import terrasaur.enums.FORMATS;
import terrasaur.smallBodyModel.BoundingBox;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.*;
import vtk.vtkCellArray;
import vtk.vtkFloatArray;
import vtk.vtkIdList;
import vtk.vtkPoints;
import vtk.vtkPolyData;
import vtk.vtkPolyDataWriter;

public class PointCloudFormatConverter implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  @Override
  public String shortDescription() {
    return "Convert an input point cloud to a new format.";
  }

  @Override
  public String fullDescription(Options options) {
    String header = "";
    String footer =
        """
        This program converts an input point cloud to a new format.

        Supported input formats are ASCII, BIN3 (x, y, z), BIN4 (x, y, z, w), BIN7 (t, x, y, z, s/c x, y, z), FITS, ICQ, OBJ, PLT, PLY, and VTK. Supported output formats are ASCII, BIN3, OBJ, and VTK.

        ASCII format is white space delimited x y z coordinates. BINARY files must contain double precision x y z coordinates.""";
    return TerrasaurTool.super.fullDescription(options, header, footer);
  }

  private FORMATS inFormat;
  private FORMATS outFormat;
  private vtkPoints pointsXYZ;
  private vtkPolyData polyData;
  private Vector3 center;
  private int halfSize;
  private double groundSampleDistance;
  private double clip;
  private String additionalGMTArgs;
  private double mapRadius;

  public static vtkPoints readPointCloud(String filename) {
    PointCloudFormatConverter pcfc = new PointCloudFormatConverter(filename, FORMATS.VTK);
    pcfc.read(filename, false);
    return pcfc.getPoints();
  }

  private PointCloudFormatConverter() {}

  public PointCloudFormatConverter(FORMATS inFormat, String outFilename) {
    this(inFormat, FORMATS.formatFromExtension(outFilename));
  }

  public PointCloudFormatConverter(String inFilename, FORMATS outFormat) {
    this(FORMATS.formatFromExtension(inFilename), outFormat);
  }

  public PointCloudFormatConverter(String inFilename, String outFilename) {
    this(FORMATS.formatFromExtension(inFilename), FORMATS.formatFromExtension(outFilename));
  }

  public PointCloudFormatConverter(FORMATS inFormat, FORMATS outFormat) {
    this.inFormat = inFormat;
    this.outFormat = outFormat;
    this.pointsXYZ = new vtkPoints();
    this.polyData = null;

    this.center = null;
    this.mapRadius = Math.sqrt(2);
    this.halfSize = -1;
    this.groundSampleDistance = -1;
    this.clip = 1;
    this.additionalGMTArgs = "";
  }

  public PointCloudFormatConverter setPoints(vtkPoints pointsXYZ) {
    this.pointsXYZ = pointsXYZ;
    return this;
  }

  public vtkPoints getPoints() {
    return pointsXYZ;
  }

  public void setClip(Double clip) {
    this.clip = clip;
  }

  public void setCenter(double[] centerPt) {
    center = new Vector3(centerPt);
  }

  public void setMapRadius(double mapRadius) {
    this.mapRadius = mapRadius;
  }

  public PointCloudFormatConverter setHalfSize(int halfSize) {
    this.halfSize = halfSize;
    return this;
  }

  public PointCloudFormatConverter setGroundSampleDistance(double groundSampleDistance) {
    this.groundSampleDistance = groundSampleDistance;
    return this;
  }

  public PointCloudFormatConverter setGMTArgs(String args) {
    this.additionalGMTArgs = args;
    return this;
  }

  public void read(String inFile, boolean inLLR) {
    switch (inFormat) {
      case ASCII:
        try (BufferedReader br = new BufferedReader(new FileReader(inFile))) {
          String line = br.readLine();
          while (line != null) {
            line = line.trim();
            if (!line.isEmpty() && !line.startsWith("#")) {
              String[] parts = line.split("\\s+");
              if (inLLR) {
                double lon = Math.toRadians(Double.parseDouble(parts[0].trim()));
                double lat = Math.toRadians(Double.parseDouble(parts[1].trim()));
                double range = Double.parseDouble(parts[2].trim());
                double[] xyz = new Vector3D(lon, lat).scalarMultiply(range).toArray();
                pointsXYZ.InsertNextPoint(xyz);
              } else {
                double[] xyz = new double[3];
                xyz[0] = Double.parseDouble(parts[0].trim());
                xyz[1] = Double.parseDouble(parts[1].trim());
                xyz[2] = Double.parseDouble(parts[2].trim());
                pointsXYZ.InsertNextPoint(xyz);
              }
            }
            line = br.readLine();
          }
        } catch (IOException e) {
          logger.error(e.getLocalizedMessage(), e);
        }
        break;
      case BIN3:
      case BIN4:
      case BIN7:
        try (DataInputStream dis =
            new DataInputStream(new BufferedInputStream(new FileInputStream(inFile)))) {
          while (dis.available() > 0) {
            if (inFormat == FORMATS.BIN7) {
              // skip time field
              BinaryUtils.readDoubleAndSwap(dis);
            }
            if (inLLR) {
              double lon = Math.toRadians(BinaryUtils.readDoubleAndSwap(dis));
              double lat = Math.toRadians(BinaryUtils.readDoubleAndSwap(dis));
              double range = BinaryUtils.readDoubleAndSwap(dis);
              double[] xyz = new Vector3D(lon, lat).scalarMultiply(range).toArray();
              pointsXYZ.InsertNextPoint(xyz);
            } else {
              double[] xyz = new double[3];
              xyz[0] = BinaryUtils.readDoubleAndSwap(dis);
              xyz[1] = BinaryUtils.readDoubleAndSwap(dis);
              xyz[2] = BinaryUtils.readDoubleAndSwap(dis);
              pointsXYZ.InsertNextPoint(xyz);
            }
            if (inFormat == FORMATS.BIN4) {
              // skip the w field so the next record starts on an x coordinate
              BinaryUtils.readDoubleAndSwap(dis);
            } else if (inFormat == FORMATS.BIN7) {
              // skip the three spacecraft position fields for the same reason
              for (int i = 0; i < 3; i++) BinaryUtils.readDoubleAndSwap(dis);
            }
          }
        } catch (IOException e) {
          logger.error(e.getLocalizedMessage(), e);
        }
        break;
      case ICQ:
      case OBJ:
      case PLT:
      case PLY:
      case VTK:
        try {
          polyData = PolyDataUtil.loadShapeModel(inFile);
          pointsXYZ.DeepCopy(polyData.GetPoints());
        } catch (Exception e) {
          logger.error(e.getLocalizedMessage(), e);
        }
        break;
      case FITS:
        try {
          polyData = PolyDataUtil.loadFITShapeModel(inFile);
          pointsXYZ.DeepCopy(polyData.GetPoints());
        } catch (Exception e) {
          logger.error(e.getLocalizedMessage(), e);
        }
        break;
      default:
        break;
    }

    if (clip != 1) {
      BoundingBox bbox = new BoundingBox();
      for (int i = 0; i < pointsXYZ.GetNumberOfPoints(); i++) {
        double[] point = pointsXYZ.GetPoint(i);
        bbox.update(new UnwritableVectorIJK(point[0], point[1], point[2]));
      }
      BoundingBox clipped = bbox.getScaledBoundingBox(clip);
      vtkPoints clippedPoints = new vtkPoints();
      for (int i = 0; i < pointsXYZ.GetNumberOfPoints(); i++) {
        if (clipped.contains(pointsXYZ.GetPoint(i)))
          clippedPoints.InsertNextPoint(pointsXYZ.GetPoint(i));
      }
      pointsXYZ = clippedPoints;
      polyData = null;
    }
  }

  public void write(String outFile, boolean outLLR) {
    switch (outFormat) {
      case ASCII:
        try (PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(outFile)))) {
          for (int i = 0; i < pointsXYZ.GetNumberOfPoints(); i++) {
            double[] thisPoint = pointsXYZ.GetPoint(i);
            if (outLLR) {
              Vector3D v = new Vector3D(thisPoint);
              out.printf(
                  "%f %f %f\n",
                  Math.toDegrees(v.getAlpha()), Math.toDegrees(v.getDelta()), v.getNorm());
            } else {
              out.printf("%f %f %f\n", thisPoint[0], thisPoint[1], thisPoint[2]);
            }
          }
        } catch (IOException e) {
          logger.error(e.getLocalizedMessage(), e);
        }
        break;
      case BIN3:
        try (DataOutputStream os =
            new DataOutputStream(new BufferedOutputStream(new FileOutputStream(outFile)))) {

          for (int i = 0; i < pointsXYZ.GetNumberOfPoints(); i++) {
            double[] thisPoint = pointsXYZ.GetPoint(i);
            if (outLLR) {
              Vector3D v = new Vector3D(thisPoint);

              BinaryUtils.writeDoubleAndSwap(os, Math.toDegrees(v.getAlpha()));
              BinaryUtils.writeDoubleAndSwap(os, Math.toDegrees(v.getDelta()));
              BinaryUtils.writeDoubleAndSwap(os, v.getNorm());
            } else {
              for (int ii = 0; ii < 3; ii++) BinaryUtils.writeDoubleAndSwap(os, thisPoint[ii]);
            }
          }
        } catch (IOException e) {
          logger.error(e.getLocalizedMessage(), e);
        }

        break;
      case OBJ:
        if (polyData != null) {
          try {
            PolyDataUtil.saveShapeModelAsOBJ(polyData, outFile);
          } catch (IOException e) {
            logger.error(e.getLocalizedMessage(), e);
          }
        } else {
          if (halfSize < 0 || groundSampleDistance < 0) {
            System.out.printf(
                "Must supply -halfSize and -groundSampleDistance for %s output\n",
                outFormat);
            return;
          }

          final double radius = mapRadius * halfSize * groundSampleDistance;
          vtkPoints vtkPoints = pointsXYZ;
          if (center != null) {
            vtkPoints = new vtkPoints();
            for (int i = 0; i < pointsXYZ.GetNumberOfPoints(); i++) {
              Vector3 pt = new Vector3(pointsXYZ.GetPoint(i));
              if (center.sub(new Vector3(pt)).norm() > radius) continue;
              vtkPoints.InsertNextPoint(pt.toArray());
            }
          }

          PointCloudToPlane pctp = new PointCloudToPlane(vtkPoints, halfSize, groundSampleDistance);
          pctp.getGMU().setFieldToHeight();
          pctp.getGMU().setGMTArgs(additionalGMTArgs);
          try {
            double[][][] regridField = pctp.getGMU().regridField();
            vtkPolyData griddedXYZ = PolyDataUtil.loadLocalFitsLLRModelN(regridField);
            PolyDataUtil.saveShapeModelAsOBJ(griddedXYZ, outFile);
          } catch (Exception e) {
            logger.error(e.getLocalizedMessage(), e);
          }
        }
        break;
      case VTK:
        if (polyData == null) {
          polyData = new vtkPolyData();
          polyData.SetPoints(pointsXYZ);
        }

        vtkCellArray cells = new vtkCellArray();
        vtkFloatArray albedo = new vtkFloatArray();
        albedo.SetName("albedo");
        polyData.SetPolys(cells);
        polyData.GetPointData().AddArray(albedo);

        for (int i = 0; i < pointsXYZ.GetNumberOfPoints(); i++) {
          vtkIdList idList = new vtkIdList();
          idList.InsertNextId(i);
          cells.InsertNextCell(idList);
          albedo.InsertNextValue(0.5f);
        }

        vtkPolyDataWriter writer = new vtkPolyDataWriter();
        writer.SetInputData(polyData);
        writer.SetFileName(outFile);
        writer.SetFileTypeToBinary();
        writer.Update();
        break;
      default:
        break;
    }
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("logFile")
            .hasArg()
            .desc("If present, save screen output to log file.")
            .build());
    StringBuilder sb = new StringBuilder();
    for (StandardLevel l : StandardLevel.values()) sb.append(String.format("%s ", l.name()));
    options.addOption(
        Option.builder("logLevel")
            .hasArg()
            .desc(
                "If present, print messages above selected priority. Valid values are "
                    + sb.toString().trim()
                    + ". Default is INFO.")
            .build());
    options.addOption(
        Option.builder("inputFormat")
            .hasArg()
            .desc(
                "Format of input file. If not present, format will be inferred from the inputFile extension.")
            .build());
    options.addOption(
        Option.builder("inputFile")
            .required()
            .hasArg()
            .desc("Required. Name of input file.")
            .build());
    options.addOption(
        Option.builder("outputFormat")
            .hasArg()
            .desc(
                "Format of output file. If not present, format will be inferred from the outputFile extension.")
            .build());
    options.addOption(
        Option.builder("outputFile")
            .required()
            .hasArg()
            .desc("Required. Name of output file.")
            .build());
    options.addOption(
        Option.builder("inllr")
            .desc(
                "Only used with ASCII or BINARY formats. If present, input values are assumed to be lon, lat, rad. Default is x, y, z.")
            .build());
    options.addOption(
        Option.builder("outllr")
            .desc(
                "Only used with ASCII or BINARY formats. If present, output values will be lon, lat, rad. Default is x, y, z.")
            .build());
    options.addOption(
        Option.builder("centerXYZ")
            .hasArg()
            .desc(
                "Only used to generate OBJ output. Center output shape on supplied coordinates. Specify XYZ coordinates as three floating point numbers separated"
                    + " by commas.")
            .build());
    options.addOption(
        Option.builder("centerLonLat")
            .hasArg()
            .desc(
                "Only used to generate OBJ output. Center output shape on supplied lon,lat. Specify lon,lat in degrees as floating point numbers separated"
                    + " by a comma. Shape will be centered on the point closest to this lon,lat pair.")
            .build());
    options.addOption(
        Option.builder("halfSize")
            .hasArg()
            .desc(
                "Only used to generate OBJ output. Used with -groundSampleDistance to resample to a uniform grid. Grid dimensions are (2*halfSize+1)x(2*halfSize+1).")
            .build());
    options.addOption(
        Option.builder("groundSampleDistance")
            .hasArg()
            .desc(
                "Used with -halfSize to resample to a uniform grid. Spacing between grid points. Only used to generate OBJ output. "
                    + "Units are the same as the input file, usually km.")
            .build());
    options.addOption(
        Option.builder("mapRadius")
            .hasArg()
            .desc(
                "Only used to generate OBJ output. Used with -centerXYZ to resample to a uniform grid. Only include points within "
                    + "mapRadius*groundSampleDistance*halfSize of centerXYZ. Default value is sqrt(2).")
            .build());
    options.addOption(
        Option.builder("gmtArgs")
            .hasArg()
            .longOpt("gmt-args")
            .desc(
                "Only used to generate OBJ output. Pass additional options to GMTSurface. May be used multiple times; use once per additional argument.")
            .build());
    options.addOption(
        Option.builder("clip")
            .hasArg()
            .desc(
                "Shrink bounding box to a relative size of <arg> and clip any points outside of it. Default is 1 (no clipping).")
            .build());
    return options;
  }

  public static void main(String[] args) {
    TerrasaurTool defaultOBJ = new PointCloudFormatConverter();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    NativeLibraryLoader.loadVtkLibraries();
    NativeLibraryLoader.loadSpiceLibraries();

    String inFile = cl.getOptionValue("inputFile");
    String outFile = cl.getOptionValue("outputFile");
    boolean inLLR = cl.hasOption("inllr");
    boolean outLLR = cl.hasOption("outllr");

    FORMATS inFormat =
        cl.hasOption("inputFormat")
            ? FORMATS.valueOf(cl.getOptionValue("inputFormat").toUpperCase())
            : FORMATS.formatFromExtension(cl.getOptionValue("inputFile"));
    FORMATS outFormat =
        cl.hasOption("outputFormat")
            ? FORMATS.valueOf(cl.getOptionValue("outputFormat").toUpperCase())
            : FORMATS.formatFromExtension(cl.getOptionValue("outputFile"));

    PointCloudFormatConverter pcfc = new PointCloudFormatConverter(inFormat, outFormat);

    if (cl.hasOption("centerXYZ")) {
      String[] params = cl.getOptionValue("centerXYZ").split(",");
      double[] array = new double[3];
      for (int i = 0; i < 3; i++) array[i] = Double.parseDouble(params[i].trim());
      pcfc.setCenter(array);
    }

    if (cl.hasOption("clip")) {
      pcfc.setClip(Double.valueOf(cl.getOptionValue("clip")));
    }

    if (cl.hasOption("gmtArgs")) {
      StringBuilder gmtArgs = new StringBuilder();
      for (String arg : cl.getOptionValues("gmtArgs")) gmtArgs.append(String.format("%s ", arg));
      pcfc.setGMTArgs(gmtArgs.toString());
    }

    pcfc.read(inFile, inLLR);

    if (cl.hasOption("centerLonLat")) {
      String[] params = cl.getOptionValue("centerLonLat").split(",");
      Vector3D lcDir =
          new Vector3D(
              Math.toRadians(Double.parseDouble(params[0].trim())),
              Math.toRadians(Double.parseDouble(params[1].trim())));
      double[] center = null;
      double minSep = Double.MAX_VALUE;
      vtkPoints vtkPoints = pcfc.getPoints();
      for (int i = 0; i < vtkPoints.GetNumberOfPoints(); i++) {
        double[] pt = vtkPoints.GetPoint(i);
        double sep = Vector3D.angle(lcDir, new Vector3D(pt));
        if (sep < minSep) {
          minSep = sep;
          center = pt;
        }
      }
      pcfc.setCenter(center);
    }

    pcfc.setMapRadius(
        cl.hasOption("mapRadius") ? Double.parseDouble(cl.getOptionValue("mapRadius")) : Math.sqrt(2));

    if (cl.hasOption("halfSize") && cl.hasOption("groundSampleDistance")) {
      // resample on a uniform XY grid
      pcfc.setHalfSize(Integer.parseInt(cl.getOptionValue("halfSize")));
      pcfc.setGroundSampleDistance(Double.parseDouble(cl.getOptionValue("groundSampleDistance")));
    }

    pcfc.write(outFile, outLLR);
  }
}
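
For reference, a minimal sketch of using this class from Java rather than the command line:
read an ASCII x y z point cloud and write it back out as a BIN3 binary file. The file names
are hypothetical placeholders.

    package terrasaur.examples;

    import terrasaur.apps.PointCloudFormatConverter;
    import terrasaur.enums.FORMATS;
    import terrasaur.utils.NativeLibraryLoader;

    public class PointCloudExample {
      public static void main(String[] args) {
        // vtkPoints is backed by the VTK native libraries
        NativeLibraryLoader.loadVtkLibraries();

        PointCloudFormatConverter pcfc =
            new PointCloudFormatConverter(FORMATS.ASCII, FORMATS.BIN3);
        pcfc.read("cloud.txt", false);   // false: columns are x y z, not lon lat rad
        pcfc.write("cloud.bin3", false); // write double precision x y z records

        System.out.printf("converted %d points%n", pcfc.getPoints().GetNumberOfPoints());
      }
    }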
389
src/main/java/terrasaur/apps/PointCloudToPlane.java
Normal file
@@ -0,0 +1,389 @@
package terrasaur.apps;
|
||||
|
||||
import java.awt.Rectangle;
|
||||
import java.awt.image.BufferedImage;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.PrintStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.logging.log4j.spi.StandardLevel;
|
||||
import picante.math.coords.CoordConverters;
|
||||
import picante.math.vectorspace.VectorIJK;
|
||||
import spice.basic.LatitudinalCoordinates;
|
||||
import spice.basic.Matrix33;
|
||||
import spice.basic.SpiceException;
|
||||
import spice.basic.Vector3;
|
||||
import terrasaur.enums.FORMATS;
|
||||
import terrasaur.templates.TerrasaurTool;
|
||||
import terrasaur.utils.*;
|
||||
import terrasaur.utils.math.MathConversions;
|
||||
import terrasaur.utils.saaPlotLib.canvas.DiscreteDataPlot;
|
||||
import terrasaur.utils.saaPlotLib.canvas.MapPlot;
|
||||
import terrasaur.utils.saaPlotLib.canvas.PlotCanvas;
|
||||
import terrasaur.utils.saaPlotLib.canvas.axis.AxisX;
|
||||
import terrasaur.utils.saaPlotLib.canvas.axis.AxisY;
|
||||
import terrasaur.utils.saaPlotLib.canvas.projection.ProjectionOrthographic;
|
||||
import terrasaur.utils.saaPlotLib.canvas.symbol.Circle;
|
||||
import terrasaur.utils.saaPlotLib.colorMaps.ColorRamp;
|
||||
import terrasaur.utils.saaPlotLib.colorMaps.ColorRamp.TYPE;
|
||||
import terrasaur.utils.saaPlotLib.colorMaps.ImmutableColorBar;
|
||||
import terrasaur.utils.saaPlotLib.config.ImmutablePlotConfig;
|
||||
import terrasaur.utils.saaPlotLib.config.PlotConfig;
|
||||
import terrasaur.utils.saaPlotLib.data.DiscreteDataSet;
|
||||
import terrasaur.utils.saaPlotLib.util.StringFunctions;
|
||||
import vtk.vtkPoints;
|
||||
|
||||
public class PointCloudToPlane implements TerrasaurTool {
|
||||
|
||||
private static final Logger logger = LogManager.getLogger();
|
||||
|
||||
@Override
|
||||
public String shortDescription() {
|
||||
return "Find a rotation and translation to transform a point cloud to a height field above the best fit plane.";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String fullDescription(Options options) {
|
||||
String header = "";
|
||||
String footer =
|
||||
"\nThis program finds a rotation and translation to transform a point cloud to a height field above the best fit plane. "
|
||||
+ "Supported input formats are ASCII, BINARY, L2, OBJ, and VTK.\n\n"
|
||||
+ "ASCII format is white spaced delimited x y z coordinates. BINARY files must contain double precision x y z coordinates. ";
|
||||
return TerrasaurTool.super.fullDescription(options, header, footer);
|
||||
}
|
||||
|
||||
private GMTGridUtil gmu;
|
||||
|
||||
public GMTGridUtil getGMU() {
|
||||
return gmu;
|
||||
}
|
||||
|
||||
public void writeOutput(String outputFile) {
|
||||
if (outputFile != null) {
|
||||
try (PrintStream ps = new PrintStream(outputFile)) {
|
||||
double[][] transformation = gmu.getTransformation();
|
||||
for (int i = 0; i < 4; i++) {
|
||||
for (int j = 0; j < 4; j++) {
|
||||
ps.printf("%24.16e ", transformation[i][j]);
|
||||
}
|
||||
ps.println();
|
||||
}
|
||||
} catch (FileNotFoundException e) {
|
||||
logger.error(e.getLocalizedMessage(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private PointCloudToPlane() {}
|
||||
|
||||
public PointCloudToPlane(vtkPoints points) {
|
||||
this(points, 0, 0.);
|
||||
}
|
||||
|
||||
public PointCloudToPlane(vtkPoints points, int halfSize, double groundSampleDistance) {
|
||||
double[] x = new double[(int) points.GetNumberOfPoints()];
|
||||
double[] y = new double[(int) points.GetNumberOfPoints()];
|
||||
double[] z = new double[(int) points.GetNumberOfPoints()];
|
||||
|
||||
for (int i = 0; i < points.GetNumberOfPoints(); i++) {
|
||||
double[] thisPoint = points.GetPoint(i);
|
||||
x[i] = thisPoint[0];
|
||||
y[i] = thisPoint[1];
|
||||
z[i] = thisPoint[2];
|
||||
}
|
||||
|
||||
gmu = new GMTGridUtil(halfSize, groundSampleDistance);
|
||||
gmu.setXYZ(x, y, z);
|
||||
}
|
||||
|
||||
public BufferedImage makePlot(List<Vector3> points, String name) throws SpiceException {
|
||||
DescriptiveStatistics stats = new DescriptiveStatistics();
|
||||
VectorStatistics vStats = new VectorStatistics();
|
||||
DiscreteDataSet data = new DiscreteDataSet(name);
|
||||
|
||||
PlotConfig config = ImmutablePlotConfig.builder().title(name).build();
|
||||
|
||||
DiscreteDataPlot canvas;
|
||||
boolean orthographic = false;
|
||||
if (orthographic) {
|
||||
for (Vector3 p : points) {
|
||||
stats.addValue(p.getElt(2));
|
||||
vStats.add(p);
|
||||
LatitudinalCoordinates lc = new LatitudinalCoordinates(p);
|
||||
data.add(lc.getLongitude(), lc.getLatitude(), 0, p.getElt(2));
|
||||
}
|
||||
|
||||
double min = stats.getMin();
|
||||
double max = stats.getMax();
|
||||
ColorRamp ramp = ColorRamp.create(TYPE.CBSPECTRAL, min, max).createReverse();
|
||||
data.setSymbol(new Circle().setSize(1.0));
|
||||
data.setColorRamp(ramp);
|
||||
|
||||
Vector3 center = MathConversions.toVector3(vStats.getMean());
|
||||
|
||||
double halfExtent = 0;
|
||||
for (Vector3 p : points) {
|
||||
double dist = center.sep(p);
|
||||
if (dist > halfExtent) halfExtent = dist;
|
||||
}
|
||||
|
||||
ProjectionOrthographic p =
|
||||
new ProjectionOrthographic(
|
||||
config.width(),
|
||||
config.height(),
|
||||
CoordConverters.convertToLatitudinal(
|
||||
new VectorIJK(center.getElt(0), center.getElt(1), center.getElt(2))));
|
||||
p.setRadius(Math.max(0.5, .6 / halfExtent));
|
||||
|
||||
canvas = new MapPlot(config, p);
|
||||
canvas.drawAxes();
|
||||
canvas.plot(data);
|
||||
((MapPlot) canvas).drawLatLonGrid(Math.toRadians(5), Math.toRadians(5), true);
|
||||
|
||||
canvas.drawColorBar(
|
||||
ImmutableColorBar.builder()
|
||||
.rect(new Rectangle(config.leftMargin(), 40, config.width(), 10))
|
||||
.ramp(ramp)
|
||||
.numTicks(5)
|
||||
.tickFunction(StringFunctions.fixedFormat("%.3f"))
|
||||
.build());
|
||||
} else {
|
||||
for (Vector3 p : points) {
|
||||
stats.addValue(p.getElt(2));
|
||||
vStats.add(p);
|
||||
data.add(p.getElt(0), p.getElt(1), 0, p.getElt(2));
|
||||
}
|
||||
|
||||
double min = stats.getMin();
|
||||
double max = stats.getMax();
|
||||
ColorRamp ramp = ColorRamp.create(TYPE.CBSPECTRAL, min, max).createReverse();
|
||||
data.setSymbol(new Circle().setSize(1.0));
|
||||
data.setColorRamp(ramp);
|
||||
|
||||
canvas = new DiscreteDataPlot(config);
|
||||
AxisX xAxis = data.defaultXAxis("X");
|
||||
AxisY yAxis = data.defaultYAxis("Y");
|
||||
canvas.setAxes(xAxis, yAxis);
|
||||
canvas.drawAxes();
|
||||
canvas.plot(data);
|
||||
|
||||
canvas.drawColorBar(
|
||||
ImmutableColorBar.builder()
|
||||
.rect(new Rectangle(config.leftMargin(), 40, config.width(), 10))
|
||||
.ramp(ramp)
|
||||
.numTicks(5)
|
||||
.tickFunction(StringFunctions.fixedFormat("%.3f"))
|
||||
.build());
|
||||
}
|
||||
return canvas.getImage();
|
||||
}
|
||||
|
||||
private static Options defineOptions() {
|
||||
Options options = TerrasaurTool.defineOptions();
|
||||
options.addOption(
|
||||
Option.builder("inputFormat")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"Format of input file. If not present format is inferred from inputFile extension.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("inputFile")
|
||||
.required()
|
||||
.hasArg()
|
||||
.desc("Required. Name of input file.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("inllr")
|
||||
.desc(
|
||||
"If present, input values are assumed to be lon, lat, rad. Default is x, y, z. Only used with ASCII or BINARY formats.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("logFile")
|
||||
.hasArg()
|
||||
.desc("If present, save screen output to log file.")
|
||||
.build());
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for (StandardLevel l : StandardLevel.values()) sb.append(String.format("%s ", l.name()));
|
||||
options.addOption(
|
||||
Option.builder("logLevel")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"If present, print messages above selected priority. Valid values are "
|
||||
+ sb.toString().trim()
|
||||
+ ". Default is INFO.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("outputFile")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"Name of output file to contain 4x4 transformation matrix. The top left 3x3 matrix is the rotation matrix. The top "
|
||||
+ "three entries in the right hand column are the translation vector. The bottom row is always 0 0 0 1.\nTo convert "
|
||||
+ "from global to local:\n transformed = rotation.mxv(point.sub(translation))")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("translate")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"Translate surface points and spacecraft position. "
|
||||
+ "Specify by three floating point numbers separated by commas. "
|
||||
+ "Default is to use centroid of input point cloud.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("plotXYZ")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"Plot X vs Y (in the local frame) colored by Z. "
|
||||
+ "Argument is the name of PNG file to write.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("plotXYR")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"Plot X vs Y (in the local frame) colored by R. "
|
||||
+ "Argument is the name of PNG file to write.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("slope")
|
||||
.desc(
|
||||
"Choose local coordinate frame such that Z points normal to the plane "
|
||||
+ "and X points along the direction of steepest descent.")
|
||||
.build());
|
||||
return options;
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws SpiceException {
|
||||
TerrasaurTool defaultOBJ = new PointCloudToPlane();
|
||||
|
||||
Options options = defineOptions();
|
||||
|
||||
CommandLine cl = defaultOBJ.parseArgs(args, options);
|
||||
|
||||
Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
|
||||
for (MessageLabel ml : startupMessages.keySet())
|
||||
logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));
|
||||
|
||||
NativeLibraryLoader.loadVtkLibraries();
|
||||
NativeLibraryLoader.loadSpiceLibraries();
|
||||
|
||||
String inFile = cl.getOptionValue("inputFile");
|
||||
boolean inLLR = cl.hasOption("inllr");
|
||||
|
||||
FORMATS inFormat =
|
||||
cl.hasOption("inputFormat")
|
||||
? FORMATS.valueOf(cl.getOptionValue("inputFormat").toUpperCase())
|
||||
: FORMATS.formatFromExtension(inFile);
|
||||
|
||||
PointCloudFormatConverter pcfc = new PointCloudFormatConverter(inFormat, FORMATS.VTK);
|
||||
pcfc.read(inFile, inLLR);
|
||||
System.out.printf("%d points read from %s\n", pcfc.getPoints().GetNumberOfPoints(), inFile);
|
||||
|
||||
int halfSize = 0;
|
||||
double groundSampleDistance = 0;
|
||||
|
||||
vtkPoints points = pcfc.getPoints();
|
||||
PointCloudToPlane pctp = new PointCloudToPlane(points, halfSize, groundSampleDistance);
|
||||
|
||||
Vector3 translation;
|
||||
if (cl.hasOption("translate")) {
|
||||
translation =
|
||||
MathConversions.toVector3(VectorUtils.stringToVector3D(cl.getOptionValue("translate")));
|
||||
pctp.getGMU().setTranslation(translation.toArray());
|
||||
}
|
||||
pctp.getGMU().calculateTransformation();
|
||||
|
||||
List<Vector3> globalPts = new ArrayList<>();
|
||||
for (int i = 0; i < points.GetNumberOfPoints(); i++)
|
||||
globalPts.add(new Vector3(points.GetPoint(i)));
|
||||
|
||||
double[][] transformation = pctp.getGMU().getTransformation();
|
||||
StringBuilder sb =
|
||||
new StringBuilder(
|
||||
String.format(
|
||||
"translation vector:\n%24.16e%24.16e%24.16e\n",
|
||||
transformation[0][3], transformation[1][3], transformation[2][3]));
|
||||
logger.info(sb.toString());
|
||||
sb = new StringBuilder("rotation matrix:\n");
|
||||
for (int i = 0; i < 3; i++)
|
||||
sb.append(
|
||||
String.format(
|
||||
"%24.16e%24.16e%24.16e\n",
|
||||
transformation[i][0], transformation[i][1], transformation[i][2]));
|
||||
logger.info(sb.toString());
|
||||
|
||||
Matrix33 rotation = new Matrix33(pctp.getGMU().getRotation());
|
||||
translation = new Vector3(pctp.getGMU().getTranslation());
|
||||
|
||||
if (cl.hasOption("slope")) {
|
||||
Vector3 z = rotation.xpose().mxv(new Vector3(0, 0, 1));
|
||||
VectorStatistics vStats = new VectorStatistics();
|
||||
for (Vector3 pt : globalPts) vStats.add(pt);
|
||||
|
||||
Vector3 r = MathConversions.toVector3(vStats.getMean());
|
||||
|
||||
Vector3 y = r.cross(z).hat();
|
||||
Vector3 x = y.cross(z).hat();
|
||||
rotation = new Matrix33(x, y, z);
|
||||
}
|
||||
|
||||
List<Vector3> localPts = new ArrayList<>();
|
||||
for (Vector3 p : globalPts) localPts.add(rotation.mxv(p.sub(translation)));
|
||||
|
||||
VectorStatistics vStats = new VectorStatistics();
|
||||
for (Vector3 localPt : localPts) vStats.add(localPt);
|
||||
|
||||
if (cl.hasOption("plotXYZ")) {
|
||||
BufferedImage image = pctp.makePlot(localPts, "Z (height above plane)");
|
||||
PlotCanvas.writeImage(cl.getOptionValue("plotXYZ"), image);
|
||||
}
|
||||
|
||||
if (cl.hasOption("plotXYR")) {
|
||||
|
||||
// rotate but don't translate
|
||||
List<Vector3> xyr = new ArrayList<>();
|
||||
for (Vector3 p : globalPts) {
|
||||
Vector3 v = rotation.mxv(p);
|
||||
xyr.add(new Vector3(v.getElt(0), v.getElt(1), v.norm()));
|
||||
}
|
||||
|
||||
BufferedImage image = pctp.makePlot(xyr, "R");
|
||||
PlotCanvas.writeImage(cl.getOptionValue("plotXYR"), image);
|
||||
}
|
||||
|
||||
logger.info("statistics on full set");
|
||||
logger.info(vStats);
|
||||
|
||||
Vector3 mean = MathConversions.toVector3(vStats.getMean());
|
||||
Vector3 std = MathConversions.toVector3(vStats.getStandardDeviation());
|
||||
double scale = 5;
|
||||
List<Double> minList = new ArrayList<>();
|
||||
List<Double> maxList = new ArrayList<>();
|
||||
for (int i = 0; i < 3; i++) {
|
||||
minList.add(mean.getElt(i) - scale * std.getElt(i));
|
||||
maxList.add(mean.getElt(i) + scale * std.getElt(i));
|
||||
}
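    // 5-sigma clip: recompute the statistics using only points whose local
    // frame coordinates lie within scale (= 5) standard deviations of the
    // mean on every axis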
|
||||
|
||||
vStats = new VectorStatistics();
|
||||
for (Vector3 v : localPts) {
|
||||
boolean addThis = true;
|
||||
for (int i = 0; i < 3; i++) {
|
||||
if (v.getElt(i) < minList.get(i) || v.getElt(i) > maxList.get(i)) {
|
||||
addThis = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (addThis) vStats.add(v);
|
||||
}
|
||||
|
||||
logger.info("statistics on set without points more than 5 standard deviations from the mean:");
|
||||
logger.info(vStats);
|
||||
|
||||
if (cl.hasOption("outputFile")) pctp.writeOutput(cl.getOptionValue("outputFile"));
|
||||
}
|
||||
}
|
||||
72
src/main/java/terrasaur/apps/PrintShapeModelStatistics.java
Normal file
@@ -0,0 +1,72 @@
|
||||
package terrasaur.apps;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import terrasaur.templates.TerrasaurTool;
|
||||
import terrasaur.utils.NativeLibraryLoader;
|
||||
import terrasaur.utils.PolyDataStatistics;
|
||||
import terrasaur.utils.PolyDataUtil;
|
||||
import vtk.vtkPolyData;
|
||||
|
||||
/**
|
||||
 * PrintShapeModelStatistics program. Takes a shape model in OBJ format and prints out various
 * statistics about it.
|
||||
*
|
||||
* @author Eli Kahn
|
||||
* @version 1.0
|
||||
*/
|
||||
public class PrintShapeModelStatistics implements TerrasaurTool {
|
||||
|
||||
private static final Logger logger = LogManager.getLogger();
|
||||
|
||||
private PrintShapeModelStatistics() {}
|
||||
|
||||
@Override
|
||||
public String shortDescription() {
|
||||
return "Print statistics about a shape model.";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String fullDescription(Options options) {
|
||||
String header = "This program prints various statistics about a shape model in OBJ format.";
|
||||
String footer = "";
|
||||
return TerrasaurTool.super.fullDescription(options, header, footer);
|
||||
}
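  /*
   * Typical invocation (a sketch; the launcher name comes from the installed
   * scripts/ directory and the path is hypothetical):
   *
   *   PrintShapeModelStatistics -objFile path/to/shape.obj
   *
   * Each statistic is written to the log, one entry per line.
   */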
|
||||
|
||||
private static Options defineOptions() {
|
||||
Options options = TerrasaurTool.defineOptions();
|
||||
options.addOption(
|
||||
Option.builder("objFile").required().hasArg().desc("Path to OBJ file.").build());
|
||||
return options;
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
TerrasaurTool defaultOBJ = new PrintShapeModelStatistics();
|
||||
|
||||
Options options = defineOptions();
|
||||
|
||||
CommandLine cl = defaultOBJ.parseArgs(args, options);
|
||||
|
||||
Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
|
||||
for (MessageLabel ml : startupMessages.keySet())
|
||||
logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));
|
||||
|
||||
String filename = cl.getOptionValue("objFile");
|
||||
|
||||
NativeLibraryLoader.loadVtkLibraries();
|
||||
|
||||
vtkPolyData polydata = PolyDataUtil.loadShapeModelAndComputeNormals(filename);
|
||||
|
||||
PolyDataStatistics stat = new PolyDataStatistics(polydata);
|
||||
ArrayList<String> stats = stat.getShapeModelStats();
|
||||
for (String line : stats) {
|
||||
logger.info(line);
|
||||
}
|
||||
}
|
||||
}
|
||||
407
src/main/java/terrasaur/apps/RangeFromSumFile.java
Normal file
@@ -0,0 +1,407 @@
|
||||
package terrasaur.apps;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.AbstractMap;
|
||||
import java.util.Map;
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
|
||||
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import picante.math.vectorspace.VectorIJK;
|
||||
import spice.basic.Plane;
|
||||
import spice.basic.SpiceException;
|
||||
import spice.basic.Vector3;
|
||||
import terrasaur.smallBodyModel.SmallBodyModel;
|
||||
import terrasaur.templates.TerrasaurTool;
|
||||
import terrasaur.utils.*;
|
||||
import terrasaur.utils.math.MathConversions;
|
||||
import terrasaur.utils.mesh.TriangularFacet;
|
||||
import vtk.vtkIdList;
|
||||
import vtk.vtkPolyData;
|
||||
|
||||
public class RangeFromSumFile implements TerrasaurTool {
|
||||
private static final Logger logger = LogManager.getLogger();
|
||||
|
||||
@Override
|
||||
public String shortDescription() {
|
||||
return "Calculate range to surface from a sumfile.";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String fullDescription(Options options) {
|
||||
|
||||
String header = "";
|
||||
String footer =
|
||||
"""
|
||||
This program reads a sumfile along with a shape model and \
|
||||
calculates the range to the surface. NOTE: Spacecraft position is \
|
||||
assumed to be in kilometers. If not, use the -distanceScale option \
|
||||
to convert to km.
|
||||
""";
|
||||
return TerrasaurTool.super.fullDescription(options, header, footer);
|
||||
}
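  /*
   * Typical invocation (a sketch; file names are hypothetical):
   *
   *   RangeFromSumFile -sumFile image.SUM -objFile shape.obj -pixelOffset 10,-5
   *
   * Output is the header from getHeader() followed by one line per sampled
   * pixel that intersects the shape model.
   */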
|
||||
|
||||
private SumFile sumFile;
|
||||
private vtkPolyData polyData;
|
||||
private SmallBodyModel smallBodyModel;
|
||||
|
||||
private int xOffset;
|
||||
private int yOffset;
|
||||
|
||||
private long facet;
|
||||
|
||||
private Vector3D scPos;
|
||||
private Vector3D sunXYZ;
|
||||
private Vector3D surfaceIntercept;
|
||||
|
||||
private double tiltDeg;
|
||||
private double tiltDir;
|
||||
|
||||
private double incidence;
|
||||
private double emission;
|
||||
private double phase;
|
||||
|
||||
private double scAzimuth;
|
||||
private double scElevation;
|
||||
|
||||
private double sunAzimuth;
|
||||
private double sunElevation;
|
||||
|
||||
private DescriptiveStatistics stats;
|
||||
private double centerX, centerY;
|
||||
|
||||
public DescriptiveStatistics getStats() {
|
||||
return stats;
|
||||
}
|
||||
|
||||
private RangeFromSumFile() {}
|
||||
|
||||
public RangeFromSumFile(SumFile sumFile, vtkPolyData polyData) {
|
||||
|
||||
this.sumFile = sumFile;
|
||||
|
||||
int nPixelsX = sumFile.imageWidth();
|
||||
int nPixelsY = sumFile.imageHeight();
|
||||
centerX = 0.5 * (nPixelsX - 1);
|
||||
centerY = 0.5 * (nPixelsY - 1);
|
||||
|
||||
this.polyData = polyData;
|
||||
|
||||
smallBodyModel = new SmallBodyModel(polyData);
|
||||
|
||||
scPos = sumFile.scobj().negate();
|
||||
sunXYZ = sumFile.sunDirection();
|
||||
|
||||
stats = new DescriptiveStatistics();
|
||||
}
|
||||
|
||||
public void setDistanceScale(double distanceScale) {
|
||||
this.scPos = sumFile.scobj().scalarMultiply(distanceScale).negate();
|
||||
}
|
||||
|
||||
/**
|
||||
* @param xOffset x offset in pixels
|
||||
* @param yOffset y offset in pixels
|
||||
* @return key is cell index, value is surface intercept for the desired pixel offset from the center of the image.
|
||||
*/
|
||||
public Map.Entry<Long, Vector3D> findIntercept(int xOffset, int yOffset) {
|
||||
|
||||
this.xOffset = xOffset;
|
||||
this.yOffset = yOffset;
|
||||
|
||||
Vector3D lookDir = new Vector3D(1.0, sumFile.boresight());
|
||||
|
||||
if (xOffset != 0) {
|
||||
Vector3D offset = new Vector3D(-xOffset, sumFile.xPerPixel());
|
||||
lookDir = lookDir.add(offset);
|
||||
}
|
||||
|
||||
if (yOffset != 0) {
|
||||
Vector3D offset = new Vector3D(-yOffset, sumFile.yPerPixel());
|
||||
lookDir = lookDir.add(offset);
|
||||
}
|
||||
|
||||
double[] tmp = new double[3];
|
||||
facet = smallBodyModel.computeRayIntersection(scPos.toArray(), lookDir.toArray(), tmp);
|
||||
|
||||
if (facet == -1) {
|
||||
surfaceIntercept = null;
|
||||
} else {
|
||||
surfaceIntercept = new Vector3D(tmp);
|
||||
|
||||
vtkIdList idList = new vtkIdList();
|
||||
double[] pt0 = new double[3];
|
||||
double[] pt1 = new double[3];
|
||||
double[] pt2 = new double[3];
|
||||
|
||||
polyData.GetCellPoints(facet, idList);
|
||||
|
||||
// get the ids for each point
|
||||
long id0 = idList.GetId(0);
|
||||
long id1 = idList.GetId(1);
|
||||
long id2 = idList.GetId(2);
|
||||
|
||||
// get points that comprise the cell
|
||||
polyData.GetPoint(id0, pt0);
|
||||
polyData.GetPoint(id1, pt1);
|
||||
polyData.GetPoint(id2, pt2);
|
||||
|
||||
TriangularFacet facet =
|
||||
new TriangularFacet(new VectorIJK(pt0), new VectorIJK(pt1), new VectorIJK(pt2));
|
||||
|
||||
Vector3 center3 = MathConversions.toVector3(facet.getCenter());
|
||||
Vector3D center3D = MathConversions.toVector3D(facet.getCenter());
|
||||
Vector3 normal3 = MathConversions.toVector3(facet.getNormal());
|
||||
Vector3D normal3D = MathConversions.toVector3D(facet.getNormal());
|
||||
|
||||
tiltDeg = Math.toDegrees(center3.sep(normal3));
|
||||
if (tiltDeg > 90) tiltDeg = 180 - tiltDeg;
|
||||
|
||||
tiltDir = Tilts.basicTiltDirDeg(surfaceIntercept.getAlpha(), normal3D);
|
||||
|
||||
|
||||
incidence = Vector3D.angle(sunXYZ, normal3D);
|
||||
emission = Vector3D.angle(scPos, normal3D);
|
||||
phase = Vector3D.angle(sunXYZ, scPos.subtract(center3D));
|
||||
|
||||
try {
|
||||
// scPos is in body fixed coordinates
|
||||
Plane p = new Plane(normal3, center3);
|
||||
Vector3 projectedNorth = p.project(new Vector3(0, 0, 1).add(center3)).sub(center3);
|
||||
Vector3 projected = p.project(MathConversions.toVector3(scPos)).sub(center3);
|
||||
|
||||
scAzimuth = projected.sep(projectedNorth);
|
||||
if (projected.cross(projectedNorth).dot(center3) < 0) scAzimuth = 2 * Math.PI - scAzimuth;
|
||||
scElevation = Math.PI / 2 - emission;
|
||||
|
||||
// sunXYZ is a unit vector pointing to the sun
|
||||
projected = p.project(MathConversions.toVector3(sunXYZ).add(center3)).sub(center3);
|
||||
|
||||
sunAzimuth = projected.sep(projectedNorth);
|
||||
if (projected.cross(projectedNorth).dot(center3) < 0) sunAzimuth = 2 * Math.PI - sunAzimuth;
|
||||
sunElevation = Math.PI / 2 - incidence;
|
||||
} catch (SpiceException e) {
|
||||
logger.error(e.getLocalizedMessage(), e);
|
||||
}
|
||||
|
||||
stats.addValue(scPos.distance(surfaceIntercept));
|
||||
}
|
||||
return new AbstractMap.SimpleEntry<>(facet, surfaceIntercept);
|
||||
}
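  /*
   * A minimal sketch of calling findIntercept (using only names defined in
   * this file); offsets are in pixels from the image center:
   *
   *   RangeFromSumFile rfsf = new RangeFromSumFile(sumFile, polyData);
   *   Map.Entry<Long, Vector3D> hit = rfsf.findIntercept(0, 0); // center pixel
   *   if (hit.getKey() != -1) {
   *     double rangeKm = sumFile.scobj().negate().distance(hit.getValue());
   *   }
   */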
|
||||
|
||||
public String getHeader(String filename) {
|
||||
    StringBuilder sb = new StringBuilder();
|
||||
sb.append("# x increases to the right and y increases down. Top left corner is 0, 0.\n");
|
||||
sb.append(String.format("# %s\n", filename));
|
||||
sb.append(String.format("%7s", "# x"));
|
||||
sb.append(String.format("%7s", "y"));
|
||||
sb.append(StringUtils.center("facet", 8));
|
||||
sb.append(StringUtils.center("Tilt", 12));
|
||||
sb.append(StringUtils.center("Tilt Dir", 12));
|
||||
sb.append(StringUtils.center("s/c position XYZ", 36));
|
||||
sb.append(StringUtils.center("surface intercept XYZ", 36));
|
||||
sb.append(StringUtils.center("lon", 12));
|
||||
sb.append(StringUtils.center("lat", 12));
|
||||
sb.append(StringUtils.center("rad", 12));
|
||||
sb.append(StringUtils.center("range", 12));
|
||||
sb.append(StringUtils.center("inc", 12));
|
||||
sb.append(StringUtils.center("ems", 12));
|
||||
sb.append(StringUtils.center("phase", 12));
|
||||
sb.append(StringUtils.center("s/c az", 12));
|
||||
sb.append(StringUtils.center("s/c el", 12));
|
||||
sb.append(StringUtils.center("sun az", 12));
|
||||
sb.append(StringUtils.center("sun el", 12));
|
||||
|
||||
sb.append("\n");
|
||||
sb.append(String.format("%7s", "# "));
|
||||
sb.append(String.format("%7s", ""));
|
||||
sb.append(String.format("%8s", ""));
|
||||
sb.append(StringUtils.center("(deg)", 12));
|
||||
sb.append(StringUtils.center("(deg)", 12));
|
||||
sb.append(StringUtils.center("(km)", 36));
|
||||
sb.append(StringUtils.center("(km)", 36));
|
||||
sb.append(StringUtils.center("(deg)", 12));
|
||||
sb.append(StringUtils.center("(deg)", 12));
|
||||
sb.append(StringUtils.center("(km)", 12));
|
||||
sb.append(StringUtils.center("(km)", 12));
|
||||
sb.append(StringUtils.center("(deg)", 12));
|
||||
sb.append(StringUtils.center("(deg)", 12));
|
||||
sb.append(StringUtils.center("(deg)", 12));
|
||||
sb.append(StringUtils.center("(deg)", 12));
|
||||
sb.append(StringUtils.center("(deg)", 12));
|
||||
sb.append(StringUtils.center("(deg)", 12));
|
||||
sb.append(StringUtils.center("(deg)", 12));
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append(String.format("%7.2f", xOffset + centerX));
|
||||
sb.append(String.format("%7.2f", yOffset + centerY));
|
||||
sb.append(String.format("%8d", facet));
|
||||
sb.append(String.format("%12.6f", tiltDeg));
|
||||
sb.append(String.format("%12.6f", tiltDir));
|
||||
sb.append(String.format("%12.6f", scPos.getX()));
|
||||
sb.append(String.format("%12.6f", scPos.getY()));
|
||||
sb.append(String.format("%12.6f", scPos.getZ()));
|
||||
sb.append(String.format("%12.6f", surfaceIntercept.getX()));
|
||||
sb.append(String.format("%12.6f", surfaceIntercept.getY()));
|
||||
sb.append(String.format("%12.6f", surfaceIntercept.getZ()));
|
||||
|
||||
double lon = Math.toDegrees(surfaceIntercept.getAlpha());
|
||||
if (lon < 0) lon += 360;
|
||||
sb.append(String.format("%12.6f", lon));
|
||||
sb.append(String.format("%12.6f", Math.toDegrees(surfaceIntercept.getDelta())));
|
||||
sb.append(String.format("%12.6f", surfaceIntercept.getNorm()));
|
||||
|
||||
sb.append(String.format("%12.6f", scPos.distance(surfaceIntercept)));
|
||||
sb.append(String.format("%12.6f", Math.toDegrees(incidence)));
|
||||
sb.append(String.format("%12.6f", Math.toDegrees(emission)));
|
||||
sb.append(String.format("%12.6f", Math.toDegrees(phase)));
|
||||
sb.append(String.format("%12.6f", Math.toDegrees(scAzimuth)));
|
||||
sb.append(String.format("%12.6f", Math.toDegrees(scElevation)));
|
||||
sb.append(String.format("%12.6f", Math.toDegrees(sunAzimuth)));
|
||||
sb.append(String.format("%12.6f", Math.toDegrees(sunElevation)));
|
||||
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
private static Options defineOptions() {
|
||||
Options options = TerrasaurTool.defineOptions();
|
||||
options.addOption(
|
||||
Option.builder("sumFile")
|
||||
.required()
|
||||
.hasArg()
|
||||
.desc("Required. Name of sum file to read.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("objFile")
|
||||
.required()
|
||||
.hasArg()
|
||||
.desc("Required. Name of OBJ shape file.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("pixelOffset")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"Pixel offset from center of image, given as a comma separated pair (no spaces). Default is 0,0. "
|
||||
+ "x increases to the right and y increases down.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("xRange")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"Range of X pixel offsets from center of image, given as a comma separated triplet (xStart, xStop, xSpacing with no spaces). "
|
||||
+ "For example -50,50,5.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("yRange")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"Range of Y pixel offsets from center of image, given as a comma separated triplet (yStart, yStop, ySpacing with no spaces). "
|
||||
+ "For example -50,50,5.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("radius")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"Evaluate all pixels within specified distance (in pixels) of desired pixel. This value will be rounded to the nearest integer.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("distanceScale")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"Spacecraft position is assumed to be in kilometers. If not, scale by this value (e.g. Use 0.001 if s/c pos is in meters).")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("stats")
|
||||
.desc("Print out statistics about range to all selected pixels.")
|
||||
.build());
|
||||
return options;
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
TerrasaurTool defaultOBJ = new RangeFromSumFile();
|
||||
|
||||
Options options = defineOptions();
|
||||
|
||||
CommandLine cl = defaultOBJ.parseArgs(args, options);
|
||||
|
||||
Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
|
||||
for (MessageLabel ml : startupMessages.keySet())
|
||||
logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));
|
||||
NativeLibraryLoader.loadSpiceLibraries();
|
||||
NativeLibraryLoader.loadVtkLibraries();
|
||||
|
||||
SumFile sumFile = SumFile.fromFile(new File(cl.getOptionValue("sumFile")));
|
||||
|
||||
int xStart = 0;
|
||||
int xStop = 1;
|
||||
int xSpacing = 1;
|
||||
int yStart = 0;
|
||||
int yStop = 1;
|
||||
int ySpacing = 1;
|
||||
if (cl.hasOption("pixelOffset")) {
|
||||
String[] parts = cl.getOptionValue("pixelOffset").split(",");
|
||||
|
||||
int x = Integer.parseInt(parts[0].trim());
|
||||
int y = Integer.parseInt(parts[1].trim());
|
||||
|
||||
xStart = x;
|
||||
xStop = x + 1;
|
||||
yStart = y;
|
||||
yStop = y + 1;
|
||||
}
|
||||
|
||||
if (cl.hasOption("xRange")) {
|
||||
String[] parts = cl.getOptionValue("xRange").split(",");
|
||||
xStart = Integer.parseInt(parts[0].trim());
|
||||
xStop = Integer.parseInt(parts[1].trim());
|
||||
xSpacing = Integer.parseInt(parts[2].trim());
|
||||
}
|
||||
|
||||
if (cl.hasOption("yRange")) {
|
||||
String[] parts = cl.getOptionValue("yRange").split(",");
|
||||
yStart = Integer.parseInt(parts[0].trim());
|
||||
yStop = Integer.parseInt(parts[1].trim());
|
||||
ySpacing = Integer.parseInt(parts[2].trim());
|
||||
}
|
||||
|
||||
int checkRadius = 0;
|
||||
if (cl.hasOption("radius")) {
|
||||
checkRadius = (int) Math.round(Double.parseDouble(cl.getOptionValue("radius")));
|
||||
xStart -= checkRadius;
|
||||
xStop += checkRadius;
|
||||
yStart -= checkRadius;
|
||||
yStop += checkRadius;
|
||||
}
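    // e.g. -xRange -50,50,5 -yRange -50,50,5 samples a 21 x 21 grid of pixel
    // offsets; -radius expands the bounding box so that the circular mask
    // applied in the loop below fits inside it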
|
||||
|
||||
String objFile = cl.getOptionValue("objFile");
|
||||
vtkPolyData polyData = PolyDataUtil.loadShapeModel(objFile);
|
||||
RangeFromSumFile rfsf = new RangeFromSumFile(sumFile, polyData);
|
||||
|
||||
if (cl.hasOption("distanceScale"))
|
||||
rfsf.setDistanceScale(Double.parseDouble(cl.getOptionValue("distanceScale")));
|
||||
|
||||
System.out.println(rfsf.getHeader(cl.getOptionValue("sumFile")));
|
||||
|
||||
for (int ix = xStart; ix < xStop; ix += xSpacing) {
|
||||
for (int iy = yStart; iy < yStop; iy += ySpacing) {
|
||||
if (checkRadius > 0) {
|
||||
double midx = (xStart + xStop) / 2.;
|
||||
double midy = (yStart + yStop) / 2.;
|
||||
if ((ix - midx) * (ix - midx) + (iy - midy) * (iy - midy) > checkRadius * checkRadius)
|
||||
continue;
|
||||
}
|
||||
long cellID = rfsf.findIntercept(ix, iy).getKey();
|
||||
if (cellID > -1) System.out.println(rfsf);
|
||||
}
|
||||
}
|
||||
if (cl.hasOption("stats")) System.out.println("Range " + rfsf.getStats());
|
||||
}
|
||||
}
|
||||
842
src/main/java/terrasaur/apps/RenderShapeFromSumFile.java
Normal file
@@ -0,0 +1,842 @@
|
||||
package terrasaur.apps;
|
||||
|
||||
import java.awt.AlphaComposite;
|
||||
import java.awt.Color;
|
||||
import java.awt.Graphics2D;
|
||||
import java.awt.Image;
|
||||
import java.awt.image.BufferedImage;
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintWriter;
|
||||
import java.nio.charset.Charset;
|
||||
import java.time.Instant;
|
||||
import java.util.*;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.stream.IntStream;
|
||||
import javax.imageio.ImageIO;
|
||||
import net.jafama.FastMath;
|
||||
import nom.tam.fits.Fits;
|
||||
import nom.tam.fits.Header;
|
||||
import nom.tam.fits.ImageHDU;
|
||||
import nom.tam.fits.header.DateTime;
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.FilenameUtils;
|
||||
import org.apache.commons.math3.geometry.euclidean.threed.Rotation;
|
||||
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
|
||||
import org.apache.logging.log4j.Level;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.logging.log4j.spi.StandardLevel;
|
||||
import terrasaur.smallBodyModel.LocalModelCollection;
|
||||
import terrasaur.smallBodyModel.SmallBodyModel;
|
||||
import terrasaur.templates.TerrasaurTool;
|
||||
import terrasaur.utils.*;
|
||||
import terrasaur.utils.math.RotationUtils;
|
||||
import terrasaur.utils.saaPlotLib.util.StringFunctions;
|
||||
import vtk.vtkIdList;
|
||||
import vtk.vtkPoints;
|
||||
import vtk.vtkPolyData;
|
||||
|
||||
public class RenderShapeFromSumFile implements TerrasaurTool {
|
||||
|
||||
private static final Logger logger = LogManager.getLogger();
|
||||
|
||||
@Override
|
||||
public String shortDescription() {
|
||||
return "Render a simulated camera image given a shape model and sumFile.";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String fullDescription(Options options) {
|
||||
String header = "";
|
||||
String footer = "\nRender a simulated camera image given a shape model and sumFile.\n";
|
||||
return TerrasaurTool.super.fullDescription(options, header, footer);
|
||||
}
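  /*
   * Typical invocation (a sketch; file names are hypothetical):
   *
   *   RenderShapeFromSumFile -model shape.obj -sumFile image.SUM -output render.png
   *
   * A .png output also writes a render.txt metadata file next to the image; a
   * .fits output writes an 11 plane image cube instead.
   */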
|
||||
|
||||
private RenderShapeFromSumFile() {}
|
||||
|
||||
private String globalOBJname;
|
||||
private Double scale;
|
||||
private Rotation rotation;
|
||||
|
||||
/** Sun position in body fixed coordinates */
|
||||
private Vector3D sunXYZ;
|
||||
|
||||
/** Set camera position in body fixed coordinates */
|
||||
private Vector3D cameraXYZ;
|
||||
|
||||
private Rotation cameraToBodyFixed;
|
||||
|
||||
/** set instantaneous field of view in radians/pixel */
|
||||
private double ifov;
|
||||
|
||||
private int nPixelsX, nPixelsY;
|
||||
private double centerX, centerY;
|
||||
private int subPixel;
|
||||
|
||||
private ThreadLocal<SmallBodyModel> sbm;
|
||||
// key is cell index, value is albedo
|
||||
private Map<Long, Double> albedoMap;
|
||||
// key is resolution, value is local shape model
|
||||
private NavigableMap<Double, LocalModelCollection> lmcMap;
|
||||
|
||||
// key is field name, value is pair of comment and metadata value
|
||||
private NavigableMap<String, Map.Entry<String, String>> metadata;
|
||||
|
||||
public RenderShapeFromSumFile(String globalOBJname, Double scale, Rotation rotation) {
|
||||
this.globalOBJname = globalOBJname;
|
||||
this.scale = scale;
|
||||
this.rotation = rotation;
|
||||
|
||||
subPixel = 2;
|
||||
|
||||
sbm = new ThreadLocal<>();
|
||||
albedoMap = new HashMap<>();
|
||||
lmcMap = new TreeMap<>();
|
||||
|
||||
metadata = new TreeMap<>();
|
||||
}
|
||||
|
||||
private void loadAlbedoFile(String albedoFile) {
|
||||
try {
|
||||
List<String> lines = FileUtils.readLines(new File(albedoFile), Charset.defaultCharset());
|
||||
for (String line : lines) {
|
||||
String trimLine = line.strip();
|
||||
if (trimLine.isEmpty() || trimLine.startsWith("#")) continue;
|
||||
|
||||
String[] parts = trimLine.split(",");
|
||||
long index = Long.parseLong(parts[0].trim());
|
||||
albedoMap.put(index, Double.parseDouble(parts[1].trim()));
|
||||
}
|
||||
} catch (IOException e) {
|
||||
logger.error(e.getLocalizedMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
private SmallBodyModel getGlobalModel() {
|
||||
if (sbm.get() == null) {
|
||||
try {
|
||||
vtkPolyData model = PolyDataUtil.loadShapeModel(globalOBJname);
|
||||
if (scale != null || rotation != null) {
|
||||
PolyDataStatistics stats = new PolyDataStatistics(model);
|
||||
Vector3D center = new Vector3D(stats.getCentroid());
|
||||
|
||||
vtkPoints points = model.GetPoints();
|
||||
for (int i = 0; i < points.GetNumberOfPoints(); i++) {
|
||||
Vector3D thisPoint = new Vector3D(points.GetPoint(i));
|
||||
if (scale != null)
|
||||
thisPoint = thisPoint.subtract(center).scalarMultiply(scale).add(center);
|
||||
if (rotation != null)
|
||||
thisPoint = rotation.applyTo(thisPoint.subtract(center)).add(center);
|
||||
points.SetPoint(i, thisPoint.toArray());
|
||||
}
|
||||
}
|
||||
sbm.set(new SmallBodyModel(model));
|
||||
} catch (Exception e) {
|
||||
logger.error(e.getLocalizedMessage());
|
||||
}
|
||||
}
|
||||
return sbm.get();
|
||||
}
|
||||
|
||||
public void addMetaData(String key, String comment, String value) {
|
||||
metadata.put(key, new AbstractMap.SimpleEntry<>(comment, value));
|
||||
}
|
||||
|
||||
public void setSubPixel(int subPixel) {
|
||||
this.subPixel = subPixel;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a unit 3D vector in the body fixed frame given pixel coordinates x and y. (0,0) is the
|
||||
* upper left corner with X increasing to the right and Y increasing down.
|
||||
*
|
||||
* @param ix pixel x value
|
||||
* @param iy pixel y value
|
||||
* @return look direction
|
||||
*/
|
||||
public Vector3D pixelToBodyFixed(double ix, double iy) {
|
||||
|
||||
double[] xyz = new double[3];
|
||||
xyz[0] = FastMath.sin(ifov * (ix - centerX));
|
||||
xyz[1] = FastMath.sin(ifov * (iy - centerY));
|
||||
xyz[2] = FastMath.sqrt(1 - xyz[0] * xyz[0] - xyz[1] * xyz[1]);
|
||||
|
||||
return cameraToBodyFixed.applyTo(new Vector3D(xyz));
|
||||
}
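  /*
   * Sanity check of the mapping above (a sketch, not called by the tool): the
   * center pixel has zero off-axis angle, so xyz = (0, 0, 1) and the result is
   * the camera boresight expressed in the body fixed frame.
   *
   *   Vector3D boresight = pixelToBodyFixed(centerX, centerY);
   */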
|
||||
|
||||
private static class Brightness {
|
||||
private final double incidence;
|
||||
private final double emission;
|
||||
private final double phase;
|
||||
private final double brightness;
|
||||
private final double range;
|
||||
private final double facetX;
|
||||
private final double facetY;
|
||||
private final double facetZ;
|
||||
private final double normalX;
|
||||
private final double normalY;
|
||||
private final double normalZ;
|
||||
|
||||
private Brightness(
|
||||
double incidence,
|
||||
double emission,
|
||||
double phase,
|
||||
double brightness,
|
||||
double range,
|
||||
Vector3D facet,
|
||||
Vector3D normal) {
|
||||
this.incidence = Math.toDegrees(incidence);
|
||||
this.emission = Math.toDegrees(emission);
|
||||
this.phase = Math.toDegrees(phase);
|
||||
this.brightness = brightness;
|
||||
this.range = range;
|
||||
this.facetX = facet.getX();
|
||||
this.facetY = facet.getY();
|
||||
this.facetZ = facet.getZ();
|
||||
this.normalX = normal.getX();
|
||||
this.normalY = normal.getY();
|
||||
this.normalZ = normal.getZ();
|
||||
}
|
||||
|
||||
private double[] values() {
|
||||
return new double[] {
|
||||
this.brightness,
|
||||
this.incidence,
|
||||
this.emission,
|
||||
this.phase,
|
||||
this.range,
|
||||
this.facetX,
|
||||
this.facetY,
|
||||
this.facetZ,
|
||||
this.normalX,
|
||||
this.normalY,
|
||||
this.normalZ
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param pf Photometric function
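   * @param sbm shape model used to look up cell geometry and normals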
|
||||
* @param intersect cell id of intersection point
|
||||
* @param intersectPoint XYZ coordinates of intersection point
|
||||
* @param isDefault true if this is the default model, false if local
|
||||
* @return Brightness structure
|
||||
*/
|
||||
private Brightness getBrightness(
|
||||
PhotometricFunction pf,
|
||||
SmallBodyModel sbm,
|
||||
long intersect,
|
||||
Vector3D intersectPoint,
|
||||
boolean isDefault) {
|
||||
|
||||
Vector3D facetToCamera = cameraXYZ.subtract(intersectPoint);
|
||||
|
||||
CellInfo ci = CellInfo.getCellInfo(sbm.getSmallBodyPolyData(), intersect, new vtkIdList());
|
||||
Vector3D normal = ci.normal();
|
||||
|
||||
double emission = Vector3D.angle(facetToCamera, normal);
|
||||
double distFromCamera = facetToCamera.getNorm();
|
||||
|
||||
    // combine all facets falling within one pixel's footprint; clamping the
    // emission angle at 60 degrees bounds the footprint size, which speeds up
    // pixels along the limb
|
||||
double kmPerPixel =
|
||||
ifov * distFromCamera / Math.abs(FastMath.cos(Math.min(Math.toRadians(60), emission)));
|
||||
|
||||
double sum = 0;
|
||||
|
||||
Set<Long> cells = sbm.findClosestCellsWithinRadius(intersectPoint.toArray(), kmPerPixel / 2);
|
||||
cells.add(intersect);
|
||||
double incidence = 0;
|
||||
double phase = 0;
|
||||
for (long cell : cells) {
|
||||
|
||||
ci = CellInfo.getCellInfo(sbm.getSmallBodyPolyData(), cell, new vtkIdList(), true);
|
||||
facetToCamera = cameraXYZ.subtract(ci.center());
|
||||
normal = new Vector3D(sbm.getCellNormals().GetTuple3(cell));
|
||||
emission = Vector3D.angle(facetToCamera, normal);
|
||||
incidence = 0;
|
||||
phase = 0;
|
||||
|
||||
if (sunXYZ != null) {
|
||||
incidence = Vector3D.angle(sunXYZ, normal);
|
||||
phase = Vector3D.angle(facetToCamera, sunXYZ);
|
||||
|
||||
Vector3D sunToFacet = ci.center().subtract(sunXYZ);
|
||||
|
||||
// check for shadowing
|
||||
double[] sunIntersectPoint = new double[3];
|
||||
long sunIntersect =
|
||||
sbm.computeRayIntersection(sunXYZ.toArray(), sunToFacet.toArray(), sunIntersectPoint);
|
||||
if (sunIntersect != cell) {
|
||||
// don't allow points in shadow to have a 0 value
|
||||
sum += .001;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
double albedo = (isDefault && albedoMap.containsKey(cell)) ? albedoMap.get(cell) : 1;
|
||||
sum +=
|
||||
albedo
|
||||
* pf.getValue(
|
||||
FastMath.cos(incidence), FastMath.cos(emission), FastMath.toDegrees(phase));
|
||||
}
|
||||
|
||||
logger.printf(
|
||||
Level.DEBUG,
|
||||
"Thread %d lat/lon %.2f/%.2f, %s, sum %f, cells %d, %.2f",
|
||||
Thread.currentThread().getId(),
|
||||
Math.toDegrees(intersectPoint.getDelta()),
|
||||
Math.toDegrees(intersectPoint.getAlpha()),
|
||||
intersectPoint.toString(),
|
||||
sum,
|
||||
cells.size(),
|
||||
sum / cells.size());
|
||||
|
||||
return new Brightness(
|
||||
incidence, emission, phase, sum / cells.size(), distFromCamera, facetToCamera, normal);
|
||||
}
|
||||
|
||||
class BrightnessCalculator implements Callable<Map<Integer, Brightness>> {
|
||||
|
||||
Collection<Integer> pixelIndices;
|
||||
PhotometricFunction pf;
|
||||
|
||||
private BrightnessCalculator(Collection<Integer> pixelIndices, PhotometricFunction pf) {
|
||||
this.pixelIndices = pixelIndices;
|
||||
this.pf = pf;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<Integer, Brightness> call() throws Exception {
|
||||
|
||||
logger.info("Thread {}: starting", Thread.currentThread().getId());
|
||||
|
||||
int xPixels = subPixel * nPixelsX;
|
||||
|
||||
SmallBodyModel globalModel = getGlobalModel();
|
||||
|
||||
Map<Integer, Brightness> brightness = new HashMap<>();
|
||||
double[] intersectPoint = new double[3];
|
||||
double[] cameraXYZArray = cameraXYZ.toArray();
|
||||
for (Integer index : pixelIndices) {
|
||||
int j = index / xPixels;
|
||||
int i = index % xPixels;
|
||||
Vector3D pixelDir = pixelToBodyFixed(((double) i) / subPixel, ((double) j) / subPixel);
|
||||
|
||||
long intersect =
|
||||
globalModel.computeRayIntersection(cameraXYZArray, pixelDir.toArray(), intersectPoint);
|
||||
|
||||
if (intersect > -1) {
|
||||
|
||||
Vector3D intersectPt3D = new Vector3D(intersectPoint);
|
||||
|
||||
// resolution in m/pixel
|
||||
double resolution = ifov * intersectPt3D.distance(cameraXYZ) * 1e3;
|
||||
|
||||
        // if no ceiling entry exists, stick with the global model
        Entry<Double, LocalModelCollection> lmcEntry = lmcMap.ceilingEntry(resolution);

        // start from the global model's intersection; a local model may
        // override it below
        SmallBodyModel modelToUse = globalModel;
        long cellToUse = intersect;
        Vector3D pointToUse = intersectPt3D;

        if (lmcEntry != null) {

          LocalModelCollection lmc = lmcEntry.getValue();
          double[] localIntersectPoint = new double[3];

          SmallBodyModel localModel = lmc.get(intersectPt3D);
          if (localModel != null) {
            long localIntersect =
                localModel.computeRayIntersection(
                    cameraXYZArray, pixelDir.toArray(), localIntersectPoint);
            if (localIntersect != -1) {
              // a local model covers this pixel: use its higher resolution
              // intersection rather than the global one
              modelToUse = localModel;
              cellToUse = localIntersect;
              pointToUse = new Vector3D(localIntersectPoint);
            } else {
              logger.debug(
                  String.format(
                      "Thread %d: No intersection with local model for pixel (%d,%d): lat/lon %.2f/%.2f, using global intersection %d %s",
                      Thread.currentThread().getId(),
                      i,
                      j,
                      Math.toDegrees(intersectPt3D.getDelta()),
                      Math.toDegrees(intersectPt3D.getAlpha()),
                      intersect,
                      intersectPt3D));
            }
          }
        }

        boolean isDefault = lmcEntry == null;
        Brightness b = getBrightness(pf, modelToUse, cellToUse, pointToUse, isDefault);
        brightness.put(j * xPixels + i, b);
|
||||
}
|
||||
}
|
||||
|
||||
logger.info("Thread {}: finished", Thread.currentThread().getId());
|
||||
|
||||
return brightness;
|
||||
}
|
||||
}
|
||||
|
||||
public double[][][] getFits(PhotometricFunction pf, int numThreads) {
|
||||
|
||||
int xPixels = subPixel * nPixelsX;
|
||||
int yPixels = subPixel * nPixelsY;
|
||||
|
||||
Map<Integer, Brightness> brightness = new HashMap<>();
|
||||
try (ExecutorService executor = Executors.newFixedThreadPool(numThreads)) {
|
||||
|
||||
List<Integer> indices = IntStream.range(0, yPixels * xPixels).boxed().toList();
|
||||
|
||||
int numPixels = indices.size();
|
||||
|
||||
Set<BrightnessCalculator> callables = new HashSet<>();
|
||||
for (int i = 0; i < numThreads; i++) {
|
||||
int fromIndex = i * numPixels / numThreads;
|
||||
int toIndex = Math.min(numPixels, fromIndex + numPixels / numThreads);
|
||||
callables.add(new BrightnessCalculator(indices.subList(fromIndex, toIndex), pf));
|
||||
}
|
||||
|
||||
Set<Future<Map<Integer, Brightness>>> futures = new HashSet<>();
|
||||
for (BrightnessCalculator callable : callables) futures.add(executor.submit(callable));
|
||||
|
||||
for (Future<Map<Integer, Brightness>> future : futures) {
|
||||
try {
|
||||
Map<Integer, Brightness> values = future.get();
|
||||
brightness.putAll(values);
|
||||
} catch (Exception e) {
|
||||
logger.error(e.getLocalizedMessage(), e);
|
||||
}
|
||||
}
|
||||
executor.shutdown();
|
||||
}
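    // pack the per-pixel results into 11 image planes (brightness, incidence,
    // emission, phase, range, facet XYZ, normal XYZ) in the order returned by
    // Brightness.values(), matching the PLANEn keywords written to the FITS
    // header; the y axis is flipped so row 0 is the bottom of the image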
|
||||
|
||||
double[][][] img = new double[11][yPixels][xPixels];
|
||||
for (int i = 0; i < xPixels; i++) {
|
||||
for (int j = 0; j < yPixels; j++) {
|
||||
Brightness pixel = brightness.get(j * xPixels + i);
|
||||
if (pixel == null) continue;
|
||||
double[] pixels = pixel.values();
|
||||
for (int k = 0; k < img.length; k++) {
|
||||
img[k][yPixels - j - 1][i] = pixels[k];
|
||||
}
|
||||
}
|
||||
}
|
||||
return img;
|
||||
}
|
||||
|
||||
public BufferedImage getImage(PhotometricFunction pf, int numThreads) {
|
||||
|
||||
int xPixels = subPixel * nPixelsX;
|
||||
int yPixels = subPixel * nPixelsY;
|
||||
BufferedImage image = new BufferedImage(xPixels, yPixels, BufferedImage.TYPE_INT_ARGB);
|
||||
Graphics2D g = image.createGraphics();
|
||||
g.setComposite(AlphaComposite.Clear);
|
||||
g.fillRect(0, 0, image.getWidth(), image.getHeight());
|
||||
g.setComposite(AlphaComposite.Src);
|
||||
g.setColor(Color.BLACK);
|
||||
g.fillRect(0, 0, image.getWidth(), image.getHeight());
|
||||
|
||||
Map<Integer, Brightness> brightness = new HashMap<>();
|
||||
double maxBrightness = -Double.MAX_VALUE;
|
||||
|
||||
try (ExecutorService executor = Executors.newFixedThreadPool(numThreads)) {
|
||||
|
||||
List<Integer> indices = IntStream.range(0, yPixels * xPixels).boxed().toList();
|
||||
|
||||
int numPixels = indices.size();
|
||||
|
||||
Set<BrightnessCalculator> callables = new HashSet<>();
|
||||
for (int i = 0; i < numThreads; i++) {
|
||||
int fromIndex = i * numPixels / numThreads;
|
||||
int toIndex = Math.min(numPixels, fromIndex + numPixels / numThreads);
|
||||
callables.add(new BrightnessCalculator(indices.subList(fromIndex, toIndex), pf));
|
||||
}
|
||||
|
||||
Set<Future<Map<Integer, Brightness>>> futures = new HashSet<>();
|
||||
for (BrightnessCalculator callable : callables) futures.add(executor.submit(callable));
|
||||
|
||||
for (Future<Map<Integer, Brightness>> future : futures) {
|
||||
try {
|
||||
Map<Integer, Brightness> brightnessMap = future.get();
|
||||
for (Brightness b : brightnessMap.values())
|
||||
if (maxBrightness < b.brightness) maxBrightness = b.brightness;
|
||||
          brightness.putAll(brightnessMap);
|
||||
} catch (Exception e) {
|
||||
logger.error(e.getLocalizedMessage(), e);
|
||||
}
|
||||
}
|
||||
executor.shutdown();
|
||||
}
|
||||
|
||||
/*-
|
||||
double[] intersectPoint = new double[3];
|
||||
|
||||
Map<Integer, Double> brightness = new HashMap<>();
|
||||
|
||||
double[] cameraXYZArray = cameraXYZ.toArray();
|
||||
for (int i = 0; i < xPixels; i++) {
|
||||
for (int j = 0; j < yPixels; j++) {
|
||||
Vector3D pixelDir = pixelToBodyFixed(((double) i) / scale, ((double) j) / scale);
|
||||
|
||||
int intersect =
|
||||
sbm.computeRayIntersection(cameraXYZArray, pixelDir.toArray(), intersectPoint);
|
||||
if (intersect > 0) {
|
||||
brightness.put(i * xPixels + j,
|
||||
getBrightness(pf, intersect, new Vector3D(intersectPoint)));
|
||||
}
|
||||
}
|
||||
}
|
||||
*/
|
||||
if (brightness.isEmpty()) {
|
||||
logger.info("No intersections with shape model found!");
|
||||
} else {
|
||||
for (int j = 0; j < yPixels; j++) {
|
||||
for (int i = 0; i < xPixels; i++) {
|
||||
if (!brightness.containsKey(j * xPixels + i)) continue;
|
||||
double value = brightness.get(j * xPixels + i).brightness;
|
||||
|
||||
int grey = value < 0.01 ? 1 : (int) (255 * value / maxBrightness);
|
||||
|
||||
image.setRGB(i, j, new Color(grey, grey, grey).getRGB());
|
||||
}
|
||||
}
|
||||
}
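    // reduce the supersampled image back to the sum file dimensions (see the
    // -subPixel option)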
|
||||
|
||||
BufferedImage img = new BufferedImage(nPixelsX, nPixelsY, BufferedImage.TYPE_INT_RGB);
|
||||
img.createGraphics()
|
||||
.drawImage(image.getScaledInstance(nPixelsX, nPixelsY, Image.SCALE_SMOOTH), 0, 0, null);
|
||||
|
||||
return img;
|
||||
}
|
||||
|
||||
public SumFile loadSumFile(String filename) {
|
||||
|
||||
SumFile sumFile = SumFile.fromFile(new File(filename));
|
||||
|
||||
addMetaData("image.utc", "Imaging date. Taken from sumfile", sumFile.utcString());
|
||||
|
||||
nPixelsX = sumFile.imageWidth();
|
||||
nPixelsY = sumFile.imageHeight();
|
||||
|
||||
centerX = 0.5 * (nPixelsX - 1);
|
||||
centerY = 0.5 * (nPixelsY - 1);
|
||||
|
||||
// let's assume square pixels
|
||||
ifov = sumFile.horizontalResolution();
|
||||
|
||||
// put the sun far away
|
||||
sunXYZ = sumFile.sunDirection().scalarMultiply(1e8);
|
||||
cameraXYZ = sumFile.scobj().negate();
|
||||
|
||||
cameraToBodyFixed = sumFile.getBodyFixedToCamera().revert();
|
||||
|
||||
double[] intersectPoint = new double[3];
|
||||
Vector3D boresight = sumFile.boresight();
|
||||
long intersect =
|
||||
getGlobalModel()
|
||||
.computeRayIntersection(cameraXYZ.toArray(), boresight.toArray(), intersectPoint);
|
||||
    if (intersect > -1) {
|
||||
Vector3D nadirPt = new Vector3D(intersectPoint);
|
||||
double lat = nadirPt.getDelta();
|
||||
double lon = nadirPt.getAlpha();
|
||||
Vector3D normal = new Vector3D(getGlobalModel().getCellNormals().GetTuple3(intersect));
|
||||
double inc = Vector3D.angle(sunXYZ, normal);
|
||||
Vector3D toCamera = cameraXYZ.subtract(nadirPt);
|
||||
double ems = Vector3D.angle(toCamera, normal);
|
||||
double phs = Vector3D.angle(sunXYZ, toCamera);
|
||||
double range = cameraXYZ.subtract(nadirPt).getNorm();
|
||||
|
||||
addMetaData("image.cell", "Index of center pixel cell", Long.toString(intersect));
|
||||
addMetaData("image.lat", "Center latitude", StringFunctions.toDegreesLat("%.2f ").apply(lat));
|
||||
addMetaData(
|
||||
"image.lon", "Center longitude", StringFunctions.toDegreesELon("%.2f ").apply(lon));
|
||||
addMetaData(
|
||||
"image.inc", "Center incidence in degrees", String.format("%.2f", Math.toDegrees(inc)));
|
||||
addMetaData(
|
||||
"image.ems",
|
||||
"Center emission in degrees (may not be zero if facet is tilted)",
|
||||
String.format("%.2f", Math.toDegrees(ems)));
|
||||
addMetaData(
|
||||
"image.phs", "Center phase in degrees", String.format("%.2f", Math.toDegrees(phs)));
|
||||
addMetaData("image.range", "Center point range in m", String.format("%.3f", range * 1e3));
|
||||
addMetaData(
|
||||
"image.resolution",
|
||||
"Center point resolution in m/pixel",
|
||||
String.format("%.3f", ifov * range * 1e3));
|
||||
}
|
||||
return sumFile;
|
||||
}
|
||||
|
||||
/**
|
||||
* load a local model file
|
||||
*
|
||||
* @param lmcName local model filename
|
||||
*/
|
||||
public void loadLocalModels(String lmcName) {
|
||||
LocalModelCollection lmc = new LocalModelCollection(128, scale, rotation);
|
||||
try {
|
||||
List<String> lines = FileUtils.readLines(new File(lmcName), Charset.defaultCharset());
|
||||
|
||||
Double localResolution = null;
|
||||
|
||||
for (String line : lines) {
|
||||
String strippedLine = line.strip();
|
||||
if (strippedLine.isEmpty() || strippedLine.startsWith("#")) continue;
|
||||
String[] parts = strippedLine.split(",");
|
||||
|
||||
if (localResolution == null) {
|
||||
localResolution = Double.valueOf(parts[0].strip());
|
||||
logger.debug("Loading {} with a resolution of {} m/pixel", lmcName, localResolution);
|
||||
} else {
|
||||
double lat = Math.toRadians(Double.parseDouble(parts[0]));
|
||||
double lon = Math.toRadians(Double.parseDouble(parts[1]));
|
||||
String filename = parts[2].strip();
|
||||
lmc.addModel(lat, lon, filename);
|
||||
}
|
||||
}
|
||||
this.lmcMap.put(localResolution, lmc);
|
||||
} catch (IOException e) {
|
||||
logger.error(e.getLocalizedMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Write a metadata file containing information about the simulated image.
|
||||
*
|
||||
* @param metadataFile file to write
|
||||
* @param arguments command line arguments
|
||||
*/
|
||||
public void writeMetadata(File metadataFile, String arguments) {
|
||||
try (PrintWriter pw = new PrintWriter(metadataFile)) {
|
||||
|
||||
pw.printf("# Created %s by %s\n", Instant.now().toString(), AppVersion.getVersionString());
|
||||
pw.printf("# arguments: %s\n\n", arguments);
|
||||
|
||||
String lastSection = null;
|
||||
for (String key : metadata.keySet()) {
|
||||
String thisSection = key.substring(0, key.indexOf("."));
|
||||
if (lastSection == null) lastSection = thisSection;
|
||||
if (!lastSection.equals(thisSection)) {
|
||||
pw.println();
|
||||
lastSection = thisSection;
|
||||
}
|
||||
Map.Entry<String, String> value = metadata.get(key);
|
||||
String[] comments = value.getKey().split("\\r?\\n");
|
||||
for (String comment : comments) if (!comment.trim().isEmpty()) pw.printf("# %s\n", comment);
|
||||
pw.printf("%s = %s\n", key, value.getValue());
|
||||
}
|
||||
|
||||
} catch (FileNotFoundException e) {
|
||||
logger.log(Level.ERROR, e.getLocalizedMessage(), e);
|
||||
}
|
||||
}
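  /*
   * Keys group into sections by the prefix before the first ".", separated by
   * blank lines. For example (value illustrative):
   *
   *   # Imaging date. Taken from sumfile
   *   image.utc = 2019-01-01T00:00:00.000Z
   */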
|
||||
|
||||
private static Options defineOptions() {
|
||||
Options options = TerrasaurTool.defineOptions();
|
||||
options.addOption(
|
||||
Option.builder("albedoFile")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"""
|
||||
Name of albedo file. This is a CSV file with facet
|
||||
index in the first column and albedo (in the range 0
|
||||
to 1) in the second. Additional columns are
|
||||
ignored. Albedo for facets not specified will be
|
||||
set to 1. Lines starting with # or blank
|
||||
lines are ignored. This file applies only to the
|
||||
default shape model, not any local ones."""
|
||||
.replaceAll("\\s+", " ")
|
||||
.strip())
|
||||
.build());
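    // Example albedoFile contents (facet indices and albedos are illustrative):
    //   # facet, albedo
    //   0, 0.85
    //   1, 0.90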
|
||||
options.addOption(
|
||||
Option.builder("localModels")
|
||||
.hasArgs()
|
||||
.desc(
|
||||
"""
|
||||
File containing local shape models, one per line.
|
||||
The first line of the file should contain the
|
||||
coarsest resolution in m/pixel where these models
|
||||
should be used. Usually this can be about half the
|
||||
resolution of the next coarser model. For example,
|
||||
if the global model has a resolution of 1 m/pixel
|
||||
the local model file should be used for resolutions
|
||||
better than 0.5 m/pixel. Format of each remaining
|
||||
line is lat, lon, filename as comma separated
|
||||
values. Lat and lon are in degrees. This option
|
||||
may be specified multiple times to load multiple
|
||||
sets of models. Lines starting with # or blank
|
||||
lines are ignored."""
|
||||
.replaceAll("\\s+", " ")
|
||||
.strip())
|
||||
.build());
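    // Example localModels file (paths and coordinates are illustrative):
    //   # coarsest resolution in m/pixel where these models apply
    //   0.5
    //   10.0, 45.0, local/site_A.obj
    //   -5.0, 120.0, local/site_B.obj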
|
||||
options.addOption(
|
||||
Option.builder("logFile")
|
||||
.hasArg()
|
||||
.desc("If present, save screen output to log file.")
|
||||
.build());
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for (StandardLevel l : StandardLevel.values()) sb.append(String.format("%s ", l.name()));
|
||||
options.addOption(
|
||||
Option.builder("logLevel")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"If present, print messages above selected priority. Valid values are "
|
||||
+ sb.toString().trim()
|
||||
+ ". Default is INFO.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("model")
|
||||
.required()
|
||||
.hasArg()
|
||||
.desc(
|
||||
"Required. Default shape model filename. Supported formats are OBJ, PLT, PLY, STL, or VTK format.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("numThreads")
|
||||
.hasArg()
|
||||
.desc("Number of threads to run in parallel when generating the image. Default is 2.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("photo")
|
||||
.hasArg()
|
||||
.desc(PhotometricFunction.getOptionString().trim() + " Default is OREX.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("output")
|
||||
.required()
|
||||
.hasArg()
|
||||
.desc("Required. Name of image file to write. Valid extensions are fits or png.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("rotateModel")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"""
|
||||
If present, rotate shape model. Specify by an angle
|
||||
(degrees) and a 3 element rotation axis vector (XYZ)
|
||||
separated by commas.
|
||||
"""
|
||||
.replaceAll("\\s+", " ")
|
||||
.strip())
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("scaleModel")
|
||||
.hasArg()
|
||||
.desc("If present, factor to scale shape model. The center is unchanged.")
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("subPixel")
|
||||
.hasArg()
|
||||
.desc(
|
||||
"""
|
||||
Generate the simulated image a factor of subPixel
|
||||
(must be an integer) larger than the dimensions in
|
||||
                        the sum file.  The simulated image is then reduced
|
||||
in size to the dimensions in the sum file. The
|
||||
default is 2.
|
||||
"""
|
||||
.replaceAll("\\s+", " ")
|
||||
.strip())
|
||||
.build());
|
||||
options.addOption(
|
||||
Option.builder("sumFile")
|
||||
.required()
|
||||
.hasArg()
|
||||
.desc("Required. Name of sum file to read.")
|
||||
.build());
|
||||
return options;
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
TerrasaurTool defaultOBJ = new RenderShapeFromSumFile();
|
||||
|
||||
Options options = defineOptions();
|
||||
|
||||
CommandLine cl = defaultOBJ.parseArgs(args, options);
|
||||
|
||||
Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
|
||||
for (MessageLabel ml : startupMessages.keySet())
|
||||
logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));
|
||||
|
||||
NativeLibraryLoader.loadVtkLibraries();
|
||||
|
||||
Double scale =
|
||||
cl.hasOption("scaleModel") ? Double.parseDouble(cl.getOptionValue("scaleModel")) : null;
|
||||
Rotation rotation =
|
||||
cl.hasOption("rotateModel")
|
||||
? RotationUtils.stringToRotation(cl.getOptionValue("rotateModel"))
|
||||
: null;
|
||||
|
||||
RenderShapeFromSumFile app =
|
||||
new RenderShapeFromSumFile(cl.getOptionValue("model"), scale, rotation);
|
||||
SumFile sumFile = app.loadSumFile(cl.getOptionValue("sumFile"));
|
||||
|
||||
if (cl.hasOption("albedoFile")) app.loadAlbedoFile(cl.getOptionValue("albedoFile"));
|
||||
|
||||
if (cl.hasOption("localModels"))
|
||||
for (String localModel : cl.getOptionValues("localModels")) app.loadLocalModels(localModel);
|
||||
|
||||
if (cl.hasOption("subPixel")) app.setSubPixel(Integer.parseInt(cl.getOptionValue("subPixel")));
|
||||
|
||||
PhotometricFunction pf = PhotometricFunction.OREX1;
|
||||
if (cl.hasOption("photo"))
|
||||
pf = PhotometricFunction.getPhotometricFunction(cl.getOptionValue("photo"));
|
||||
int numThreads =
|
||||
cl.hasOption("numThreads") ? Integer.parseInt(cl.getOptionValue("numThreads")) : 2;
|
||||
|
||||
String outputFilename = cl.getOptionValue("output");
|
||||
String dirname = FilenameUtils.getPath(outputFilename);
|
||||
if (dirname.trim().isEmpty()) dirname = ".";
|
||||
String basename = FilenameUtils.getBaseName(outputFilename);
|
||||
String extension = FilenameUtils.getExtension(outputFilename);
|
||||
|
||||
if (extension.equalsIgnoreCase("png")) {
|
||||
BufferedImage image = app.getImage(pf, numThreads);
|
||||
File png = new File(dirname, basename + "." + extension);
|
||||
File metadataFile = new File(dirname, basename + ".txt");
|
||||
|
||||
ImageIO.write(image, "PNG", png);
|
||||
app.writeMetadata(metadataFile, startupMessages.get(MessageLabel.ARGUMENTS));
|
||||
logger.info("Wrote {}", outputFilename);
|
||||
} else if (extension.equalsIgnoreCase("fits")) {
|
||||
Fits fits = new Fits();
|
||||
ImageHDU imageHDU = (ImageHDU) Fits.makeHDU(app.getFits(pf, numThreads));
|
||||
Header header = imageHDU.getHeader();
|
||||
header.addValue(
|
||||
DateTime.TIMESYS_UTC, app.metadata.get("image.utc").getValue(), "Time from the SUM file");
|
||||
header.addValue("TITLE", sumFile.picnm(), "Title of SUM file");
|
||||
header.addValue("PLANE1", "brightness", "from 0 to 1");
|
||||
header.addValue("PLANE2", "incidence", "degrees");
|
||||
header.addValue("PLANE3", "emission", "degrees");
|
||||
header.addValue("PLANE4", "phase", "degrees");
|
||||
header.addValue("PLANE5", "range", "kilometers");
|
||||
header.addValue("PLANE6", "facetX", "kilometers");
|
||||
header.addValue("PLANE7", "facetY", "kilometers");
|
||||
header.addValue("PLANE8", "facetZ", "kilometers");
|
||||
header.addValue("PLANE9", "normalX", "X component of unit normal");
|
||||
header.addValue("PLANE10", "normalY", "Y component of unit normal");
|
||||
header.addValue("PLANE11", "normalZ", "Z component of unit normal");
|
||||
header.addValue("MMFL", sumFile.mmfl(), "From SUM file");
|
||||
header.addValue("SCOBJ", sumFile.scobj().toString(), "From SUM file");
|
||||
header.addValue("CX", sumFile.cx().toString(), "From SUM file");
|
||||
header.addValue("CY", sumFile.cy().toString(), "From SUM file");
|
||||
header.addValue("CZ", sumFile.cz().toString(), "From SUM file");
|
||||
header.addValue("SZ", sumFile.sz().toString(), "From SUM file");
|
||||
header.addValue("KMAT1", sumFile.kmat1().toString(), "From SUM file");
|
||||
header.addValue("KMAT2", sumFile.kmat2().toString(), "From SUM file");
|
||||
header.addValue("DIST", sumFile.distortion().toString(), "From SUM file");
|
||||
header.addValue("SIGVSO", sumFile.sig_vso().toString(), "From SUM file");
|
||||
header.addValue("SIGPTG", sumFile.sig_ptg().toString(), "From SUM file");
|
||||
fits.addHDU(imageHDU);
|
||||
fits.write(outputFilename);
|
||||
fits.close();
|
||||
logger.info("wrote {}", outputFilename);
|
||||
} else {
|
||||
logger.error("Unsupported output file type: {}", outputFilename);
|
||||
}
|
||||
}
|
||||
}
|
||||
220
src/main/java/terrasaur/apps/RotationConversion.java
Normal file
@@ -0,0 +1,220 @@
|
||||
package terrasaur.apps;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.math3.geometry.euclidean.threed.CardanEulerSingularityException;
|
||||
import org.apache.commons.math3.geometry.euclidean.threed.Rotation;
|
||||
import org.apache.commons.math3.geometry.euclidean.threed.RotationConvention;
|
||||
import org.apache.commons.math3.geometry.euclidean.threed.RotationOrder;
|
||||
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.logging.log4j.spi.StandardLevel;
|
||||
import terrasaur.templates.TerrasaurTool;
|
||||
|
||||
public class RotationConversion implements TerrasaurTool {
|
||||
|
||||
private static final Logger logger = LogManager.getLogger();
|
||||
|
||||
@Override
|
||||
public String shortDescription() {
|
||||
return "Convert rotations between different types.";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String fullDescription(Options options) {
|
||||
|
||||
String header = "";
|
||||
String footer =
|
||||
"""
|
||||
This program converts rotations between angle and axis, 3x3 matrix, quaternions, \
|
||||
and ZXZ rotation Euler angles. Note that the rotation modifies the frame; \
|
||||
the vector is considered to be fixed. To find the rotation that modifies the \
|
||||
vector in a fixed frame, take the transpose of this matrix.
|
||||
""";
|
||||
|
||||
return TerrasaurTool.super.fullDescription(options, header, footer);
|
||||
|
||||
}
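  /*
   * Worked example of the convention described in the footer (a sketch using
   * Commons Math, which this class already imports): a 90 degree rotation
   * about +Z as a FRAME_TRANSFORM re-expresses the fixed vector +X as -Y in
   * the rotated frame.
   *
   *   Rotation r =
   *       new Rotation(Vector3D.PLUS_K, Math.PI / 2, RotationConvention.FRAME_TRANSFORM);
   *   r.applyTo(Vector3D.PLUS_I); // approximately (0, -1, 0)
   *
   * Applying r.revert() instead rotates the vector itself within a fixed frame.
   */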
|
||||
|
||||
|
||||
private static Options defineOptions() {
|
||||
Options options = TerrasaurTool.defineOptions();
|
||||
options.addOption(Option.builder("logFile").hasArg()
|
||||
.desc("If present, save screen output to log file.").build());
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for (StandardLevel l : StandardLevel.values())
|
||||
sb.append(String.format("%s ", l.name()));
|
||||
options.addOption(Option.builder("logLevel").hasArg()
|
||||
.desc("If present, print messages above selected priority. Valid values are "
|
||||
+ sb.toString().trim() + ". Default is INFO.")
|
||||
.build());
|
||||
|
||||
options.addOption(Option.builder("angle").hasArg().desc("Rotation angle, in radians.").build());
|
||||
options.addOption(
|
||||
Option.builder("axis0").hasArg().desc("First element of rotation axis.").build());
|
||||
options.addOption(
|
||||
Option.builder("axis1").hasArg().desc("Second element of rotation axis.").build());
|
||||
options.addOption(
|
||||
Option.builder("axis2").hasArg().desc("Third element of rotation axis.").build());
|
||||
options.addOption(Option.builder("cardanXYZ1").hasArg()
|
||||
.desc("Cardan angle for the first rotation (about the X axis) in radians.").build());
|
||||
options.addOption(Option.builder("cardanXYZ2").hasArg()
|
||||
.desc("Cardan angle for the second rotation (about the Y axis) in radians.").build());
|
||||
options.addOption(Option.builder("cardanXYZ3").hasArg()
|
||||
.desc("Cardan angle for the third rotation (about the Z axis) in radians.").build());
|
||||
options.addOption(Option.builder("eulerZXZ1").hasArg()
|
||||
.desc("Euler angle for the first rotation (about the Z axis) in radians.").build());
|
||||
options.addOption(Option.builder("eulerZXZ2").hasArg()
|
||||
.desc("Euler angle for the second rotation (about the rotated X axis) in radians.")
|
||||
.build());
|
||||
options.addOption(Option.builder("eulerZXZ3").hasArg()
|
||||
.desc("Euler angle for the third rotation (about the rotated Z axis) in radians.").build());
|
||||
options.addOption(
|
||||
Option.builder("q0").hasArg().desc("Scalar term for quaternion: cos(theta/2)").build());
|
||||
options.addOption(Option.builder("q1").hasArg()
|
||||
.desc("First vector term for quaternion: sin(theta/2) * V[0]").build());
|
||||
options.addOption(Option.builder("q2").hasArg()
|
||||
.desc("Second vector term for quaternion: sin(theta/2) * V[1]").build());
|
||||
options.addOption(Option.builder("q3").hasArg()
|
||||
.desc("Third vector term for quaternion: sin(theta/2) * V[2]").build());
|
||||
options.addOption(Option.builder("matrix").hasArg()
|
||||
.desc("name of file containing rotation matrix to convert to Euler angles. "
|
||||
+ "Format is 3x3 array in plain text separated by white space.")
|
||||
.build());
|
||||
options.addOption(Option.builder("anglesInDegrees").desc(
|
||||
"If present, input angles in degrees and print output angles in degrees. Default is false.")
|
||||
        .build());
    return options;
}

public static void main(String[] args) throws Exception {
RotationConversion defaultOBJ = new RotationConversion();

Options options = defineOptions();

CommandLine cl = defaultOBJ.parseArgs(args, options);

Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
for (MessageLabel ml : startupMessages.keySet())
logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));
boolean inDegrees = cl.hasOption("anglesInDegrees");

boolean axisAndAngle = cl.hasOption("angle") && cl.hasOption("axis0") && cl.hasOption("axis1")
&& cl.hasOption("axis2");
boolean cardanXYZ =
cl.hasOption("cardanXYZ1") && cl.hasOption("cardanXYZ2") && cl.hasOption("cardanXYZ3");
boolean eulerZXZ =
cl.hasOption("eulerZXZ1") && cl.hasOption("eulerZXZ2") && cl.hasOption("eulerZXZ3");
boolean quaternion =
cl.hasOption("q0") && cl.hasOption("q1") && cl.hasOption("q2") && cl.hasOption("q3");
boolean matrix = cl.hasOption("matrix");

if (!(axisAndAngle || cardanXYZ || eulerZXZ || quaternion || matrix)) {
logger.warn(
"Must specify input rotation as axis and angle, Cardan or Euler angles, matrix, or quaternion.");
System.exit(0);
}

Rotation r = null;
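// The -matrix argument names a plain text file holding a 3x3 array separated by
// whitespace, e.g. a hypothetical identity rotation:
//   1 0 0
//   0 1 0
//   0 0 1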
if (matrix) {
List<String> lines =
FileUtils.readLines(new File(cl.getOptionValue("matrix")), Charset.defaultCharset());
double[][] m = new double[3][3];
for (int i = 0; i < 3; i++) {
String[] parts = lines.get(i).trim().split("\\s+");
for (int j = 0; j < 3; j++)
m[i][j] = Double.parseDouble(parts[j].trim());
}
r = new Rotation(m, 1e-10);
}

if (axisAndAngle) {
double angle = Double.parseDouble(cl.getOptionValue("angle").trim());
if (inDegrees)
angle = Math.toRadians(angle);
r = new Rotation(
new Vector3D(Double.parseDouble(cl.getOptionValue("axis0").trim()),
Double.parseDouble(cl.getOptionValue("axis1").trim()),
Double.parseDouble(cl.getOptionValue("axis2").trim())),
angle, RotationConvention.FRAME_TRANSFORM);
}

if (cardanXYZ) {
double angle1 = Double.parseDouble(cl.getOptionValue("cardanXYZ1").trim());
double angle2 = Double.parseDouble(cl.getOptionValue("cardanXYZ2").trim());
double angle3 = Double.parseDouble(cl.getOptionValue("cardanXYZ3").trim());
if (inDegrees) {
angle1 = Math.toRadians(angle1);
angle2 = Math.toRadians(angle2);
angle3 = Math.toRadians(angle3);
}
r = new Rotation(RotationOrder.XYZ, RotationConvention.FRAME_TRANSFORM, angle1, angle2,
angle3);
}

if (eulerZXZ) {
double angle1 = Double.parseDouble(cl.getOptionValue("eulerZXZ1").trim());
double angle2 = Double.parseDouble(cl.getOptionValue("eulerZXZ2").trim());
double angle3 = Double.parseDouble(cl.getOptionValue("eulerZXZ3").trim());
if (inDegrees) {
angle1 = Math.toRadians(angle1);
angle2 = Math.toRadians(angle2);
angle3 = Math.toRadians(angle3);
}
r = new Rotation(RotationOrder.ZXZ, RotationConvention.FRAME_TRANSFORM, angle1, angle2,
angle3);
}

if (quaternion) {
r = new Rotation(Double.parseDouble(cl.getOptionValue("q0").trim()),
Double.parseDouble(cl.getOptionValue("q1").trim()),
Double.parseDouble(cl.getOptionValue("q2").trim()),
Double.parseDouble(cl.getOptionValue("q3").trim()), true);
}

double[][] m = r.getMatrix();
String matrixString = String.format(
"rotation matrix:\n%24.16e %24.16e %24.16e\n%24.16e %24.16e %24.16e\n%24.16e %24.16e %24.16e",
m[0][0], m[0][1], m[0][2], m[1][0], m[1][1], m[1][2], m[2][0], m[2][1], m[2][2]);
System.out.println(matrixString);

String axisAndAngleString = inDegrees
? String.format("angle (degrees), axis:\n%g, %s", Math.toDegrees(r.getAngle()),
r.getAxis(RotationConvention.FRAME_TRANSFORM))
: String.format("angle (radians), axis:\n%g, %s", r.getAngle(),
r.getAxis(RotationConvention.FRAME_TRANSFORM));
System.out.println(axisAndAngleString);

try {
double[] angles = r.getAngles(RotationOrder.XYZ, RotationConvention.FRAME_TRANSFORM);
String cardanString = inDegrees
? String.format("Cardan XYZ angles (degrees):\n%g, %g, %g", Math.toDegrees(angles[0]),
Math.toDegrees(angles[1]), Math.toDegrees(angles[2]))
: String.format("Cardan XYZ angles (radians):\n%g, %g, %g", angles[0], angles[1],
angles[2]);
System.out.println(cardanString);
} catch (CardanEulerSingularityException e) {
System.out.println("Cardan angles: encountered singularity, cannot solve");
}

try {
double[] angles = r.getAngles(RotationOrder.ZXZ, RotationConvention.FRAME_TRANSFORM);
String eulerString = inDegrees
? String.format("Euler ZXZ angles (degrees):\n%g, %g, %g", Math.toDegrees(angles[0]),
Math.toDegrees(angles[1]), Math.toDegrees(angles[2]))
: String.format("Euler ZXZ angles (radians):\n%g, %g, %g", angles[0], angles[1],
angles[2]);
System.out.println(eulerString);
} catch (CardanEulerSingularityException e) {
System.out.println("Euler angles: encountered singularity, cannot solve");
}

System.out.printf("Quaternion:\n%g, %g, %g, %g\n", r.getQ0(), r.getQ1(), r.getQ2(), r.getQ3());
}
}
450
src/main/java/terrasaur/apps/SPKFromSumFile.java
Normal file
@@ -0,0 +1,450 @@
package terrasaur.apps;

import java.io.*;
import java.nio.charset.Charset;
import java.util.*;
import org.apache.commons.cli.*;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.math3.analysis.polynomials.PolynomialFunction;
import org.apache.commons.math3.fitting.PolynomialCurveFitter;
import org.apache.commons.math3.fitting.WeightedObservedPoints;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.apache.commons.text.WordUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.spi.StandardLevel;
import picante.math.intervals.UnwritableInterval;
import spice.basic.*;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.*;
import terrasaur.utils.math.MathConversions;

public class SPKFromSumFile implements TerrasaurTool {

private final static Logger logger = LogManager.getLogger();

@Override
public String shortDescription() {
return "Given three or more sumfiles, create an input file for MKSPK.";
}

@Override
public String fullDescription(Options options) {
String header = "";
String footer = """
Given three or more sumfiles, fit a parabola to the spacecraft
trajectory in J2000 and create an input file for MKSPK.
""";
return TerrasaurTool.super.fullDescription(options, header, footer);
}
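
// A hypothetical invocation (sketch; IDs and file names are placeholders, not from
// the original source):
//
//   SPKFromSumFile -observer -999 -target 2101955 -spice kernels.tm -sumFile sumlist.txt
//
// The tool writes the MKSPK setup and input files and prints the mkspk command that
// generates the SPK itself.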
private Body observer;
private Body target;
private ReferenceFrame J2000;
private ReferenceFrame bodyFixed;
private NavigableMap<Double, SumFile> sumFiles;
private Map<SumFile, Double> weightMap;
private NavigableMap<Double, String> sumFilenames;
private UnwritableInterval interval;

private SPKFromSumFile() {}

private SPKFromSumFile(Body observer, Body target, ReferenceFrame bodyFixed, Map<String, Double> weightMap,
double extend) throws SpiceException {
this.observer = observer;
this.target = target;
this.bodyFixed = bodyFixed;
this.J2000 = new ReferenceFrame("J2000");

this.sumFiles = new TreeMap<>();
this.weightMap = new HashMap<>();
this.sumFilenames = new TreeMap<>();

for (String filename : weightMap.keySet()) {
SumFile s = SumFile.fromFile(new File(filename));
double tdb = new TDBTime(s.utcString()).getTDBSeconds();
this.sumFiles.put(tdb, s);
this.weightMap.put(s, weightMap.get(filename));
this.sumFilenames.put(tdb, filename);
}

this.interval = new UnwritableInterval(this.sumFiles.firstKey(), this.sumFiles.lastKey() + extend);
}

/**
* @param basename base name for MKSPK input files
* @param comments comments to include in SPK
* @param degree polynomial degree to use for fitting position
* @param velocity User-supplied velocity (if null, use derivative of calculated fit to position)
* @param velocityIsJ2000 if true, user-supplied velocity is in J2000 frame
* @return command to run MKSPK
*/
public String writeMKSPKFiles(String basename, List<String> comments, int degree, final Vector3 velocity,
boolean velocityIsJ2000) throws SpiceException {

String commentFile = basename + "-comments.txt";
String setupFile = basename + ".setup";
String inputFile = basename + ".inp";

try (PrintWriter pw = new PrintWriter(commentFile)) {
StringBuilder sb = new StringBuilder();
if (!comments.isEmpty()) {
for (String comment : comments)
sb.append(comment).append("\n");
sb.append("\n");
}
sb.append(String.format("This SPK for %s was generated by fitting a parabola to each component of the SCOBJ vector from the following sumfiles:\n", target));
for (String sumFile : sumFilenames.values()) {
sb.append(String.format("\t%s\n", sumFile));
}
sb.append("The SCOBJ vector was transformed to J2000 and an aberration correction ");
sb.append(String.format("was applied to find the geometric position relative to %s before the parabola fit. ", target.getName()));
sb.append(String.format("The period covered by this SPK is %s to %s.",
new TDBTime(interval.getBegin()).toUTCString("ISOC", 3),
new TDBTime(interval.getEnd()).toUTCString("ISOC", 3)));

String allComments = sb.toString();
for (String comment : allComments.split("\\r?\\n"))
pw.println(WordUtils.wrap(comment, 80));
} catch (FileNotFoundException e) {
logger.error(e.getLocalizedMessage(), e);
}

int numTXT = KernelDatabase.ktotal("TEXT");
File lsk = null;
for (int i = 0; i < numTXT; i++) {
String filename = KernelDatabase.getFileName(i, "TEXT");
if (filename.toLowerCase().endsWith(".tls")) lsk = new File(filename);
}

Map<String, String> map = new TreeMap<>();
map.put("INPUT_DATA_TYPE", "'STATES'");
map.put("OUTPUT_SPK_TYPE", "13"); // Hermite polynomial, unevenly spaced in time
map.put("OBJECT_ID", String.format("%d", observer.getIDCode()));
map.put("CENTER_ID", String.format("%d", target.getIDCode()));
map.put("COMMENT_FILE", String.format("'%s'", commentFile));
map.put("REF_FRAME_NAME", "'J2000'");
map.put("PRODUCER_ID", "'Hari.Nair@jhuapl.edu'");
map.put("DATA_ORDER", "'EPOCH X Y Z VX VY VZ'");
map.put("DATA_DELIMITER", "' '");
map.put("LEAPSECONDS_FILE", String.format("'%s'", lsk));
map.put("TIME_WRAPPER", "'# ETSECONDS'");
map.put("POLYNOM_DEGREE", "7");
map.put("SEGMENT_ID", "'SPK_STATES_13'");
map.put("LINES_PER_RECORD", "1");
try (PrintWriter pw = new PrintWriter(setupFile)) {
pw.println("\\begindata");
for (String key : map.keySet()) {
pw.printf("%s = %s\n", key, map.get(key));
}
} catch (FileNotFoundException e) {
logger.error(e.getLocalizedMessage(), e);
}

RemoveAberration ra = new RemoveAberration(target, observer);

WeightedObservedPoints x = new WeightedObservedPoints();
WeightedObservedPoints y = new WeightedObservedPoints();
WeightedObservedPoints z = new WeightedObservedPoints();
Map<Double, Vector3> geometricMap = new HashMap<>();
for (Double t : sumFiles.keySet()) {
SumFile sumFile = sumFiles.get(t);
double weight = weightMap.get(sumFile);
TDBTime tdb = new TDBTime(t);
Matrix33 bodyFixedToJ2000 = bodyFixed.getPositionTransformation(J2000, tdb);
Vector3 scObjJ2000 = bodyFixedToJ2000.mxv(MathConversions.toVector3(sumFile.scobj()));
Vector3 geometricScPos = ra.getGeometricPosition(tdb, scObjJ2000);
geometricMap.put(t, geometricScPos);
x.add(weight, t, geometricScPos.getElt(0));
y.add(weight, t, geometricScPos.getElt(1));
z.add(weight, t, geometricScPos.getElt(2));
}

// fit a polynomial to the geometric positions in J2000
PolynomialCurveFitter fitter = PolynomialCurveFitter.create(degree);
double[] xCoeff = fitter.fit(x.toList());
double[] yCoeff = fitter.fit(y.toList());
double[] zCoeff = fitter.fit(z.toList());

PolynomialFunction xPos = new PolynomialFunction(xCoeff);
PolynomialFunction xVel = xPos.polynomialDerivative();
PolynomialFunction yPos = new PolynomialFunction(yCoeff);
PolynomialFunction yVel = yPos.polynomialDerivative();
PolynomialFunction zPos = new PolynomialFunction(zCoeff);
PolynomialFunction zVel = zPos.polynomialDerivative();
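
// The velocity is the analytic derivative of the fitted position polynomial:
// d/dt sum(c_k t^k) = sum(k c_k t^(k-1)), which polynomialDerivative() returns.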
logger.info("Polynomial fitting coefficients for geometric position of {} relative to {} in J2000:",
observer.getName(), target.getName());
StringBuilder xMsg = new StringBuilder(String.format("X = %e ", xCoeff[0]));
StringBuilder yMsg = new StringBuilder(String.format("Y = %e ", yCoeff[0]));
StringBuilder zMsg = new StringBuilder(String.format("Z = %e ", zCoeff[0]));
for (int i = 1; i <= degree; i++) {
xMsg.append(xCoeff[i] < 0 ? "- " : "+ ").append(String.format("%e ", Math.abs(xCoeff[i]))).append("t").append(i > 1 ? "^" + i : "").append(" ");
yMsg.append(yCoeff[i] < 0 ? "- " : "+ ").append(String.format("%e ", Math.abs(yCoeff[i]))).append("t").append(i > 1 ? "^" + i : "").append(" ");
zMsg.append(zCoeff[i] < 0 ? "- " : "+ ").append(String.format("%e ", Math.abs(zCoeff[i]))).append("t").append(i > 1 ? "^" + i : "").append(" ");
}
logger.info(xMsg);
logger.info(yMsg);
logger.info(zMsg);

logger.debug("");
logger.debug("NOTE: comparing aberration correction=LT+S positions from sumfile with aberration " +
"correction=NONE for fit.");
for (Double t : sumFiles.keySet()) {
TDBTime tdb = new TDBTime(t);
SumFile sumFile = sumFiles.get(t);
Vector3 j2000Pos = new Vector3(xPos.value(t), yPos.value(t), zPos.value(t));
Matrix33 bodyFixedToJ2000 = bodyFixed.getPositionTransformation(J2000, tdb);
Vector3D bfPos = MathConversions.toVector3D(bodyFixedToJ2000.mtxv(j2000Pos));
// comparing LT+S with NONE here
logger.debug("UTC Date: {}", sumFile.utcString());
logger.debug("Sumfile SCOBJ: {}", sumFile.scobj());
logger.debug("Fit: {}", bfPos);
logger.debug("residual (m): {}", sumFile.scobj().subtract(bfPos).scalarMultiply(1000));
logger.debug("");
}

try (PrintWriter pw = new PrintWriter(inputFile)) {
for (double t = interval.getBegin(); t < interval.getEnd(); t++) {

if (velocity == null) {
double vx = -xVel.value(t);
double vy = -yVel.value(t);
double vz = -zVel.value(t);
pw.printf("%.16e %.16e %.16e %.16e %.16e %.16e %.16e\n", t, -xPos.value(t), -yPos.value(t),
-zPos.value(t), vx, vy, vz);
} else {
Vector3 thisVelocity = new Vector3(velocity);
if (!velocityIsJ2000) {
TDBTime tdb = new TDBTime(t);
thisVelocity = bodyFixed.getPositionTransformation(J2000, tdb).mxv(velocity);
}
double vx = thisVelocity.getElt(0);
double vy = thisVelocity.getElt(1);
double vz = thisVelocity.getElt(2);
pw.printf("%.16e %.16e %.16e %.16e %.16e %.16e %.16e\n", t, -xPos.value(t), -yPos.value(t),
-zPos.value(t), vx, vy, vz);
}
}
} catch (FileNotFoundException e) {
logger.error(e.getLocalizedMessage(), e);
}

try (PrintWriter pw = new PrintWriter(basename + ".csv")) {
pw.println("# Note: fit quantities are without light time or aberration corrections");
pw.println("# SCOBJ");
pw.println("# UTC, TDB, SumFile, SPICE (body fixed) x, y, z, SCOBJ (body fixed) x, y, z, SCOBJ (J2000) x," +
" y, z, SCOBJ (Geometric J2000) x, y, z, Fit SCOBJ (body fixed) x, y, z, Fit SCOBJ (Geometric " +
"J2000) x, y, z");
for (Double t : sumFiles.keySet()) {
SumFile sumFile = sumFiles.get(t);
pw.printf("%s,", sumFile.utcString());
pw.printf("%.3f, ", t);
pw.printf("%s, ", FilenameUtils.getBaseName(sumFilenames.get(t)));

TDBTime tdb = new TDBTime(t);
StateRecord sr = new StateRecord(observer, tdb, bodyFixed, new AberrationCorrection("LT+S"), target);

pw.printf("%s, ", sr.getPosition().getElt(0));
pw.printf("%s, ", sr.getPosition().getElt(1));
pw.printf("%s, ", sr.getPosition().getElt(2));

pw.printf("%s, ", sumFile.scobj().getX());
pw.printf("%s, ", sumFile.scobj().getY());
pw.printf("%s, ", sumFile.scobj().getZ());

Matrix33 j2000ToBodyFixed = J2000.getPositionTransformation(bodyFixed, tdb);
Vector3 scobjJ2000 = j2000ToBodyFixed.mtxv(MathConversions.toVector3(sumFile.scobj()));
pw.printf("%s, ", scobjJ2000.getElt(0));
pw.printf("%s, ", scobjJ2000.getElt(1));
pw.printf("%s, ", scobjJ2000.getElt(2));

Vector3 geometricScPos = geometricMap.get(t);
pw.printf("%s, ", geometricScPos.getElt(0));
pw.printf("%s, ", geometricScPos.getElt(1));
pw.printf("%s, ", geometricScPos.getElt(2));

Vector3 fitScOBJJ2000 = new Vector3(xPos.value(t), yPos.value(t), zPos.value(t));
Vector3 fitScOBJ = j2000ToBodyFixed.mxv(fitScOBJJ2000);
pw.printf("%s, ", fitScOBJ.getElt(0));
pw.printf("%s, ", fitScOBJ.getElt(1));
pw.printf("%s, ", fitScOBJ.getElt(2));

pw.printf("%s, ", fitScOBJJ2000.getElt(0));
pw.printf("%s, ", fitScOBJJ2000.getElt(1));
pw.printf("%s, ", fitScOBJJ2000.getElt(2));
pw.println();
}
pw.println("\n# Velocity");
pw.println("# UTC, TDB, SumFile, SPICE (body fixed) x, y, z, SPICE (J2000) x, y, z, Fit (body fixed) x, " +
"y, z, Fit (J2000) x, y, z");
for (Double t : sumFiles.keySet()) {
SumFile sumFile = sumFiles.get(t);
pw.printf("%s,", sumFile.utcString());
pw.printf("%.3f, ", t);
pw.printf("%s, ", FilenameUtils.getBaseName(sumFilenames.get(t)));

TDBTime tdb = new TDBTime(t);

Matrix66 j2000ToBodyFixed = J2000.getStateTransformation(bodyFixed, tdb);
StateRecord sr = new StateRecord(observer, tdb, J2000, new AberrationCorrection("NONE"), target);
Vector3 velJ2000 = sr.getVelocity();
Vector3 velBodyFixed = j2000ToBodyFixed.mxv(sr.getStateVector()).getVector3(1);

pw.printf("%s, ", velBodyFixed.getElt(0));
pw.printf("%s, ", velBodyFixed.getElt(1));
pw.printf("%s, ", velBodyFixed.getElt(2));

pw.printf("%s, ", velJ2000.getElt(0));
pw.printf("%s, ", velJ2000.getElt(1));
pw.printf("%s, ", velJ2000.getElt(2));

velJ2000 = new Vector3(xVel.value(t), yVel.value(t), zVel.value(t));
if (velocity != null) {
Vector3 thisVelocity = new Vector3(velocity);
if (!velocityIsJ2000) {
thisVelocity = bodyFixed.getPositionTransformation(J2000, tdb).mxv(velocity);
}
double vx = thisVelocity.getElt(0);
double vy = thisVelocity.getElt(1);
double vz = thisVelocity.getElt(2);
velJ2000 = new Vector3(vx, vy, vz);
}

StateVector stateJ2000 = new StateVector(new Vector3(xPos.value(t), yPos.value(t), zPos.value(t)),
velJ2000);
velBodyFixed = j2000ToBodyFixed.mxv(stateJ2000).getVector3(1);

pw.printf("%s, ", velBodyFixed.getElt(0));
pw.printf("%s, ", velBodyFixed.getElt(1));
pw.printf("%s, ", velBodyFixed.getElt(2));

pw.printf("%s, ", velJ2000.getElt(0));
pw.printf("%s, ", velJ2000.getElt(1));
pw.printf("%s, ", velJ2000.getElt(2));
pw.println();
}
} catch (FileNotFoundException e) {
logger.error(e.getLocalizedMessage(), e);
}

return String.format("mkspk -setup %s -input %s -output %s.bsp", setupFile, inputFile, basename);
}

private static Options defineOptions() {
Options options = TerrasaurTool.defineOptions();
options.addOption(Option.builder("degree").hasArg().desc("Degree of polynomial used to fit sumFile locations. Default is 2.").build());
options.addOption(Option.builder("extend").hasArg().desc("Extend SPK past the last sumFile by <arg> seconds. "
+ "Default is zero.").build());
options.addOption(Option.builder("frame").hasArg().desc("Name of body fixed frame. This will default to the "
+ "target's body fixed frame.").build());
options.addOption(Option.builder("logFile").hasArg().desc("If present, save screen output to log file.").build());
StringBuilder sb = new StringBuilder();
for (StandardLevel l : StandardLevel.values())
sb.append(String.format("%s ", l.name()));
options.addOption(Option.builder("logLevel").hasArg().desc("If present, print messages above selected " +
"priority. Valid values are " + sb.toString().trim() + ". Default is INFO.").build());
options.addOption(Option.builder("observer").required().hasArg().desc("Required. SPICE ID for the observer.").build());
options.addOption(Option.builder("sumFile").hasArg().required().desc("""
File listing sumfiles to read. This is a text file,
one per line. You can include an optional weight
after each filename. The default weight is 1.0.
Lines starting with # are ignored.

Example:

D717506120G0.SUM
D717506126G0.SUM
D717506127G0.SUM
D717506128G0.SUM
D717506129G0.SUM
D717506131G0.SUM
# Weight this last image less than the others
D717506132G0.SUM 0.25
""").build());
options.addOption(Option.builder("spice").required().hasArgs().desc("Required. SPICE metakernel file " +
"containing body fixed frame and spacecraft kernels. Can specify more than one kernel, separated by "
+ "whitespace.").build());
options.addOption(Option.builder("target").required().hasArg().desc("Required. SPICE ID for the target.").build());
options.addOption(Option.builder("velocity").hasArgs().desc("Spacecraft velocity relative to target in the " + "body fixed frame. If present, use this fixed velocity in the MKSPK input file. Default is to " + "take the derivative of the fit position. Specify as three floating point values in km/sec," + "separated by whitespace.").build());
options.addOption(Option.builder("velocityJ2000").desc("If present, argument to -velocity is in J2000 frame. "
+ "Ignored if -velocity is not set.").build());
return options;
}

public static void main(String[] args) throws SpiceException {
TerrasaurTool defaultOBJ = new SPKFromSumFile();

Options options = defineOptions();

CommandLine cl = defaultOBJ.parseArgs(args, options);

Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
for (MessageLabel ml : startupMessages.keySet())
logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

NativeLibraryLoader.loadSpiceLibraries();

final double extend = cl.hasOption("extend") ? Double.parseDouble(cl.getOptionValue("extend")) : 0;

for (String kernel : cl.getOptionValues("spice"))
KernelDatabase.load(kernel);

Body observer = new Body(cl.getOptionValue("observer"));
Body target = new Body(cl.getOptionValue("target"));

ReferenceFrame bodyFixed = null;
if (cl.hasOption("frame")) {
bodyFixed = new ReferenceFrame(cl.getOptionValue("frame"));
} else {
String keyword = String.format("OBJECT_%d_FRAME", target.getIDCode());
if (KernelPool.exists(keyword)) {
bodyFixed = new ReferenceFrame(KernelPool.getCharacter(keyword)[0]);
} else {
logger.error("No keyword {} in kernel pool and -frame was not specified!", keyword);
System.exit(0);
}
}

final int degree = cl.hasOption("degree") ? Integer.parseInt(cl.getOptionValue("degree")) : 2;
Vector3 velocity = null;
if (cl.hasOption("velocity")) {
String[] parts = cl.getOptionValues("velocity");
if (parts.length == 3) {
double x = Double.parseDouble(parts[0]);
double y = Double.parseDouble(parts[1]);
double z = Double.parseDouble(parts[2]);
velocity = new Vector3(x, y, z);
}
}
boolean velocityJ2000 = cl.hasOption("velocityJ2000");

Map<String, Double> weightMap = new HashMap<>();
String first = null;
String last = null;
try {
List<String> lines = FileUtils.readLines(new File(cl.getOptionValue("sumFile")), Charset.defaultCharset());
for (String line : lines) {
if (line.strip().startsWith("#")) continue;
String[] parts = line.strip().split("\\s+");
double weight = 1.0;
if (parts.length > 1) weight = Double.parseDouble(parts[1]);
weightMap.put(parts[0], weight);
if (first == null) first = parts[0];
last = parts[0];
}
} catch (IOException e) {
logger.error(e.getLocalizedMessage(), e);
}
SPKFromSumFile app = new SPKFromSumFile(observer, target, bodyFixed, weightMap, extend);
String basename = String.format("%s_%s", FilenameUtils.getBaseName(first), FilenameUtils.getBaseName(last));
List<String> comments = new ArrayList<>();
String command = app.writeMKSPKFiles(basename, comments, degree, velocity, velocityJ2000);

logger.info("Generate new SPK:\n\t{}", command);

logger.info("Finished.");
}

}
508
src/main/java/terrasaur/apps/ShapeFormatConverter.java
Normal file
@@ -0,0 +1,508 @@
package terrasaur.apps;

import java.io.File;
import java.io.PrintWriter;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.math3.geometry.euclidean.threed.Rotation;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.LUDecomposition;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.SingularValueDecomposition;
import org.apache.commons.math3.util.Pair;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import spice.basic.Matrix33;
import spice.basic.Vector3;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.*;
import terrasaur.utils.ImmutableSBMTEllipseRecord.Builder;
import terrasaur.utils.math.MathConversions;
import terrasaur.utils.math.RotationUtils;
import vtk.vtkPoints;
import vtk.vtkPolyData;

public class ShapeFormatConverter implements TerrasaurTool {

private static final Logger logger = LogManager.getLogger();

@Override
public String shortDescription() {
return "Transform a shape model to a new coordinate system.";
}

@Override
public String fullDescription(Options options) {

String header = "";
String footer =
"This program will rotate, translate, and/or scale a shape model. It can additionally transform a "
+ "single point, a sum file, or an SBMT ellipse file. For a sum file, the SCOBJ vector is "
+ "transformed and the cx, cy, cz, and sz vectors are rotated. For SBMT ellipse files, only "
+ "the center of the ellipse is transformed. The size, orientation, and all other fields in the "
+ "file are unchanged.";
return TerrasaurTool.super.fullDescription(options, header, footer);
}
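
// A hypothetical invocation (sketch; file names are placeholders): rotate a model
// 90 degrees about +Z and write the result in OBJ format:
//
//   ShapeFormatConverter -input model.vtk -rotate 90,0,0,1 -output model.obj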
private enum COORDTYPE {
LATLON,
XYZ,
POLYDATA
}

private enum FORMATS {
ICQ,
LLR,
OBJ,
PDS,
PLT,
PLY,
STL,
VTK,
FITS,
SUM,
SBMT
}

private static Options defineOptions() {
Options options = TerrasaurTool.defineOptions();
options.addOption(
Option.builder("centerOfRotation")
.hasArg()
.desc(
"Subtract this point before applying rotation matrix, add back after. "
+ "Specify by three floating point numbers separated by commas. If not present default is (0,0,0).")
.build());
options.addOption(
Option.builder("decimate")
.hasArg()
.desc(
"Reduce the number of facets in a shape model. The argument should be between 0 and 1. "
+ "For example, if a model has 100 facets and the argument to -decimate is 0.90, "
+ "there will be approximately 10 facets after the decimation.")
.build());
options.addOption(
Option.builder("input")
.required()
.hasArg()
.desc(
"Required. Name of shape model to transform. Extension must be icq, fits, llr, obj, pds, plt, ply, sbmt, stl, sum, or vtk. "
+ "Alternately transform a single point using three floating point numbers separated "
+ "by commas to specify XYZ coordinates, or latitude, longitude in degrees separated by commas. "
+ "Transformed point will be written to stdout in the same format as the input string.")
.build());
options.addOption(
Option.builder("inputFormat")
.hasArg()
.desc(
"Format of input file. If not present format will be inferred from inputFile extension.")
.build());
options.addOption(
Option.builder("output")
.hasArg()
.desc(
"Required for all but single point input. Name of transformed file. "
+ "Extension must be obj, plt, sbmt, stl, sum, or vtk.")
.build());
options.addOption(
Option.builder("outputFormat")
.hasArg()
.desc(
"Format of output file. If not present format will be inferred from outputFile extension.")
.build());
options.addOption(
Option.builder("register")
.hasArg()
.desc("Use SVD to transform input file to best align with register file.")
.build());
options.addOption(
Option.builder("rotate")
.hasArg()
.desc(
"Rotate surface points and spacecraft position. "
+ "Specify by an angle (degrees) and a 3 element rotation axis vector (XYZ) "
+ "separated by commas.")
.build());
options.addOption(
Option.builder("rotateToPrincipalAxes")
.desc("Rotate body to align along its principal axes of inertia.")
.build());
options.addOption(
Option.builder("scale")
.hasArg()
.desc(
"Scale the shape model by <arg>. This can either be one value or three "
+ "separated by commas. One value scales all three axes uniformly, "
+ "three values scale the x, y, and z axes respectively. For example, "
+ "-scale 0.5,0.25,1.5 scales the model in the x dimension by 0.5, the "
+ "y dimension by 0.25, the z dimension by 1.5.")
.build());
options.addOption(
Option.builder("translate")
.hasArg()
.desc(
"Translate surface points and spacecraft position. "
+ "Specify by three floating point numbers separated by commas.")
.build());
options.addOption(
Option.builder("translateToCenter")
.desc("Translate body so that its center of mass is at the origin.")
.build());
options.addOption(
Option.builder("transform")
.hasArg()
.desc(
"Translate and rotate surface points and spacecraft position. "
+ "Specify a file containing a 4x4 combined translation/rotation matrix. The top left 3x3 matrix "
+ "is the rotation matrix. The top three entries in the right hand column are the translation "
+ "vector. The bottom row is always 0 0 0 1.")
.build());
return options;
}

public static void main(String[] args) throws Exception {

TerrasaurTool defaultOBJ = new ShapeFormatConverter();

Options options = defineOptions();

CommandLine cl = defaultOBJ.parseArgs(args, options);

Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
for (MessageLabel ml : startupMessages.keySet())
logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

NativeLibraryLoader.loadSpiceLibraries();
NativeLibraryLoader.loadVtkLibraries();

String filename = cl.getOptionValue("input");
COORDTYPE coordType = COORDTYPE.POLYDATA;
vtkPolyData polydata = null;
SumFile sumFile = null;
List<SBMTEllipseRecord> sbmtEllipse = null;

String extension = null;
if (cl.hasOption("inputFormat")) {
try {
extension =
FORMATS.valueOf(cl.getOptionValue("inputFormat").toUpperCase()).name().toLowerCase();
} catch (IllegalArgumentException e) {
logger.warn("Unsupported -inputFormat {}", cl.getOptionValue("inputFormat"));
}
}
if (extension == null) extension = FilenameUtils.getExtension(filename).toLowerCase();
switch (extension) {
case "icq", "llr", "obj", "pds", "plt", "ply", "stl", "vtk" ->
polydata = PolyDataUtil.loadShapeModel(filename, extension);
case "fits" -> polydata = PolyDataUtil.loadFITShapeModel(filename);
case "sum" -> {
List<String> lines = FileUtils.readLines(new File(filename), Charset.defaultCharset());
sumFile = SumFile.fromLines(lines);
}
case "sbmt" -> {
sbmtEllipse = new ArrayList<>();
vtkPoints points = new vtkPoints();
polydata = new vtkPolyData();
polydata.SetPoints(points);
for (String line : FileUtils.readLines(new File(filename), Charset.defaultCharset())) {
SBMTEllipseRecord record = SBMTEllipseRecord.fromString(line);
sbmtEllipse.add(record);
points.InsertNextPoint(record.x(), record.y(), record.z());
}
}
default -> {
// Single point
String[] params = filename.split(",");
vtkPoints points = new vtkPoints();
polydata = new vtkPolyData();
polydata.SetPoints(points);
if (params.length == 2) {
// note: input order is latitude, longitude; the Vector3D azimuthal constructor
// takes (azimuth, elevation), i.e. (longitude, latitude)
double[] array =
new Vector3D(
Math.toRadians(Double.parseDouble(params[1].trim())),
Math.toRadians(Double.parseDouble(params[0].trim())))
.toArray();
points.InsertNextPoint(array);
coordType = COORDTYPE.LATLON;
} else if (params.length == 3) {
double[] array = new double[3];
for (int i = 0; i < 3; i++) array[i] = Double.parseDouble(params[i].trim());
points.InsertNextPoint(array);
coordType = COORDTYPE.XYZ;
} else {
logger.error(
"Can't read input shape model {} with format {}", filename, extension.toUpperCase());
System.exit(0);
}
}
}

if (cl.hasOption("decimate") && polydata != null) {
double reduction = Double.parseDouble(cl.getOptionValue("decimate"));
if (reduction < 0) {
logger.printf(Level.WARN, "Argument to -decimate is %f! Setting to zero.", reduction);
reduction = 0;
}
if (reduction > 1) {
logger.printf(Level.WARN, "Argument to -decimate is %f! Setting to one.", reduction);
reduction = 1;
}
PolyDataUtil.decimatePolyData(polydata, reduction);
}

if (coordType == COORDTYPE.POLYDATA && !cl.hasOption("output")) {
logger.error(String.format("No output file specified for input file %s", filename));
System.exit(0);
}

Vector3 centerOfRotation = null;
Matrix33 rotation = null;
Vector3 translation = null;
Vector3 scale = new Vector3(1., 1., 1.);
for (Option option : cl.getOptions()) {
if (option.getOpt().equals("centerOfRotation"))
centerOfRotation =
MathConversions.toVector3(
VectorUtils.stringToVector3D(cl.getOptionValue("centerOfRotation")));

if (option.getOpt().equals("rotate"))
rotation =
MathConversions.toMatrix33(RotationUtils.stringToRotation(cl.getOptionValue("rotate")));

if (option.getOpt().equals("scale")) {
String scaleString = cl.getOptionValue("scale");
if (scaleString.contains(",")) {
scale = MathConversions.toVector3(VectorUtils.stringToVector3D(scaleString));
} else {
scale = scale.scale(Double.parseDouble(scaleString));
}
}

if (option.getOpt().equals("translate"))
translation =
MathConversions.toVector3(VectorUtils.stringToVector3D(cl.getOptionValue("translate")));

if (option.getOpt().equals("transform")) {
List<String> lines =
FileUtils.readLines(new File(cl.getOptionValue("transform")), Charset.defaultCharset());
Pair<Vector3D, Rotation> pair = RotationUtils.stringToTransform(lines);
translation = MathConversions.toVector3(pair.getKey());
rotation = MathConversions.toMatrix33(pair.getValue());
}
}

if (cl.hasOption("rotateToPrincipalAxes")) {
if (polydata != null) {
PolyDataStatistics stats = new PolyDataStatistics(polydata);
if (stats.isClosed()) {
ArrayList<double[]> axes = stats.getPrincipalAxes();
// make X primary, Y secondary
rotation = new Matrix33(new Vector3(axes.get(0)), 1, new Vector3(axes.get(1)), 2);
} else {
logger.warn("Shape is not closed, cannot determine principal axes.");
}
}
}

if (cl.hasOption("register")) {
String register = cl.getOptionValue("register");
vtkPolyData registeredPolydata = null;
extension = FilenameUtils.getExtension(register).toLowerCase();
if (extension.equals("llr")
|| extension.equals("obj")
|| extension.equals("pds")
|| extension.equals("plt")
|| extension.equals("ply")
|| extension.equals("stl")
|| extension.equals("vtk")) {
registeredPolydata = PolyDataUtil.loadShapeModelAndComputeNormals(register);
} else {
logger.error(String.format("Can't read input shape model for registration: %s", register));
System.exit(0);
}

if (registeredPolydata != null) {
Vector3D centerA = PolyDataUtil.computePolyDataCentroid(polydata);
Vector3D centerB = PolyDataUtil.computePolyDataCentroid(registeredPolydata);

vtkPoints points = polydata.GetPoints();
double[][] pointsA = new double[(int) points.GetNumberOfPoints()][3];
for (int i = 0; i < points.GetNumberOfPoints(); i++)
pointsA[i] = new Vector3D(points.GetPoint(i)).subtract(centerA).toArray();
points = registeredPolydata.GetPoints();

if (points.GetNumberOfPoints() != polydata.GetPoints().GetNumberOfPoints()) {
logger.error("registered polydata does not have the same number of points as input.");
System.exit(0);
}

double[][] pointsB = new double[(int) points.GetNumberOfPoints()][3];
for (int i = 0; i < points.GetNumberOfPoints(); i++)
pointsB[i] = new Vector3D(points.GetPoint(i)).subtract(centerB).toArray();
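
// Kabsch/Procrustes solution for the best-fit rotation between the two centered
// point sets: accumulate the cross-covariance H = sum(a_i * b_i^T), take its SVD,
// and form R = V * U^T, flipping the last column if the determinant is negative so
// that R is a proper rotation.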
double[][] H = new double[3][3];
for (int ii = 0; ii < points.GetNumberOfPoints(); ii++) {
for (int i = 0; i < 3; i++) {
for (int j = 0; j < 3; j++) {
H[i][j] += pointsA[ii][i] * pointsB[ii][j];
}
}
}

RealMatrix pointMatrix = new Array2DRowRealMatrix(H);

SingularValueDecomposition svd = new SingularValueDecomposition(pointMatrix);
RealMatrix uT = svd.getUT();
RealMatrix v = svd.getV();
RealMatrix R = v.multiply(uT);

if (new LUDecomposition(R).getDeterminant() < 0) {
for (int i = 0; i < 3; i++) {
R.multiplyEntry(i, 2, -1);
}
}
rotation = new Matrix33(R.getData());
translation = MathConversions.toVector3(centerB);
translation = translation.sub(rotation.mxv(MathConversions.toVector3(centerA)));
}
}

if (sumFile != null) {
if (rotation != null && translation != null)
sumFile.transform(
MathConversions.toVector3D(translation), MathConversions.toRotation(rotation));
} else {

Vector3 center;
if (polydata.GetNumberOfPoints() > 1) {
PolyDataStatistics stats = new PolyDataStatistics(polydata);
center = new Vector3(stats.getCentroid());
} else {
center = new Vector3(polydata.GetPoint(0));
}
if (cl.hasOption("translateToCenter")) translation = center.negate();

double[] values = new double[3];
for (int j = 0; j < 3; j++) values[j] = center.getElt(j) * scale.getElt(j);
Vector3 scaledCenter = new Vector3(values);
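
// Order of operations for each point: scale about the model center, then rotate
// about centerOfRotation, then translate. Scaling about the center and adding the
// scaled center back is equivalent to scaling about the origin.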
vtkPoints points = polydata.GetPoints();
for (int i = 0; i < points.GetNumberOfPoints(); i++) {
Vector3 thisPoint = new Vector3(points.GetPoint(i));
thisPoint = thisPoint.sub(center);
for (int j = 0; j < 3; j++) values[j] = thisPoint.getElt(j) * scale.getElt(j);
thisPoint = new Vector3(values);
thisPoint = thisPoint.add(scaledCenter);

if (rotation != null) {
if (centerOfRotation == null) centerOfRotation = new Vector3();
/*-
else {
System.out.printf("Center of rotation:\n%s\n", centerOfRotation.toString());
System.out.printf("-centerOfRotation %f,%f,%f\n", centerOfRotation.getElt(0),
centerOfRotation.getElt(1), centerOfRotation.getElt(2));
}
*/
thisPoint = rotation.mxv(thisPoint.sub(centerOfRotation)).add(centerOfRotation);
}
if (translation != null) thisPoint = thisPoint.add(translation);
points.SetPoint(i, thisPoint.toArray());
}
}

/*-
if (rotation != null) {
AxisAndAngle aaa = new AxisAndAngle(rotation);
System.out.printf("Rotation:\n%s\n", rotation.toString());
System.out.printf("-rotate %.5e,%.5e,%.5e,%.5e\n", Math.toDegrees(aaa.getAngle()),
aaa.getAxis().getElt(0), aaa.getAxis().getElt(1), aaa.getAxis().getElt(2));
}

if (translation != null) {
System.out.printf("Translation:\n%s\n", translation.toString());
System.out.printf("-translate %.5e,%.5e,%.5e\n", translation.getElt(0), translation.getElt(1),
translation.getElt(2));
}
*/

if (coordType == COORDTYPE.LATLON) {
double[] pt = new double[3];
polydata.GetPoint(0, pt);
Vector3D point = new Vector3D(pt);
double lon = Math.toDegrees(point.getAlpha());
if (lon < 0) lon += 360;
System.out.printf("%.16f,%.16f\n", Math.toDegrees(point.getDelta()), lon);
} else if (coordType == COORDTYPE.XYZ) {
double[] pt = new double[3];
polydata.GetPoint(0, pt);
System.out.printf("%.16f,%.16f,%.16f\n", pt[0], pt[1], pt[2]);
} else {
filename = cl.getOptionValue("output");
extension = null;
if (cl.hasOption("outputFormat")) {
try {
extension =
FORMATS.valueOf(cl.getOptionValue("outputFormat").toUpperCase()).name().toLowerCase();
} catch (IllegalArgumentException e) {
logger.warn("Unsupported -outputFormat {}", cl.getOptionValue("outputFormat"));
}
}
if (extension == null) extension = FilenameUtils.getExtension(filename).toLowerCase();

switch (extension) {
case "vtk" -> PolyDataUtil.saveShapeModelAsVTK(polydata, filename);
case "obj" -> PolyDataUtil.saveShapeModelAsOBJ(polydata, filename);
case "plt" -> PolyDataUtil.saveShapeModelAsPLT(polydata, filename);
case "stl" -> PolyDataUtil.saveShapeModelAsSTL(polydata, filename);
case "sum" -> {
try (PrintWriter pw = new PrintWriter(filename)) {
pw.print(sumFile.toString());
}
}
case "sbmt" -> {
if (sbmtEllipse == null) {
logger.error("No input SBMT ellipse specified!");
System.exit(0);
}
double[] pt = new double[3];
List<SBMTEllipseRecord> transformedRecords = new ArrayList<>();
// each transformed point was inserted in record order; index them in step
int recordIndex = 0;
for (SBMTEllipseRecord record : sbmtEllipse) {
polydata.GetPoint(recordIndex++, pt);
Vector3D point = new Vector3D(pt);
double lon = Math.toDegrees(point.getAlpha());
if (lon < 0) lon += 360;
Builder builder = ImmutableSBMTEllipseRecord.builder().from(record);
builder.x(point.getX());
builder.y(point.getY());
builder.z(point.getZ());
builder.lat(Math.toDegrees(point.getDelta()));
builder.lon(lon);
builder.radius(point.getNorm());

transformedRecords.add(builder.build());
}
try (PrintWriter pw = new PrintWriter(filename)) {
for (SBMTEllipseRecord record : transformedRecords) pw.println(record.toString());
}
}
default -> {
logger.error(
"Can't write output shape model {} with format {}",
filename,
extension.toUpperCase());
System.exit(0);
}
}
logger.info("Wrote {}", filename);
}
}
}
461
src/main/java/terrasaur/apps/SumFilesFromFlyby.java
Normal file
@@ -0,0 +1,461 @@
package terrasaur.apps;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.*;
import java.util.function.Function;
import net.jafama.FastMath;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.math3.geometry.euclidean.threed.Rotation;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.apache.commons.text.WordUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.spi.StandardLevel;
import picante.math.intervals.IntervalSet;
import picante.math.intervals.UnwritableInterval;
import picante.time.TimeConversion;
import spice.basic.KernelDatabase;
import spice.basic.SpiceException;
import spice.basic.SpiceQuaternion;
import spice.basic.TDBTime;
import spice.basic.Vector3;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.*;
import terrasaur.utils.ImmutableSumFile.Builder;
import terrasaur.utils.math.MathConversions;
import terrasaur.utils.math.RotationUtils;
import terrasaur.utils.spice.SpiceBundle;

public class SumFilesFromFlyby implements TerrasaurTool {

private static final Logger logger = LogManager.getLogger();

@Override
public String shortDescription() {
return "Create sumfiles for a simplified flyby.";
}

@Override
public String fullDescription(Options options) {

String header = "";
String footer =
"""

This tool creates sumfiles at points along a straight line trajectory past a body to be imaged.

Assumptions made:
1) The flyby is entirely in the equatorial (XY) plane.
2) The Sun lies along the body-fixed X axis.
3) The flyby happens on the eastern side of the body.
4) Rotation and orbital motion of the body are ignored.
5) Image (0,0) is at the upper left. Body north is "up".

Given these assumptions, the trajectory can be specified by the closest approach distance, the phase angle at closest approach, and the flyby speed.
""";
return TerrasaurTool.super.fullDescription(options, header, footer);
}
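
// A hypothetical invocation (sketch; the template name is a placeholder): a 10 km/s
// flyby at 100 km distance and 45 degrees phase, sampled 60 seconds on either side
// of closest approach:
//
//   SumFilesFromFlyby -template D717506120G0.SUM -distance 100 -phase 45 -speed 10 -times -60 0 60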
private SumFile sumfile;
private Function<Double, Vector3D> scPosFunc;
private Function<Double, Vector3D> scVelFunc;

private SumFilesFromFlyby() {}

public SumFilesFromFlyby(SumFile sumfile, double distance, double phase, double speed) {
this.sumfile = sumfile;

// given phase angle p, closest approach point is (cos p, sin p)
Vector3D closestApproach =
new Vector3D(FastMath.cos(phase), FastMath.sin(phase), 0.).scalarMultiply(distance);
Vector3D velocity =
new Vector3D(-FastMath.sin(phase), FastMath.cos(phase), 0.).scalarMultiply(speed);

/*-
* Assumptions:
*
* Sun lies along the X axis
* flyby is in the equatorial (XY) plane
*
*/
scPosFunc = t -> closestApproach.add(velocity.scalarMultiply(t));

scVelFunc = t -> velocity;
}

public SumFile getSumFile(double t) {

TimeConversion tc = TimeConversion.createUsingInternalConstants();

Builder builder = ImmutableSumFile.builder().from(sumfile);
double imageTime = t + tc.utcStringToTDB(sumfile.utcString());
builder.picnm(String.format("%s%d", sumfile.picnm(), (int) Math.round(imageTime)));
builder.utcString(tc.format("C").apply(imageTime));

Vector3D scPos = scPosFunc.apply(t);

builder.scobj(scPos.negate());

Rotation bodyFixedToCamera = RotationUtils.KprimaryJsecondary(scPos.negate(), Vector3D.MINUS_K);
builder.cx(bodyFixedToCamera.applyInverseTo(Vector3D.PLUS_I));
builder.cy(bodyFixedToCamera.applyInverseTo(Vector3D.PLUS_J));
builder.cz(bodyFixedToCamera.applyInverseTo(Vector3D.PLUS_K));
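// The cx, cy, cz vectors above are the camera axes expressed in the body-fixed
// frame; the boresight (cz) points from the spacecraft toward the body center, and
// using MINUS_K as the secondary direction keeps body north "up" in the image, per
// the assumptions listed in fullDescription().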

builder.sz(Vector3D.PLUS_I.scalarMultiply(1e8));

SumFile s = builder.build();

logger.info(
"{}: S/C position {}, phase {}",
s.utcString(),
s.scobj().negate(),
Math.toDegrees(Vector3D.angle(s.scobj().negate(), s.sz())));

return s;
}

private String writeMSOPCKFiles(
String basename, IntervalSet intervals, int frameID, SpiceBundle bundle)
throws SpiceException {

File commentFile = new File(basename + "_msopck-comments.txt");
if (commentFile.exists()) commentFile.delete();
String setupFile = basename + "_msopck.setup";
String inputFile = basename + "_msopck.inp";

try (PrintWriter pw = new PrintWriter(commentFile)) {

String allComments = "";
for (String comment : allComments.split("\\r?\\n")) pw.println(WordUtils.wrap(comment, 80));
} catch (FileNotFoundException e) {
logger.error(e.getLocalizedMessage(), e);
}

File fk = bundle.findKernel(String.format(".*%s\\.tf", basename));
File lsk = bundle.findKernel(".*naif[0-9]{4}\\.tls");

Map<String, String> map = new TreeMap<>();
map.put("LSK_FILE_NAME", "'" + lsk + "'");
map.put("MAKE_FAKE_SCLK", String.format("'%s.tsc'", basename));
map.put("CK_TYPE", "3");
map.put("COMMENTS_FILE_NAME", String.format("'%s'", commentFile.getPath()));
map.put("INSTRUMENT_ID", Integer.toString(frameID));
map.put("REFERENCE_FRAME_NAME", "'J2000'");
map.put("FRAMES_FILE_NAME", "'" + fk.getPath() + "'");
map.put("ANGULAR_RATE_PRESENT", "'MAKE UP/NO AVERAGING'");
map.put("INPUT_TIME_TYPE", "'UTC'");
map.put("INPUT_DATA_TYPE", "'SPICE QUATERNIONS'");
map.put("PRODUCER_ID", "'Hari.Nair@jhuapl.edu'");

try (PrintWriter pw = new PrintWriter(setupFile)) {
pw.println("\\begindata");
for (String key : map.keySet()) {
pw.printf("%s = %s\n", key, map.get(key));
}
} catch (FileNotFoundException e) {
logger.error(e.getLocalizedMessage(), e);
}

NavigableMap<Double, SpiceQuaternion> attitudeMap = new TreeMap<>();

double t0 = bundle.getTimeConversion().utcStringToTDB(sumfile.utcString());

for (UnwritableInterval interval : intervals) {
for (double t = interval.getBegin(); t < interval.getEnd(); t += interval.getLength() / 100) {

double imageTime = t + t0;

Vector3D scPos = scPosFunc.apply(t);
SpiceQuaternion q =
new SpiceQuaternion(
MathConversions.toMatrix33(
RotationUtils.KprimaryJsecondary(scPos.negate(), Vector3D.MINUS_K)));

attitudeMap.put(imageTime, q);
}
}

try (PrintWriter pw = new PrintWriter(new FileWriter(inputFile))) {
for (double t : attitudeMap.keySet()) {
SpiceQuaternion q = attitudeMap.get(t);
Vector3 v = q.getVector();
pw.printf(
"%s %.14e %.14e %.14e %.14e\n",
new TDBTime(t).toUTCString("ISOC", 6),
q.getScalar(),
v.getElt(0),
v.getElt(1),
v.getElt(2));
}
} catch (IOException e) {
logger.error(e.getLocalizedMessage(), e);
}

return String.format("msopck %s %s %s.bc", setupFile, inputFile, basename);
}

/**
* @param basename file basename
* @param intervals time intervals
* @param centerID NAIF id of center body
* @param bundle SPICE bundle
* @return command to run MKSPK
*/
private String writeMKSPKFiles(
String basename, IntervalSet intervals, int centerID, SpiceBundle bundle) {

String commentFile = basename + "_mkspk-comments.txt";
String setupFile = basename + "_mkspk.setup";
String inputFile = basename + "_mkspk.inp";

try (PrintWriter pw = new PrintWriter(commentFile)) {

String allComments = "";
for (String comment : allComments.split("\\r?\\n")) pw.println(WordUtils.wrap(comment, 80));
} catch (FileNotFoundException e) {
logger.error(e.getLocalizedMessage(), e);
}
File lsk = bundle.findKernel(".*naif[0-9]{4}\\.tls");
|
||||
|
||||
Map<String, String> map = new TreeMap<>();
|
||||
map.put("INPUT_DATA_TYPE", "'STATES'");
|
||||
map.put("OUTPUT_SPK_TYPE", "13"); // hermite polynomial, unevenly spaced in time
|
||||
map.put("OBJECT_ID", "-999");
|
||||
map.put("CENTER_ID", String.format("%d", centerID));
|
||||
map.put("COMMENT_FILE", String.format("'%s'", commentFile));
|
||||
map.put("REF_FRAME_NAME", "'J2000'");
|
||||
map.put("PRODUCER_ID", "'Hari.Nair@jhuapl.edu'");
|
||||
map.put("DATA_ORDER", "'EPOCH X Y Z VX VY VZ'");
|
||||
map.put("DATA_DELIMITER", "' '");
|
||||
map.put("LEAPSECONDS_FILE", String.format("'%s'", lsk));
|
||||
map.put("TIME_WRAPPER", "'# ETSECONDS'");
|
||||
map.put("POLYNOM_DEGREE", "7");
|
||||
map.put("SEGMENT_ID", "'SPK_STATES_13'");
|
||||
map.put("LINES_PER_RECORD", "1");
|
||||
try (PrintWriter pw = new PrintWriter(setupFile)) {
|
||||
pw.println("\\begindata");
|
||||
for (String key : map.keySet()) {
|
||||
pw.printf("%s = %s\n", key, map.get(key));
|
||||
}
|
||||
} catch (FileNotFoundException e) {
|
||||
logger.error(e.getLocalizedMessage(), e);
|
||||
}
|
||||
|
||||
double t0 = bundle.getTimeConversion().utcStringToTDB(sumfile.utcString());
|
||||
|
||||
try (PrintWriter pw = new PrintWriter(inputFile)) {
|
||||
for (UnwritableInterval interval : intervals) {
|
||||
for (double t = interval.getBegin();
|
||||
t < interval.getEnd();
|
||||
t += interval.getLength() / 100) {
|
||||
|
||||
Vector3D scPos = scPosFunc.apply(t);
|
||||
Vector3D scVel = scVelFunc.apply(t);
|
||||
pw.printf(
|
||||
"%.16e %.16e %.16e %.16e %.16e %.16e %.16e\n",
|
||||
t + t0,
|
||||
scPos.getX(),
|
||||
scPos.getY(),
|
||||
scPos.getZ(),
|
||||
scVel.getX(),
|
||||
scVel.getY(),
|
||||
scVel.getZ());
|
||||
}
|
||||
}
|
||||
} catch (FileNotFoundException e) {
|
||||
logger.error(e.getLocalizedMessage(), e);
|
||||
}
|
||||
return String.format(
|
||||
"mkspk -setup %s -input %s -output %s.bsp", setupFile, inputFile, basename);
|
||||
}
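
  /*
   * For reference, a minimal sketch of the setup file this method writes,
   * assuming a hypothetical basename "flyby" and centerID 10. The keys come
   * from the map above and are emitted in alphabetical order by the TreeMap:
   *
   *   \begindata
   *   CENTER_ID = 10
   *   COMMENT_FILE = 'flyby_mkspk-comments.txt'
   *   DATA_DELIMITER = ' '
   *   DATA_ORDER = 'EPOCH X Y Z VX VY VZ'
   *   INPUT_DATA_TYPE = 'STATES'
   *   ...
   *
   * The returned string is the mkspk command to run against these files.
   */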

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("distance")
            .hasArg()
            .required()
            .desc("Required. Flyby distance from body center in km.")
            .build());
    options.addOption(
        Option.builder("logFile")
            .hasArg()
            .desc("If present, save screen output to log file.")
            .build());
    StringBuilder sb = new StringBuilder();
    for (StandardLevel l : StandardLevel.values()) sb.append(String.format("%s ", l.name()));
    options.addOption(
        Option.builder("logLevel")
            .hasArg()
            .desc(
                "If present, print messages above selected priority. Valid values are "
                    + sb.toString().trim()
                    + ". Default is INFO.")
            .build());
    options.addOption(
        Option.builder("mk")
            .hasArg()
            .desc(
                "Path to NAIF metakernel. This should contain the LSK, the FK for the central body, and the FK for the spacecraft. This is used by -mkspk and -msopck.")
            .build());
    options.addOption(
        Option.builder("mkspk")
            .hasArg()
            .desc(
                "If present, create input files for MKSPK. The argument is the NAIF id for the central body (e.g. 10 for the Sun). This option requires -mk.")
            .build());
    options.addOption(
        Option.builder("msopck")
            .desc("If present, create input files for MSOPCK. This option requires -mk.")
            .build());
    options.addOption(
        Option.builder("phase")
            .hasArg()
            .required()
            .desc("Required. Phase angle at closest approach in degrees.")
            .build());
    options.addOption(
        Option.builder("speed")
            .hasArg()
            .required()
            .desc("Required. Flyby speed at closest approach in km/s.")
            .build());
    options.addOption(
        Option.builder("template")
            .hasArg()
            .required()
            .desc(
                "Required. An existing sumfile to use as a template. Camera parameters are taken from this "
                    + "file, while camera position and orientation are calculated.")
            .build());
    options.addOption(
        Option.builder("times")
            .hasArgs()
            .desc(
                "If present, list of times separated by white space. In seconds, relative to closest approach.")
            .build());
    return options;
  }
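
  /*
   * A hypothetical invocation, for illustration only (option names are the
   * ones defined above; file names and values are placeholders):
   *
   *   SumFilesFromFlyby -template M123456789.SUM -distance 100 -phase 45 \
   *       -speed 5 -times -60 0 60 -mk kernels.tm -mkspk 10 -msopck
   *
   * This writes one sumfile per requested time and logs the mkspk and msopck
   * commands needed to build the corresponding SPK and CK.
   */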

  public static void main(String[] args) throws IOException, SpiceException {
    TerrasaurTool defaultOBJ = new SumFilesFromFlyby();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    double phase = Double.parseDouble(cl.getOptionValue("phase"));
    if (phase < 0 || phase > 180) {
      logger.error("Phase angle {} out of range [0, 180]", phase);
      System.exit(0);
    }

    String sumFileTemplate = cl.getOptionValue("template");

    String base = FilenameUtils.getBaseName(sumFileTemplate);
    String ext = FilenameUtils.getExtension(sumFileTemplate);
    SumFilesFromFlyby app =
        new SumFilesFromFlyby(
            SumFile.fromFile(new File(sumFileTemplate)),
            Double.parseDouble(cl.getOptionValue("distance")),
            Math.toRadians(phase),
            Double.parseDouble(cl.getOptionValue("speed")));

    NavigableSet<Double> times = new TreeSet<>();
    if (cl.hasOption("times")) {
      for (String s : cl.getOptionValues("times")) times.add(Double.parseDouble(s));
    } else times.add(0.);

    SpiceBundle bundle = null;
    if (cl.hasOption("mk")) {
      NativeLibraryLoader.loadSpiceLibraries();
      bundle =
          new SpiceBundle.Builder()
              .addMetakernels(Collections.singletonList(cl.getOptionValue("mk")))
              .build();
      KernelDatabase.load(cl.getOptionValue("mk"));
    }

    TimeConversion tc =
        bundle == null ? TimeConversion.createUsingInternalConstants() : bundle.getTimeConversion();

    for (double t : times) {
      SumFile s = app.getSumFile(t);

      try (PrintWriter pw =
          new PrintWriter(
              String.format("%s_%d.%s", base, (int) tc.utcStringToTDB(s.utcString()), ext))) {

        pw.println(s);

      } catch (FileNotFoundException e) {
        logger.error(e.getLocalizedMessage(), e);
      }
    }

    if (cl.hasOption("mkspk")) {
      if (bundle == null) {
        logger.error("Need -mk to use -mkspk!");
      } else {
        IntervalSet.Builder builder = IntervalSet.builder();
        for (Double t : times) {
          Double next = times.higher(t);
          if (next != null) builder.add(new UnwritableInterval(t, next));
        }
        int centerID = Integer.parseInt(cl.getOptionValue("mkspk"));

        String command = app.writeMKSPKFiles(base, builder.build(), centerID, bundle);
        logger.info("Command to create SPK:\n{}", command);
      }
    }

    if (cl.hasOption("msopck")) {
      if (bundle == null) {
        logger.error("Need -mk to use -msopck!");
      } else {
        IntervalSet.Builder builder = IntervalSet.builder();
        for (Double t : times) {
          Double next = times.higher(t);
          if (next != null) builder.add(new UnwritableInterval(t, next));
        }

        final int scID = -999;
        final int frameID = scID * 1000;

        File spacecraftFK = new File(String.format("%s.tf", base));
        try (PrintWriter pw = new PrintWriter(spacecraftFK)) {
          pw.println("\\begindata");
          pw.printf("FRAME_%s_FIXED = %d\n", base, frameID);
          pw.printf("FRAME_%d_NAME = '%s_FIXED'\n", frameID, base);
          pw.printf("FRAME_%d_CLASS = 3\n", frameID);
          pw.printf("FRAME_%d_CENTER = %d\n", frameID, scID);
          pw.printf("FRAME_%d_CLASS_ID = %d\n", frameID, frameID);
          pw.println("\\begintext");
        }

        List<File> kernels = new ArrayList<>(bundle.getKernels());
        kernels.add(spacecraftFK);

        bundle = new SpiceBundle.Builder().addKernelList(kernels).build();

        File spacecraftSCLK = new File(String.format("%s.tsc", base));
        if (spacecraftSCLK.exists()) spacecraftSCLK.delete();

        String command = app.writeMSOPCKFiles(base, builder.build(), frameID, bundle);
        logger.info("Command to create CK:\n{}", command);
      }
    }
  }
}
339
src/main/java/terrasaur/apps/TileLookup.java
Normal file
@@ -0,0 +1,339 @@
package terrasaur.apps;

import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.io.FilenameUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import picante.math.coords.LatitudinalVector;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.saaPlotLib.canvas.MapPlot;
import terrasaur.utils.saaPlotLib.canvas.PlotCanvas;
import terrasaur.utils.saaPlotLib.canvas.axis.AxisX;
import terrasaur.utils.saaPlotLib.canvas.axis.AxisY;
import terrasaur.utils.saaPlotLib.canvas.projection.Projection;
import terrasaur.utils.saaPlotLib.canvas.projection.ProjectionOrthographic;
import terrasaur.utils.saaPlotLib.canvas.projection.ProjectionRectangular;
import terrasaur.utils.saaPlotLib.colorMaps.ColorRamp;
import terrasaur.utils.saaPlotLib.colorMaps.ImmutableColorBar;
import terrasaur.utils.saaPlotLib.colorMaps.ImmutableColorRamp;
import terrasaur.utils.saaPlotLib.config.ImmutablePlotConfig;
import terrasaur.utils.saaPlotLib.config.PlotConfig;
import terrasaur.utils.saaPlotLib.data.DiscreteDataSet;
import terrasaur.utils.saaPlotLib.data.ImmutableAnnotation;
import terrasaur.utils.tessellation.FibonacciSphere;

/**
 * Given a number of tiles on a spherical tessellation, look up the tile index closest to a given
 * point on the sphere.
 *
 * @author nairah1
 */
public class TileLookup implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  @Override
  public String shortDescription() {
    return "Locate tiles on the unit sphere.";
  }

  @Override
  public String fullDescription(Options options) {
    String header = "";
    String footer = "";
    return TerrasaurTool.super.fullDescription(options, header, footer);
  }

  /**
   * Given the base database name and a tile number, return the path to that database file. For
   * example:
   *
   * <pre>
   * System.out.println(TileLookup.getDBName("/path/to/database/ola.db", 6));
   * System.out.println(TileLookup.getDBName("./ola.db", 6));
   * System.out.println(TileLookup.getDBName("ola.db", 6));
   *
   * /path/to/database/ola.6.db
   * ./ola.6.db
   * ./ola.6.db
   * </pre>
   *
   * @param dbName basename for database (e.g. /path/to/database/ola.db)
   * @param tile tile index (e.g. 6)
   * @return path to database file (e.g. /path/to/database/ola.6.db)
   */
  public static String getDBName(String dbName, int tile) {
    String fullPath = FilenameUtils.getFullPath(dbName);
    if (fullPath.trim().isEmpty()) fullPath = ".";
    if (!fullPath.endsWith(File.separator)) fullPath += File.separator;
    return String.format(
        "%s%s.%d.%s",
        fullPath, FilenameUtils.getBaseName(dbName), tile, FilenameUtils.getExtension(dbName));
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("nTiles")
            .hasArg()
            .required()
            .desc("Number of points covering the sphere.")
            .build());
    options.addOption(
        Option.builder("printCoords")
            .desc(
                "Print a table of points along with their coordinates. Takes precedence over -printStats, -printDistance, and -png.")
            .build());
    options.addOption(
        Option.builder("printDistance")
            .hasArg()
            .desc(
                "Print a table of points sorted by distance from the input point. "
                    + "Format of the input point is longitude,latitude in degrees, comma separated without spaces. Takes precedence over -png.")
            .build());
    options.addOption(
        Option.builder("printStats")
            .desc(
                "Print statistics on the distances (in degrees) between each point and its nearest neighbor. Takes precedence over -printDistance and -png.")
            .build());
    options.addOption(
        Option.builder("png")
            .hasArg()
            .desc(
                "Plot points and distance to nearest point in degrees. Argument is the name of the PNG file to write.")
            .build());
    return options;
  }
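
  /*
   * A hypothetical invocation, for illustration (options as defined above):
   *
   *   TileLookup -nTiles 100 -printDistance 30.0,-15.0
   *
   * This prints the 100 tile centers sorted by angular distance from
   * longitude 30, latitude -15 degrees.
   */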

  public static void main(String[] args) {
    TerrasaurTool defaultOBJ = new TileLookup();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    final int npts = Integer.parseInt(cl.getOptionValue("nTiles"));
    FibonacciSphere fs = new FibonacciSphere(npts);

    if (cl.hasOption("printCoords")) {
      String header = String.format("%7s, %10s, %9s", "# index", "longitude", "latitude");
      System.out.println(header);
      // System.out.printf("%7s, %10s, %9s, %6s\n", "# index", "longitude", "latitude", "mapola");
      for (int i = 0; i < npts; i++) {
        LatitudinalVector lv = fs.getTileCenter(i);
        double lon = Math.toDegrees(lv.getLongitude());
        if (lon < 0) lon += 360;
        double lat = Math.toDegrees(lv.getLatitude());
        System.out.printf("%7d, %10.5f, %9.5f\n", i, lon, lat);
      }
      System.exit(0);
    }

    if (cl.hasOption("printStats")) {
      System.out.println(
          "Statistics on distances between each point and its nearest neighbor (degrees):");
      System.out.println(fs.getDistanceStats());
      System.exit(0);
    }

    if (cl.hasOption("printDistance")) {
      String[] parts = cl.getOptionValue("printDistance").split(",");
      double lon = Math.toRadians(Double.parseDouble(parts[0].trim()));
      double lat = Math.toRadians(Double.parseDouble(parts[1].trim()));
      LatitudinalVector lv = new LatitudinalVector(1, lat, lon);
      NavigableMap<Double, Integer> distanceMap = fs.getDistanceMap(lv);
      System.out.printf("%11s, %5s, %10s, %9s\n", "# distance", "index", "longitude", "latitude");
      System.out.printf("%11s, %5s, %10s, %9s\n", "# (degrees)", "", "(degrees)", "(degrees)");
      for (Double dist : distanceMap.keySet()) {
        int index = distanceMap.get(dist);
        lv = fs.getTileCenter(index);
        System.out.printf(
            "%11.5f, %5d, %10.5f, %9.5f\n",
            Math.toDegrees(dist),
            index,
            Math.toDegrees(lv.getLongitude()),
            Math.toDegrees(lv.getLatitude()));
      }
      System.exit(0);
    }

    if (cl.hasOption("png")) {
      PlotConfig config = ImmutablePlotConfig.builder().width(1000).height(1000).build();

      String title = String.format("Fibonacci Sphere, n = %d, ", npts);

      Map<String, Projection> projections = new LinkedHashMap<>();
      projections.put(
          title + "Rectangular Projection",
          new ProjectionRectangular(config.width(), config.height() / 2));
      projections.put(
          title + "Orthographic Projection (0, 90)",
          new ProjectionOrthographic(
              config.width(), config.height(), new LatitudinalVector(1, Math.PI / 2, 0)));
      projections.put(
          title + "Orthographic Projection (0, 0)",
          new ProjectionOrthographic(
              config.width(), config.height(), new LatitudinalVector(1, 0, 0)));
      projections.put(
          title + "Orthographic Projection (90, 0)",
          new ProjectionOrthographic(
              config.width(), config.height(), new LatitudinalVector(1, 0, Math.PI / 2)));
      projections.put(
          title + "Orthographic Projection (180, 0)",
          new ProjectionOrthographic(
              config.width(), config.height(), new LatitudinalVector(1, 0, Math.PI)));
      projections.put(
          title + "Orthographic Projection (270, 0)",
          new ProjectionOrthographic(
              config.width(), config.height(), new LatitudinalVector(1, 0, 3 * Math.PI / 2)));
      projections.put(
          title + "Orthographic Projection (0, -90)",
          new ProjectionOrthographic(
              config.width(), config.height(), new LatitudinalVector(1, -Math.PI / 2, 0)));

      final int nColors = 6;
      ColorRamp ramp = ColorRamp.createLinear(1, nColors - 1);
      List<Color> colors = new ArrayList<>();
      colors.add(Color.BLACK);
      for (int i = 1; i < nColors; i++) colors.add(ramp.getColor(i));
      colors.add(Color.WHITE);
      ramp = ImmutableColorRamp.builder().min(0).max(nColors).colors(colors).build();

      double radius = fs.getDistanceStats().getMax();
      ramp = ColorRamp.createLinear(0, radius).addLimitColors();

      ArrayList<BufferedImage> images = new ArrayList<>();
      for (String t : projections.keySet()) {
        config = ImmutablePlotConfig.builder().from(config).title(t).build();
        Projection p = projections.get(t);

        if (p instanceof ProjectionRectangular)
          config = ImmutablePlotConfig.builder().from(config).height(500).build();
        else config = ImmutablePlotConfig.builder().from(config).height(1000).build();

        MapPlot canvas = new MapPlot(config, p);
        AxisX xLowerAxis = new AxisX(0, 360, "Longitude (degrees)", "%.0fE");
        AxisY yLeftAxis = new AxisY(-90, 90, "Latitude (degrees)", "%.0f");

        canvas.drawTitle();
        canvas.setAxes(xLowerAxis, yLeftAxis);
        // canvas.drawAxes();

        BufferedImage image = canvas.getImage();
        for (int i = 0; i < config.width(); i++) {
          for (int j = 0; j < config.height(); j++) {
            LatitudinalVector lv = p.pixelToSpherical(i, j);
            if (lv == null) continue;
            double closestDistance = Math.toDegrees(fs.getNearest(lv).getKey());
            // int numPoints = fs.getDistanceMap(lv).subMap(0., Math.toRadians(radius)).size();
            image.setRGB(
                config.leftMargin() + i,
                config.topMargin() + j,
                ramp.getColor(closestDistance).getRGB());
          }
        }

        DiscreteDataSet points = new DiscreteDataSet("");
        for (int i = 0; i < fs.getNumTiles(); i++) {
          LatitudinalVector lv = fs.getTileCenter(i);
          points.add(lv.getLongitude(), lv.getLatitude());
        }

        if (p instanceof ProjectionRectangular) {
          canvas.drawColorBar(
              ImmutableColorBar.builder()
                  .rect(
                      new Rectangle(
                          canvas.getPageWidth() - 60, config.topMargin(), 10, config.height()))
                  .ramp(ramp)
                  .numTicks(nColors + 1)
                  .tickFunction(aDouble -> String.format("%.1f", aDouble))
                  .build());

          // for (int i = 0; i < fs.getNumTiles(); i++) {
          //   LatitudinalVector lv = fs.getTileCenter(i);
          //   canvas.drawCircle(lv, radius, Math.toRadians(1), Color.RED);
          // }
        }

        for (int i = 0; i < fs.getNumTiles(); i++) {
          LatitudinalVector lv = fs.getTileCenter(i);
          canvas.addAnnotation(
              ImmutableAnnotation.builder().text(String.format("%d", i)).build(),
              lv.getLongitude(),
              lv.getLatitude());
        }

        images.add(canvas.getImage());
      }

      int width = 2400;
      int height = 2400;
      BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);

      Graphics2D g = image.createGraphics();
      g.setRenderingHint(
          RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
      int imageWidth = width;
      int imageHeight = height / 3;
      g.drawImage(
          images.getFirst(),
          width / 6,
          0,
          5 * width / 6,
          imageHeight,
          0,
          0,
          images.getFirst().getWidth(),
          images.getFirst().getHeight(),
          null);

      imageWidth = width / 3;
      for (int i = 1; i < 4; i++) {
        g.drawImage(
            images.get(i),
            (i - 1) * imageWidth,
            imageHeight,
            i * imageWidth,
            2 * imageHeight,
            0,
            0,
            images.get(i).getWidth(),
            images.get(i).getHeight(),
            null);
      }
      for (int i = 4; i < 7; i++) {
        g.drawImage(
            images.get(i),
            (i - 4) * imageWidth,
            2 * imageHeight,
            (i - 3) * imageWidth,
            3 * imageHeight,
            0,
            0,
            images.get(i).getWidth(),
            images.get(i).getHeight(),
            null);
      }
      g.dispose();

      PlotCanvas.writeImage(cl.getOptionValue("png"), image);
    }
  }
}
154
src/main/java/terrasaur/apps/TransformFrame.java
Normal file
@@ -0,0 +1,154 @@
package terrasaur.apps;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.charset.Charset;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.spi.StandardLevel;
import spice.basic.KernelDatabase;
import spice.basic.Matrix33;
import spice.basic.ReferenceFrame;
import spice.basic.SpiceErrorException;
import spice.basic.SpiceException;
import spice.basic.TDBTime;
import spice.basic.Vector3;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.NativeLibraryLoader;
import terrasaur.utils.SPICEUtil;

public class TransformFrame implements TerrasaurTool {
  private static final Logger logger = LogManager.getLogger();

  @Override
  public String shortDescription() {
    return "Transform coordinates between reference frames.";
  }

  @Override
  public String fullDescription(Options options) {
    String header = "";
    String footer = "\nThis program transforms coordinates between reference frames.\n";
    return TerrasaurTool.super.fullDescription(options, header, footer);
  }

  private NavigableMap<TDBTime, Vector3> pointsIn;
  private NavigableMap<TDBTime, Vector3> pointsOut;

  public TransformFrame() {}

  public void setPoints(NavigableMap<TDBTime, Vector3> pointsIn) {
    this.pointsIn = pointsIn;
  }

  public void transformCoordinates(String inFrame, String outFrame) {
    try {
      ReferenceFrame from = new ReferenceFrame(inFrame);
      ReferenceFrame to = new ReferenceFrame(outFrame);
      pointsOut = new TreeMap<>(SPICEUtil.tdbComparator);
      for (TDBTime t : pointsIn.keySet()) {
        Matrix33 transform = from.getPositionTransformation(to, t);
        pointsOut.put(t, transform.mxv(pointsIn.get(t)));
      }
    } catch (SpiceException e) {
      logger.error(e.getLocalizedMessage());
    }
  }

  public void write(String outFile) {
    try (PrintWriter pw = new PrintWriter(outFile)) {
      for (TDBTime t : pointsOut.keySet()) {
        Vector3 v = pointsOut.get(t);
        pw.printf(
            "%.6f,%.6e,%.6e,%.6e\n", t.getTDBSeconds(), v.getElt(0), v.getElt(1), v.getElt(2));
      }
    } catch (FileNotFoundException | SpiceException e) {
      logger.error(e.getLocalizedMessage());
    }
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("inFile")
            .required()
            .hasArg()
            .desc("Required. Text file containing comma separated t, x, y, z values. Time is ET.")
            .build());
    options.addOption(
        Option.builder("inFrame")
            .required()
            .hasArg()
            .desc("Required. Name of inFile reference frame.")
            .build());
    options.addOption(
        Option.builder("outFile")
            .required()
            .hasArg()
            .desc("Required. Name of output file. It will be in the same format as inFile.")
            .build());
    options.addOption(
        Option.builder("outFrame")
            .required()
            .hasArg()
            .desc("Required. Name of outFile reference frame.")
            .build());
    options.addOption(
        Option.builder("spice")
            .required()
            .hasArg()
            .desc(
                "Required. Name of SPICE metakernel containing kernels needed to make the frame transformation.")
            .build());
    options.addOption(
        Option.builder("logFile")
            .hasArg()
            .desc("If present, save screen output to log file.")
            .build());
    StringBuilder sb = new StringBuilder();
    for (StandardLevel l : StandardLevel.values()) sb.append(String.format("%s ", l.name()));
    options.addOption(
        Option.builder("logLevel")
            .hasArg()
            .desc(
                "If present, print messages above selected priority. Valid values are "
                    + sb.toString().trim()
                    + ". Default is INFO.")
            .build());
    return options;
  }
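
  /*
   * A sketch of the -inFile format read by main() below: one record per line,
   * comma separated, ET seconds followed by x, y, z. Blank lines and lines
   * beginning with # are skipped. Values here are placeholders:
   *
   *   # t (ET), x, y, z
   *   760000000.0,1.0,0.0,0.0
   *   760000060.0,0.0,1.0,0.0
   */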

  public static void main(String[] args) {
    TerrasaurTool defaultOBJ = new TransformFrame();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    TransformFrame tf = new TransformFrame();
    NavigableMap<TDBTime, Vector3> map = new TreeMap<>(SPICEUtil.tdbComparator);
    try {
      File f = new File(cl.getOptionValue("inFile"));
      List<String> lines = FileUtils.readLines(f, Charset.defaultCharset());
      for (String line : lines) {
        String trim = line.trim();
        if (trim.isEmpty() || trim.startsWith("#")) continue;
        String[] parts = trim.split(",");
        double et = Double.parseDouble(parts[0].trim());
        if (et > 0) {
          TDBTime t = new TDBTime(et);
          Vector3 v =
              new Vector3(
                  Double.parseDouble(parts[1].trim()),
                  Double.parseDouble(parts[2].trim()),
                  Double.parseDouble(parts[3].trim()));
          map.put(t, v);
        }
      }
    } catch (IOException e) {
      logger.error(e.getLocalizedMessage());
    }

    tf.setPoints(map);

    NativeLibraryLoader.loadSpiceLibraries();
    try {
      KernelDatabase.load(cl.getOptionValue("spice"));
    } catch (SpiceErrorException e) {
      logger.error(e.getLocalizedMessage());
    }

    tf.transformCoordinates(cl.getOptionValue("inFrame"), cl.getOptionValue("outFrame"));
    tf.write(cl.getOptionValue("outFile"));
  }
}
240
src/main/java/terrasaur/apps/TranslateTime.java
Normal file
@@ -0,0 +1,240 @@
package terrasaur.apps;

import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.spi.StandardLevel;
import spice.basic.KernelDatabase;
import spice.basic.KernelPool;
import spice.basic.SCLK;
import spice.basic.SCLKTime;
import spice.basic.SpiceErrorException;
import spice.basic.SpiceException;
import spice.basic.TDBTime;
import terrasaur.gui.TranslateTimeFX;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.NativeLibraryLoader;

/**
 * Translate time between formats.
 *
 * @author nairah1
 */
public class TranslateTime implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  @Override
  public String shortDescription() {
    return "Convert between different time systems.";
  }

  @Override
  public String fullDescription(Options options) {
    String header = "";
    String footer = "\nConvert between different time systems.\n";
    return TerrasaurTool.super.fullDescription(options, header, footer);
  }

  private enum Types {
    JULIAN,
    SCLK,
    TDB,
    TDBCALENDAR,
    UTC
  }

  private Map<Integer, SCLK> sclkMap;

  private TranslateTime() {}

  public TranslateTime(Map<Integer, SCLK> sclkMap) {
    this.sclkMap = sclkMap;
  }

  private TDBTime tdb;

  public String toJulian() throws SpiceErrorException {
    return tdb.toString("JULIAND.######");
  }

  private SCLK sclkKernel;

  public SCLK getSCLKKernel() {
    return sclkKernel;
  }

  public void setSCLKKernel(int sclkID) {
    sclkKernel = sclkMap.get(sclkID);
    if (sclkKernel == null) {
      logger.error("SCLK {} is not loaded!", sclkID);
    }
  }

  public SCLKTime toSCLK() throws SpiceException {
    return new SCLKTime(sclkKernel, tdb);
  }

  public TDBTime toTDB() {
    return tdb;
  }

  public String toUTC() throws SpiceErrorException {
    return tdb.toUTCString("ISOC", 3);
  }

  public void setJulianDate(double julianDate) throws SpiceErrorException {
    tdb = new TDBTime(String.format("%.6f JDUTC", julianDate));
  }

  public void setSCLK(String sclkString) throws SpiceException {
    tdb = new TDBTime(new SCLKTime(sclkKernel, sclkString));
  }

  public void setTDB(double tdb) {
    this.tdb = new TDBTime(tdb);
  }

  public void setTDBCalendarString(String tdbString) throws SpiceErrorException {
    tdb = new TDBTime(String.format("%s TDB", tdbString));
  }

  public void setUTC(String utcStr) throws SpiceErrorException {
    tdb = new TDBTime(utcStr);
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("logFile")
            .hasArg()
            .desc("If present, save screen output to log file.")
            .build());
    StringBuilder sb = new StringBuilder();
    for (StandardLevel l : StandardLevel.values()) sb.append(String.format("%s ", l.name()));
    options.addOption(
        Option.builder("logLevel")
            .hasArg()
            .desc(
                "If present, print messages above selected priority. Valid values are "
                    + sb.toString().trim()
                    + ". Default is INFO.")
            .build());
    options.addOption(
        Option.builder("sclk")
            .hasArg()
            .desc(
                "SPICE id of the sclk to use. Default is to use the first one found in the kernel pool.")
            .build());
    options.addOption(
        Option.builder("spice")
            .required()
            .hasArg()
            .desc("Required. SPICE metakernel containing leap second and SCLK.")
            .build());
    options.addOption(Option.builder("gui").desc("Launch a GUI.").build());
    options.addOption(Option.builder("inputDate").hasArgs().desc("Date to translate.").build());
    sb = new StringBuilder();
    for (Types system : Types.values()) {
      sb.append(String.format("%s ", system.name()));
    }
    options.addOption(
        Option.builder("inputSystem")
            .hasArg()
            .desc(
                "Time system of inputDate. Valid values are "
                    + sb.toString().trim()
                    + ". Default is UTC.")
            .build());
    return options;
  }
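
  /*
   * A hypothetical invocation, for illustration (option names as defined
   * above; the kernel and date values are placeholders):
   *
   *   TranslateTime -spice kernels.tm -inputSystem UTC -inputDate 2024-11-26T00:00:00
   *
   * The output is a comment line echoing the input, a header line, and one
   * record containing UTC, TDB calendar string, day of year, TDB seconds,
   * Julian date, and SCLK string (see main() below).
   */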

  public static void main(String[] args) throws SpiceException {
    TerrasaurTool defaultOBJ = new TranslateTime();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    // This is to avoid java crashing due to inability to connect to an X display
    if (!cl.hasOption("gui")) System.setProperty("java.awt.headless", "true");

    NativeLibraryLoader.loadSpiceLibraries();

    for (String kernel : cl.getOptionValues("spice")) KernelDatabase.load(kernel);

    LinkedHashMap<Integer, SCLK> sclkMap = new LinkedHashMap<>();
    String[] sclk_data_type = KernelPool.getNames("SCLK_DATA_*");
    for (String s : sclk_data_type) {
      String[] parts = s.split("_");
      int sclkID = -Integer.parseInt(parts[parts.length - 1]);
      sclkMap.put(sclkID, new SCLK(sclkID));
    }

    SCLK sclk = null;
    if (cl.hasOption("sclk")) {
      int sclkID = Integer.parseInt(cl.getOptionValue("sclk"));
      if (sclkMap.containsKey(sclkID)) sclk = sclkMap.get(sclkID);
      else {
        logger.error("Cannot find SCLK {} in kernel pool!", sclkID);
        StringBuilder sb = new StringBuilder();
        for (Integer id : sclkMap.keySet()) sb.append(String.format("%d ", id));
        logger.error("Loaded IDs are {}", sb.toString());
      }
    } else {
      if (!sclkMap.values().isEmpty())
        // set the SCLK to the first one found
        sclk = sclkMap.values().stream().toList().get(0);
    }

    if (sclk == null) {
      logger.fatal("Cannot load SCLK");
      System.exit(0);
    }

    TranslateTime tt = new TranslateTime(sclkMap);

    if (cl.hasOption("gui")) {
      TranslateTimeFX.setTranslateTime(tt);
      TranslateTimeFX.setSCLKIDs(sclkMap.keySet());
      TranslateTimeFX.main(args);
      System.exit(0);
    } else {
      if (!cl.hasOption("inputDate")) {
        logger.fatal("Missing required option -inputDate!");
        System.exit(1);
      }
      tt.setSCLKKernel(sclk.getIDCode());
    }

    StringBuilder sb = new StringBuilder();
    for (String s : cl.getOptionValues("inputDate")) sb.append(String.format("%s ", s));
    String inputDate = sb.toString().trim();

    Types type =
        cl.hasOption("inputSystem")
            ? Types.valueOf(cl.getOptionValue("inputSystem").toUpperCase())
            : Types.UTC;

    switch (type) {
      case JULIAN:
        tt.setJulianDate(Double.parseDouble(inputDate));
        break;
      case SCLK:
        tt.setSCLK(inputDate);
        break;
      case TDB:
        tt.setTDB(Double.parseDouble(inputDate));
        break;
      case TDBCALENDAR:
        tt.setTDBCalendarString(inputDate);
        break;
      case UTC:
        tt.setUTC(inputDate);
        break;
    }

    System.out.printf("# input date %s (%s)\n", inputDate, type.name());
    System.out.printf(
        "# UTC, TDB (Calendar), DOY, TDB, Julian Date, SCLK (%d)\n", sclk.getIDCode());

    String utcString = tt.toTDB().toUTCString("ISOC", 3);
    String tdbString = tt.toTDB().toString("YYYY-MM-DDTHR:MN:SC.### ::TDB");
    String doyString = tt.toTDB().toString("DOY");

    System.out.printf(
        "%s, %s, %s, %.6f, %s, %s\n",
        utcString,
        tdbString,
        doyString,
        tt.toTDB().getTDBSeconds(),
        tt.toJulian(),
        tt.toSCLK().toString());
  }
}
261
src/main/java/terrasaur/apps/ValidateNormals.java
Normal file
@@ -0,0 +1,261 @@
package terrasaur.apps;

import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.text.WordUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.*;
import vtk.vtkCellArray;
import vtk.vtkIdList;
import vtk.vtkOBBTree;
import vtk.vtkOBJReader;
import vtk.vtkPolyData;

public class ValidateNormals implements TerrasaurTool {
  private static final Logger logger = LogManager.getLogger();

  private ValidateNormals() {}

  @Override
  public String shortDescription() {
    return "Check facet normal directions for an OBJ shape file.";
  }

  @Override
  public String fullDescription(Options options) {
    String footer =
        "\nThis program checks that the normals of the shape model are not pointing inward.\n";
    return TerrasaurTool.super.fullDescription(options, "", footer);
  }

  static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("origin")
            .hasArg()
            .desc(
                "If present, center of body in xyz coordinates. "
                    + "Specify as three floating point values separated by commas. Default is to use the centroid of "
                    + "the input shape model.")
            .build());
    options.addOption(
        Option.builder("obj").required().hasArg().desc("Shape model to validate.").build());
    options.addOption(
        Option.builder("output")
            .hasArg()
            .desc("Write out new OBJ file with corrected vertex orders for facets.")
            .build());
    options.addOption(
        Option.builder("numThreads")
            .hasArg()
            .desc("Number of threads to run. Default is 1.")
            .build());
    return options;
  }
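
  /*
   * A hypothetical invocation, for illustration (options as defined above;
   * file names are placeholders):
   *
   *   ValidateNormals -obj shape.obj -numThreads 4 -output shape-fixed.obj
   *
   * This reports facets whose normals point inward and writes a corrected OBJ
   * with those facets' vertex winding reversed.
   */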

  private vtkPolyData polyData;
  private ThreadLocal<vtkOBBTree> threadLocalsearchTree;
  private double[] origin;

  public ValidateNormals(vtkPolyData polyData) {
    this.polyData = polyData;

    PolyDataStatistics stats = new PolyDataStatistics(polyData);
    origin = stats.getCentroid();

    threadLocalsearchTree = new ThreadLocal<>();
  }

  public vtkOBBTree getOBBTree() {
    vtkOBBTree searchTree = threadLocalsearchTree.get();
    if (searchTree == null) {
      searchTree = new vtkOBBTree();
      searchTree.SetDataSet(polyData);
      searchTree.SetTolerance(1e-12);
      searchTree.BuildLocator();
      threadLocalsearchTree.set(searchTree);
    }
    return searchTree;
  }

  public void setOrigin(double[] origin) {
    this.origin = origin;
  }

  private class FlippedNormalFinder implements Callable<List<Long>> {

    private static final DateTimeFormatter defaultFormatter =
        DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss z")
            .withLocale(Locale.getDefault())
            .withZone(ZoneId.systemDefault());

    private final long index0;
    private final long index1;

    public FlippedNormalFinder(long index0, long index1) {
      this.index0 = index0;
      this.index1 = index1;
    }

    @Override
    public List<Long> call() {

      logger.info("Thread {}: indices {} to {}", Thread.currentThread().threadId(), index0, index1);
      vtkIdList idList = new vtkIdList();
      vtkIdList cellIds = new vtkIdList();
      List<Long> flippedNormals = new ArrayList<>();

      final long startTime = Instant.now().getEpochSecond();
      final long numFacets = index1 - index0;
      // report progress roughly every 10%; guard against ranges of fewer than 10 facets
      final long progressStep = Math.max(1, numFacets / 10);
      for (int i = 0; i < numFacets; ++i) {

        if (i > 0 && i % progressStep == 0) {
          double pctDone = i / (numFacets * .01);
          long elapsed = Instant.now().getEpochSecond() - startTime;
          long estimatedFinish = (long) (elapsed / (pctDone / 100) + startTime);
          String finish = defaultFormatter.format(Instant.ofEpochSecond(estimatedFinish));
          logger.info(
              String.format(
                  "Thread %d: read %d of %d facets. %.0f%% complete, projected finish %s",
                  Thread.currentThread().threadId(), index0 + i, index1, pctDone, finish));
        }

        long index = index0 + i;

        CellInfo ci = CellInfo.getCellInfo(polyData, index, idList);
        getOBBTree().IntersectWithLine(origin, ci.center().toArray(), null, cellIds);

        // count up all crossings of the surface between the origin and the facet
        int numCrossings = 0;
        for (int j = 0; j < cellIds.GetNumberOfIds(); j++) {
          if (cellIds.GetId(j) == index) break;
          numCrossings++;
        }

        // If numCrossings is even, the radial and normal should point in the same direction.
        // If it is odd, the radial and normal should point in opposite directions.
        boolean shouldBeOpposite = (numCrossings % 2 == 1);
        boolean isOpposite = (ci.center().dotProduct(ci.normal()) < 0);

        // XOR operator - true if the two conditions differ
        if (isOpposite ^ shouldBeOpposite) flippedNormals.add(index);
      }

      return flippedNormals;
    }
  }

  public void flipNormals(Collection<Long> facets) {
    vtkCellArray cells = new vtkCellArray();
    for (long i = 0; i < polyData.GetNumberOfCells(); ++i) {
      vtkIdList idList = new vtkIdList();
      polyData.GetCellPoints(i, idList);
      if (facets.contains(i)) {
        // reverse the winding order by swapping the second and third vertices
        long id0 = idList.GetId(0);
        long id1 = idList.GetId(1);
        long id2 = idList.GetId(2);
        idList.SetId(0, id0);
        idList.SetId(1, id2);
        idList.SetId(2, id1);
      }
      cells.InsertNextCell(idList);
    }
    polyData.SetPolys(cells);
  }

  public static void main(String[] args) throws Exception {
    TerrasaurTool defaultOBJ = new ValidateNormals();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    NativeLibraryLoader.loadVtkLibraries();

    // PolyDataUtil's OBJ reader messes with the normals - not reliable for a local obj
    vtkOBJReader smallBodyReader = new vtkOBJReader();
    smallBodyReader.SetFileName(cl.getOptionValue("obj"));
    smallBodyReader.Update();
    vtkPolyData polyData = new vtkPolyData();
    polyData.ShallowCopy(smallBodyReader.GetOutput());

    smallBodyReader.Delete();

    ValidateNormals app = new ValidateNormals(polyData);

    logger.info("Read {} facets from {}", polyData.GetNumberOfCells(), cl.getOptionValue("obj"));

    if (cl.hasOption("origin")) {
      String[] parts = cl.getOptionValue("origin").split(",");
      double[] origin = new double[3];
      for (int i = 0; i < 3; i++) origin[i] = Double.parseDouble(parts[i]);
      app.setOrigin(origin);
    }

    Set<Long> flippedNormals = new HashSet<>();

    int numThreads =
        cl.hasOption("numThreads") ? Integer.parseInt(cl.getOptionValue("numThreads")) : 1;
    try (ExecutorService executor = Executors.newFixedThreadPool(numThreads)) {
      List<Future<List<Long>>> futures = new ArrayList<>();

      long numFacets = polyData.GetNumberOfCells() / numThreads;
      for (int i = 0; i < numThreads; i++) {
        long fromIndex = i * numFacets;
        // the last thread picks up any remainder facets when the cell count is
        // not an even multiple of numThreads
        long toIndex =
            (i == numThreads - 1) ? polyData.GetNumberOfCells() : fromIndex + numFacets;

        FlippedNormalFinder fnf = app.new FlippedNormalFinder(fromIndex, toIndex);
        futures.add(executor.submit(fnf));
      }

      for (Future<List<Long>> future : futures) flippedNormals.addAll(future.get());

      executor.shutdown();
    }

    logger.info(
        "Found {} flipped normals out of {} facets",
        flippedNormals.size(),
        polyData.GetNumberOfCells());

    if (cl.hasOption("output")) {
      NavigableSet<Long> sorted = new TreeSet<>(flippedNormals);
      String header = "";
      if (!flippedNormals.isEmpty()) {
        header = "# The following indices were flipped from " + cl.getOptionValue("obj") + ":\n";
        StringBuilder sb = new StringBuilder("# ");
        for (Long index : sorted) {
          sb.append(String.format("%d", index));
          if (index < sorted.last()) sb.append(", ");
        }
        sb.append("\n");
        header += WordUtils.wrap(sb.toString(), 80, "\n# ", false);
        logger.info(header);
      }

      app.flipNormals(flippedNormals);
      PolyDataUtil.saveShapeModelAsOBJ(app.polyData, cl.getOptionValue("output"), header);
      logger.info("wrote OBJ file {}", cl.getOptionValue("output"));
    }

    logger.info("ValidateNormals done");
  }
}
402
src/main/java/terrasaur/apps/ValidateOBJ.java
Normal file
@@ -0,0 +1,402 @@
package terrasaur.apps;

import java.util.*;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.spi.StandardLevel;
import picante.math.vectorspace.VectorIJK;
import terrasaur.smallBodyModel.SmallBodyModel;
import terrasaur.templates.TerrasaurTool;
import terrasaur.utils.*;
import terrasaur.utils.mesh.TriangularFacet;
import vtk.vtkIdList;
import vtk.vtkPolyData;

/**
 * Check an OBJ file for the correct number of facets and vertices. Test for duplicate vertices,
 * unreferenced vertices, and zero area facets.
 *
 * <p>Even though this is named "ValidateOBJ" it will work with any format that can be read by
 * {@link PolyDataUtil#loadShapeModel(String)}.
 *
 * @author Hari.Nair@jhuapl.edu
 */
public class ValidateOBJ implements TerrasaurTool {

  private static final Logger logger = LogManager.getLogger();

  @Override
  public String shortDescription() {
    return "Check a closed shape file in OBJ format for errors.";
  }

  @Override
  public String fullDescription(Options options) {
    String header = "";
    String footer =
        """
        This program checks that a shape model has the correct number of facets and vertices. \
        It will also check for duplicate vertices, vertices that are not referenced by any facet, and zero area facets.
        """;
    return TerrasaurTool.super.fullDescription(options, header, footer);
  }

  private vtkPolyData polyData;
  private String validationMsg;

  private ValidateOBJ() {}

  public ValidateOBJ(vtkPolyData polyData) {
    this.polyData = polyData;
  }

  /**
   * @return {@link vtkPolyData#GetNumberOfCells()}
   */
  public long facetCount() {
    return polyData.GetNumberOfCells();
  }

  /**
   * @return {@link vtkPolyData#GetNumberOfPoints()}
   */
  public long vertexCount() {
    return polyData.GetNumberOfPoints();
  }

  /**
   * @return description of test result
   */
  public String getMessage() {
    return validationMsg;
  }

  /**
   * @return true if number of facets in the shape model satisfies 3*4^n where n is an integer
   */
  public boolean testFacets() {
    boolean meetsCondition = facetCount() % 3 == 0;

    if (meetsCondition) {
      long facet3 = facetCount() / 3;
      double logFacet3 = Math.log(facet3) / Math.log(4);
      if (Math.ceil(logFacet3) != Math.floor(logFacet3)) meetsCondition = false;
    }

    int n = (int) (Math.log(facetCount() / 3.) / Math.log(4.0) + 0.5);
    if (meetsCondition) {
      validationMsg =
          String.format(
              "Model has %d facets. This satisfies f = 3*4^n with n = %d.", facetCount(), n);
    } else {
      validationMsg =
          String.format(
              "Model has %d facets. This does not satisfy f = 3*4^n. A shape model with %.0f facets has n = %d.",
              facetCount(), 3 * Math.pow(4, n), n);
    }

    return meetsCondition;
  }
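
  // For example, f = 3*4^n gives 3, 12, 48, 192, ... facets for n = 0, 1, 2, 3,
  // consistent with starting from a 3-facet seed and repeatedly subdividing each
  // triangular facet into four.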

  /**
   * @return true if number of vertices in the shape model satisfies v=f/2+2
   */
  public boolean testVertices() {
    boolean meetsCondition = (facetCount() + 4) / 2 == vertexCount();
    if (meetsCondition)
      validationMsg =
          String.format(
              "Model has %d vertices and %d facets. This satisfies v = f/2+2.",
              vertexCount(), facetCount());
    else
      validationMsg =
          String.format(
              "Model has %d vertices and %d facets. This does not satisfy v = f/2+2. Number of vertices should be %d.",
              vertexCount(), facetCount(), facetCount() / 2 + 2);

    return meetsCondition;
  }
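
  // The v = f/2 + 2 condition follows from the Euler characteristic of a closed
  // genus-0 triangulated surface: v - e + f = 2, and each of the f triangles has
  // 3 edges shared by 2 facets, so e = 3f/2 and therefore v = f/2 + 2.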
|
||||
|
||||
/**
|
||||
* @return key is vertex id, value is a list of vertices within a hard coded distance of 1e-10.
|
||||
*/
|
||||
public NavigableMap<Long, List<Long>> findDuplicateVertices() {
|
||||
|
||||
SmallBodyModel sbm = new SmallBodyModel(polyData);
|
||||
|
||||
double[] iPt = new double[3];
|
||||
|
||||
NavigableMap<Long, List<Long>> map = new TreeMap<>();
|
||||
double tol = 1e-10;
|
||||
for (long i = 0; i < vertexCount(); i++) {
|
||||
polyData.GetPoint(i, iPt);
|
||||
|
||||
List<Long> closestVertices = new ArrayList<>();
|
||||
for (Long id : sbm.findClosestVerticesWithinRadius(iPt, tol))
|
||||
if (id > i) closestVertices.add(id);
|
||||
if (!closestVertices.isEmpty()) map.put(i, closestVertices);
|
||||
|
||||
if (map.containsKey(i) && !map.get(i).isEmpty()) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append(String.format("Duplicates for vertex %d: ", i + 1));
|
||||
for (Long dupId : map.get(i)) sb.append(String.format("%d ", dupId + 1));
|
||||
logger.debug(sb.toString());
|
||||
}
|
||||
}
|
||||
|
||||
validationMsg = String.format("%d vertices have duplicates", map.size());
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return a list of vertex indices where one or more of the coordinates fail {@link
|
||||
* Double#isFinite(double)}.
|
||||
*/
|
||||
public List<Integer> findMalformedVertices() {
|
||||
double[] iPt = new double[3];
|
||||
NavigableSet<Integer> vertexIndices = new TreeSet<>();
|
||||
for (int i = 0; i < vertexCount(); i++) {
|
||||
polyData.GetPoint(i, iPt);
|
||||
for (int j = 0; j < 3; j++) {
|
||||
if (!Double.isFinite(iPt[j])) {
|
||||
logger.debug("Vertex {}: {} {} {}", i, iPt[0], iPt[1], iPt[2]);
|
||||
vertexIndices.add(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
validationMsg = String.format("%d malformed vertices ", vertexIndices.size());
|
||||
return new ArrayList<>(vertexIndices);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return a list of vertex indices that are not referenced by any facet
|
||||
*/
|
||||
public List<Long> findUnreferencedVertices() {
|
||||
NavigableSet<Long> vertexIndices = new TreeSet<>();
|
||||
for (long i = 0; i < polyData.GetNumberOfPoints(); i++) {
|
||||
vertexIndices.add(i);
|
||||
}
|
||||
|
||||
vtkIdList idList = new vtkIdList();
|
||||
for (int i = 0; i < facetCount(); ++i) {
|
||||
polyData.GetCellPoints(i, idList);
|
||||
long id0 = idList.GetId(0);
|
||||
long id1 = idList.GetId(1);
|
||||
long id2 = idList.GetId(2);
|
||||
|
||||
vertexIndices.remove(id0);
|
||||
vertexIndices.remove(id1);
|
||||
vertexIndices.remove(id2);
|
||||
}
|
||||
|
||||
if (!vertexIndices.isEmpty()) {
|
||||
double[] pt = new double[3];
|
||||
for (long id : vertexIndices) {
|
||||
polyData.GetPoint(id, pt);
|
||||
logger.debug("Unreferenced vertex {} [{}, {}, {}]", id + 1, pt[0], pt[1], pt[2]);
|
||||
// note OBJ vertices are numbered from 1 but VTK uses 0
|
||||
}
|
||||
}
|
||||
|
||||
validationMsg = String.format("%d unreferenced vertices found", vertexIndices.size());
|
||||
|
||||
return new ArrayList<>(vertexIndices);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return a list of facet indices where the facet has zero area
|
||||
*/
|
||||
public List<Integer> findZeroAreaFacets() {
|
||||
List<Integer> zeroAreaFacets = new ArrayList<>();
|
||||
vtkIdList idList = new vtkIdList();
|
||||
double[] pt0 = new double[3];
|
||||
double[] pt1 = new double[3];
|
||||
double[] pt2 = new double[3];
|
||||
|
||||
for (int i = 0; i < facetCount(); ++i) {
|
||||
polyData.GetCellPoints(i, idList);
|
||||
long id0 = idList.GetId(0);
|
||||
long id1 = idList.GetId(1);
|
||||
long id2 = idList.GetId(2);
|
||||
polyData.GetPoint(id0, pt0);
|
||||
polyData.GetPoint(id1, pt1);
|
      polyData.GetPoint(id2, pt2);

      // would be faster to check if id0==id1||id0==id2||id1==id2 but there may be
      // duplicate vertices
      TriangularFacet facet =
          new TriangularFacet(new VectorIJK(pt0), new VectorIJK(pt1), new VectorIJK(pt2));
      double area = facet.getArea();
      if (area == 0) {
        zeroAreaFacets.add(i);
        logger.debug(
            "Facet {} has zero area. Vertices are {} [{}, {}, {}], {} [{}, {}, {}], and {} [{}, {}, {}]",
            i + 1,
            id0 + 1,
            pt0[0],
            pt0[1],
            pt0[2],
            id1 + 1,
            pt1[0],
            pt1[1],
            pt1[2],
            id2 + 1,
            pt2[0],
            pt2[1],
            pt2[2]);
      }
    }

    validationMsg = String.format("%d zero area facets found", zeroAreaFacets.size());
    return zeroAreaFacets;
  }

  /**
   * @return statistics on the angle between the facet radial and normal vectors
   */
  public DescriptiveStatistics normalStats() {
    DescriptiveStatistics stats = new DescriptiveStatistics();

    VectorStatistics cStats = new VectorStatistics();
    VectorStatistics nStats = new VectorStatistics();

    vtkIdList idList = new vtkIdList();
    double[] pt0 = new double[3];
    double[] pt1 = new double[3];
    double[] pt2 = new double[3];
    for (int i = 0; i < facetCount(); ++i) {
      polyData.GetCellPoints(i, idList);
      long id0 = idList.GetId(0);
      long id1 = idList.GetId(1);
      long id2 = idList.GetId(2);
      polyData.GetPoint(id0, pt0);
      polyData.GetPoint(id1, pt1);
      polyData.GetPoint(id2, pt2);

      // would be faster to check if id0==id1||id0==id2||id1==id2 but there may be
      // duplicate vertices
      TriangularFacet facet =
          new TriangularFacet(new VectorIJK(pt0), new VectorIJK(pt1), new VectorIJK(pt2));
      if (facet.getArea() > 0) {
        stats.addValue(facet.getCenter().getDot(facet.getNormal()));
        cStats.add(facet.getCenter());
        nStats.add(facet.getNormal());
      }
    }

    validationMsg =
        String.format(
            "Using %d non-zero area facets: Mean angle between radial and normal is %f degrees, "
                + "angle between mean radial and mean normal is %f degrees",
            stats.getN(),
            Math.toDegrees(Math.acos(stats.getMean())),
            Math.toDegrees(Vector3D.angle(cStats.getMean(), nStats.getMean())));

    return stats;
  }

  private static Options defineOptions() {
    Options options = TerrasaurTool.defineOptions();
    options.addOption(
        Option.builder("obj").required().hasArg().desc("Shape model to validate.").build());
    options.addOption(
        Option.builder("logFile")
            .hasArg()
            .desc("If present, save screen output to log file.")
            .build());
    StringBuilder sb = new StringBuilder();
    for (StandardLevel l : StandardLevel.values()) sb.append(String.format("%s ", l.name()));
    options.addOption(
        Option.builder("logLevel")
            .hasArg()
            .desc(
                "If present, print messages above selected priority. Valid values are "
                    + sb.toString().trim()
                    + ". Default is INFO.")
            .build());
    options.addOption(Option.builder("output").hasArg().desc("Filename for output OBJ.").build());
    options.addOption(
        Option.builder("removeDuplicateVertices")
            .desc("Remove duplicate vertices. Use with -output to save OBJ.")
            .build());
    options.addOption(
        Option.builder("removeUnreferencedVertices")
            .desc("Remove unreferenced vertices. Use with -output to save OBJ.")
            .build());
    options.addOption(
        Option.builder("removeZeroAreaFacets")
            .desc("Remove facets with zero area. Use with -output to save OBJ.")
            .build());
    options.addOption(
        Option.builder("cleanup")
            .desc(
                "Combines -removeDuplicateVertices, -removeUnreferencedVertices, and -removeZeroAreaFacets.")
            .build());
    return options;
  }

  public static void main(String[] args) throws Exception {
    TerrasaurTool defaultOBJ = new ValidateOBJ();

    Options options = defineOptions();

    CommandLine cl = defaultOBJ.parseArgs(args, options);

    Map<MessageLabel, String> startupMessages = defaultOBJ.startupMessages(cl);
    for (MessageLabel ml : startupMessages.keySet())
      logger.info(String.format("%s %s", ml.label, startupMessages.get(ml)));

    NativeLibraryLoader.loadVtkLibraries();
    vtkPolyData polyData = PolyDataUtil.loadShapeModel(cl.getOptionValue("obj"));

    if (polyData == null) {
      logger.error("Cannot read {}, exiting.", cl.getOptionValue("obj"));
      System.exit(0);
    }

    ValidateOBJ vo = new ValidateOBJ(polyData);

    logger.log(vo.testFacets() ? Level.INFO : Level.WARN, vo.getMessage());
    logger.log(vo.testVertices() ? Level.INFO : Level.WARN, vo.getMessage());

    DescriptiveStatistics stats = vo.normalStats();
    logger.log(stats.getMean() > 0 ? Level.INFO : Level.WARN, vo.getMessage());

    List<Integer> mfv = vo.findMalformedVertices();
    logger.log(!mfv.isEmpty() ? Level.WARN : Level.INFO, vo.getMessage());

    NavigableMap<Long, List<Long>> dv = vo.findDuplicateVertices();
    logger.log(!dv.isEmpty() ? Level.WARN : Level.INFO, vo.getMessage());

    List<Long> urv = vo.findUnreferencedVertices();
    logger.log(!urv.isEmpty() ? Level.WARN : Level.INFO, vo.getMessage());

    List<Integer> zaf = vo.findZeroAreaFacets();
    logger.log(!zaf.isEmpty() ? Level.WARN : Level.INFO, vo.getMessage());

    final boolean cleanup = cl.hasOption("cleanup");
    final boolean removeDuplicateVertices = cleanup || cl.hasOption("removeDuplicateVertices");
    final boolean removeUnreferencedVertices =
        cleanup || cl.hasOption("removeUnreferencedVertices");
    final boolean removeZeroAreaFacets = cleanup || cl.hasOption("removeZeroAreaFacets");

    if (removeDuplicateVertices) polyData = PolyDataUtil.removeDuplicatePoints(polyData);

    if (removeUnreferencedVertices) polyData = PolyDataUtil.removeUnreferencedPoints(polyData);

    if (removeZeroAreaFacets) polyData = PolyDataUtil.removeZeroAreaFacets(polyData);

    if (cl.hasOption("output")) {
      PolyDataUtil.saveShapeModelAsOBJ(polyData, cl.getOptionValue("output"));
      logger.info(String.format("Wrote OBJ file %s", cl.getOptionValue("output")));
    }
  }
}
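The zero-area test above flags any facet whose computed area is exactly zero, which is what coincident (duplicate) vertices produce. A minimal sketch of the same check in isolation, reusing the TriangularFacet and VectorIJK classes imported by ValidateOBJ (the helper name and vertex values are illustrative, not part of the diff):

    // Hypothetical helper: two coincident vertices collapse the triangle, so
    // getArea() returns exactly 0 and findZeroAreaFacets() would flag the facet.
    static boolean isDegenerate(double[] a, double[] b, double[] c) {
      TriangularFacet facet =
          new TriangularFacet(new VectorIJK(a), new VectorIJK(b), new VectorIJK(c));
      return facet.getArea() == 0;
    }

    // isDegenerate(new double[] {0, 0, 0}, new double[] {0, 0, 0}, new double[] {1, 0, 0})
    // returns true; any triangle with three distinct, non-collinear vertices returns false.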
5
src/main/java/terrasaur/apps/package-info.java
Normal file
@@ -0,0 +1,5 @@
/**
 * Applications that can be run on the command line. Running each without arguments gives a usage
 * summary.
 */
package terrasaur.apps;
99
src/main/java/terrasaur/config/CKFromSumFileConfig.java
Normal file
@@ -0,0 +1,99 @@
package terrasaur.config;

import java.util.List;
import jackfruit.annotations.Comment;
import jackfruit.annotations.DefaultValue;
import jackfruit.annotations.Jackfruit;

@Jackfruit
public interface CKFromSumFileConfig {

  @Comment("""
      Body fixed frame for the target body. If blank, use SPICE-defined
      body fixed frame. This will be the reference frame unless the J2000
      parameter is set to true.""")
  @DefaultValue("IAU_DIMORPHOS")
  String bodyFrame();

  @Comment("Target body name.")
  @DefaultValue("DIMORPHOS")
  String bodyName();

  @Comment("""
      Extend CK past the last sumFile by this number of seconds. Default
      is zero. Attitude is assumed to be fixed to the value given by the
      last sumfile.""")
  @DefaultValue("0")
  double extend();

  @Comment("""
      SPC defines the camera X axis to be increasing to the right, Y to
      be increasing down, and Z to point into the page:

            Z
           /
          /
         o---> X
         |
       Y |
         v

      SPICE may use a different convention. Use the flip parameters to
      map the camera axes to SPICE axes. flipI = J sets camera axis I
      to SPICE axis J, where I is an axis name (X, Y, or Z), and J is a
      signed integer number for an axis. The values of J can be (-1, 1,
      -2, 2, -3, or 3), indicating the (-X, X, -Y, Y, -Z, Z) axis,
      respectively. The user must take care to correctly enter the
      flipI values so that the resulting flipped axes form a
      well-defined, right-handed coordinate system.

      Examples:
      (flipX, flipY, flipZ) = ( 1, 2, 3) SPICE and camera frames coincide.
      (flipX, flipY, flipZ) = ( 2,-1, 3) SPICE frame is camera frame rotated 90 degrees about Z.
      (flipX, flipY, flipZ) = (-2, 1, 3) SPICE frame is camera frame rotated -90 degrees about Z.
      (flipX, flipY, flipZ) = ( 1,-2,-3) rotates the image 180 degrees about X.""")
  @DefaultValue("-1")
  int flipX();

  @Comment("Map the camera Y axis to a SPICE axis. See flipX for details.")
  @DefaultValue("2")
  int flipY();

  @Comment("Map the camera Z axis to a SPICE axis. See flipX for details.")
  @DefaultValue("-3")
  int flipZ();

  @Comment("""
      Supply this frame kernel to MSOPCK. Only needed if the reference frame
      (set by bodyFrame or J2000) is not built into SPICE""")
  @DefaultValue("/project/dart/data/SPICE/flight/fk/didymos_system_001.tf")
  String fk();

  @Comment("Instrument frame name")
  @DefaultValue("DART_DRACO")
  String instrumentFrameName();

  @Comment("If set to true, use J2000 as the reference frame")
  @DefaultValue("true")
  boolean J2000();

  @Comment("Path to leapseconds kernel.")
  @DefaultValue("/project/dart/data/SPICE/flight/lsk/naif0012.tls")
  String lsk();

  @Comment("Path to spacecraft SCLK file.")
  @DefaultValue("/project/dart/data/SPICE/flight/sclk/dart_sclk_0204.tsc")
  String sclk();

  @Comment("Name of spacecraft frame.")
  @DefaultValue("DART_SPACECRAFT")
  String spacecraftFrame();

  @Comment("""
      SPICE metakernel to read. This may be specified more than once
      for multiple metakernels.""")
  @DefaultValue("/project/dart/data/SPICE/flight/mk/current.tm")
  List<String> metakernel();
}
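The flip convention above maps each camera axis to a signed SPICE axis. A minimal sketch of that bookkeeping, assuming only what the comment defines (the helper is hypothetical, not part of this interface):

    // flip is one of -1, 1, -2, 2, -3, 3, selecting the -X, X, -Y, Y, -Z, Z
    // SPICE axis. Returns that axis as a signed unit vector.
    static double[] spiceAxisFor(int flip) {
      double[] axis = new double[3];
      axis[Math.abs(flip) - 1] = Integer.signum(flip);
      return axis;
    }

With the defaults (flipX, flipY, flipZ) = (-1, 2, -3), camera X maps to (-1, 0, 0), camera Y to (0, 1, 0), and camera Z to (0, 0, -1); those three vectors still form a right-handed frame, as the comment requires.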
236
src/main/java/terrasaur/config/CommandLineOptions.java
Normal file
@@ -0,0 +1,236 @@
package terrasaur.config;

import java.io.File;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.spi.StandardLevel;
import terrasaur.utils.Log4j2Configurator;
import terrasaur.utils.saaPlotLib.colorMaps.ColorRamp;

public class CommandLineOptions {

  private static final Logger logger = LogManager.getLogger();

  /**
   * Configuration file to load.
   *
   * @return the -config option
   */
  public static Option addConfig() {
    return Option.builder("config").hasArg().required().desc("Configuration file to load").build();
  }

  /**
   * Color ramp style. See {@link ColorRamp.TYPE} for valid values.
   *
   * @param defaultCRType ramp type named in the usage message
   * @return the -colorRamp option
   */
  public static Option addColorRamp(ColorRamp.TYPE defaultCRType) {
    StringBuilder sb = new StringBuilder();
    for (ColorRamp.TYPE t : ColorRamp.TYPE.values()) sb.append(String.format("%s ", t.name()));
    return Option.builder("colorRamp")
        .hasArg()
        .desc(
            "Color ramp style. Valid values are "
                + sb.toString().trim()
                + ". Default is "
                + defaultCRType.name()
                + ". Run the ColorMaps application to see all supported color ramps.")
        .build();
  }

  /**
   * Return a color ramp type or the default value.
   *
   * @param cl parsed command line
   * @param defaultCRType value to use if -colorRamp is absent
   * @return the selected color ramp type
   */
  public static ColorRamp.TYPE getColorRamp(CommandLine cl, ColorRamp.TYPE defaultCRType) {
    return cl.hasOption("colorRamp")
        ? ColorRamp.TYPE.valueOf(cl.getOptionValue("colorRamp").toUpperCase().strip())
        : defaultCRType;
  }

  /**
   * Hard lower limit for color bar. If the color bar minimum is set dynamically it will not be
   * lower than hardMin.
   *
   * @return the -hardMin option
   */
  public static Option addHardMin() {
    return Option.builder("hardMin")
        .hasArg()
        .desc(
            "Hard lower limit for color bar. If the color bar minimum is set dynamically it will not be lower than hardMin.")
        .build();
  }

  /**
   * Hard upper limit for color bar. If the color bar maximum is set dynamically it will not be
   * higher than hardMax.
   *
   * @return the -hardMax option
   */
  public static Option addHardMax() {
    return Option.builder("hardMax")
        .hasArg()
        .desc(
            "Hard upper limit for color bar. If the color bar maximum is set dynamically it will not be higher than hardMax.")
        .build();
  }

  /**
   * Get the hard minimum for the colorbar.
   *
   * @param cl parsed command line
   * @return the -hardMin value, or NaN if absent
   */
  public static double getHardMin(CommandLine cl) {
    return cl.hasOption("hardMin") ? Double.parseDouble(cl.getOptionValue("hardMin")) : Double.NaN;
  }

  /**
   * Get the hard maximum for the colorbar.
   *
   * @param cl parsed command line
   * @return the -hardMax value, or NaN if absent
   */
  public static double getHardMax(CommandLine cl) {
    return cl.hasOption("hardMax") ? Double.parseDouble(cl.getOptionValue("hardMax")) : Double.NaN;
  }

  /**
   * If present, save screen output to log file.
   *
   * @return the -logFile option
   */
  public static Option addLogFile() {
    return Option.builder("logFile")
        .hasArg()
        .desc("If present, save screen output to log file.")
        .build();
  }

  /**
   * If present, print messages above selected priority. See {@link StandardLevel} for valid values.
   *
   * @return the -logLevel option
   */
  public static Option addLogLevel() {
    StringBuilder sb = new StringBuilder();
    for (StandardLevel l : StandardLevel.values()) sb.append(String.format("%s ", l.name()));
    return Option.builder("logLevel")
        .hasArg()
        .desc(
            "If present, print messages above selected priority. Valid values are "
                + sb.toString().trim()
                + ". Default is INFO.")
        .build();
  }

  /**
   * Set the logging level from the command line option.
   *
   * @param cl parsed command line
   */
  public static void setLogLevel(CommandLine cl) {
    Log4j2Configurator lc = Log4j2Configurator.getInstance();
    if (cl.hasOption("logLevel"))
      lc.setLevel(Level.valueOf(cl.getOptionValue("logLevel").toUpperCase().trim()));
  }

  /**
   * Log to the file named on the command line as well as any others supplied.
   *
   * @param cl parsed command line
   * @param others additional log files to write
   */
  public static void setLogFile(CommandLine cl, String... others) {
    Log4j2Configurator lc = Log4j2Configurator.getInstance();
    if (cl.hasOption("logFile")) lc.addFile(cl.getOptionValue("logFile"));
    for (String other : others) lc.addFile(other);
  }

  /**
   * Maximum number of simultaneous threads to execute.
   *
   * @return the -numCPU option
   */
  public static Option addNumCPU() {
    return Option.builder("numCPU")
        .hasArg()
        .desc(
            "Maximum number of simultaneous threads to execute. Default is numCPU value in configuration file.")
        .build();
  }

  /**
   * Directory to place output files. Default is the working directory.
   *
   * @return the -outputDir option
   */
  public static Option addOutputDir() {
    return Option.builder("outputDir")
        .hasArg()
        .desc("Directory to place output files. Default is the working directory.")
        .build();
  }

  /**
   * Set the output dir from the command line argument, creating it if needed.
   *
   * @param cl parsed command line
   * @return the output directory path
   */
  public static String setOutputDir(CommandLine cl) {
    String path = cl.hasOption("outputDir") ? cl.getOptionValue("outputDir") : ".";
    File parent = new File(path);
    if (!parent.exists()) parent.mkdirs();
    return path;
  }

  /**
   * Minimum value to plot.
   *
   * @return the -plotMin option
   */
  public static Option addPlotMin() {
    return Option.builder("plotMin").hasArg().desc("Min value to plot.").build();
  }

  /**
   * Get plot min from command line argument.
   *
   * @param cl parsed command line
   * @return the -plotMin value, or NaN if absent
   */
  public static double getPlotMin(CommandLine cl) {
    return cl.hasOption("plotMin") ? Double.parseDouble(cl.getOptionValue("plotMin")) : Double.NaN;
  }

  /**
   * Maximum value to plot.
   *
   * @return the -plotMax option
   */
  public static Option addPlotMax() {
    return Option.builder("plotMax").hasArg().desc("Max value to plot.").build();
  }

  /**
   * Get plot max from command line argument.
   *
   * @param cl parsed command line
   * @return the -plotMax value, or NaN if absent
   */
  public static double getPlotMax(CommandLine cl) {
    return cl.hasOption("plotMax") ? Double.parseDouble(cl.getOptionValue("plotMax")) : Double.NaN;
  }
}
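A minimal sketch of how these shared helpers compose, assuming Apache Commons CLI's DefaultParser and an illustrative option set (imports and exception handling omitted):

    // Hypothetical snippet: build an option set from the shared helpers,
    // parse the command line, then apply the logging and output settings.
    Options options = new Options();
    options.addOption(CommandLineOptions.addConfig());
    options.addOption(CommandLineOptions.addLogFile());
    options.addOption(CommandLineOptions.addLogLevel());
    options.addOption(CommandLineOptions.addOutputDir());

    CommandLine cl = new DefaultParser().parse(options, args);
    CommandLineOptions.setLogLevel(cl);
    CommandLineOptions.setLogFile(cl);
    String outputDir = CommandLineOptions.setOutputDir(cl);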
55
src/main/java/terrasaur/config/ConfigBlock.java
Normal file
@@ -0,0 +1,55 @@
package terrasaur.config;

import jackfruit.annotations.Comment;
import jackfruit.annotations.DefaultValue;
import jackfruit.annotations.Include;
import jackfruit.annotations.Jackfruit;

@Jackfruit
public interface ConfigBlock {

  String introLines =
      """
      ###############################################################################
      # GENERAL PARAMETERS
      ###############################################################################
      """;

  @Comment(
      introLines
          + """
          Set the logging level. Valid values in order of increasing detail:
          OFF
          FATAL
          ERROR
          WARN
          INFO
          DEBUG
          TRACE
          ALL
          See org.apache.logging.log4j.Level.""")
  @DefaultValue("INFO")
  String logLevel();

  @Comment(
      "Format for log messages. See https://logging.apache.org/log4j/2.x/manual/layouts.html#PatternLayout for more details.")
  @DefaultValue("%highlight{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%c{1}:%L] %msg%n%throwable}")
  String logFormat();

  @Comment(
      """
      Format for time strings. Allowed values are:
      C    (e.g. 1986 APR 12 16:31:09.814)
      D    (e.g. 1986-102 // 16:31:12.814)
      J    (e.g. 2446533.18834276)
      ISOC (e.g. 1986-04-12T16:31:12.814)
      ISOD (e.g. 1986-102T16:31:12.814)""")
  @DefaultValue("ISOC")
  String timeFormat();

  @Include
  MissionBlock missionBlock();

  @Include
  SPCBlock spcBlock();
}
37
src/main/java/terrasaur/config/MissionBlock.java
Normal file
@@ -0,0 +1,37 @@
package terrasaur.config;

import jackfruit.annotations.Comment;
import jackfruit.annotations.DefaultValue;
import jackfruit.annotations.Jackfruit;

import java.util.List;

@Jackfruit(prefix = "mission")
public interface MissionBlock {

  String introLines =
      """
      ###############################################################################
      # MISSION PARAMETERS
      ###############################################################################
      """;

  @Comment(introLines + "Mission name (e.g. DART)")
  @DefaultValue("mission")
  String missionName();

  @Comment(
      """
      SPICE metakernel to read. This may be specified more than once
      for multiple metakernels (e.g. /project/dart/data/SPICE/flight/mk/current.tm)""")
  @DefaultValue("metakernel.tm")
  List<String> metakernel();

  @Comment("Name of spacecraft frame (e.g. DART_SPACECRAFT)")
  @DefaultValue("SPACECRAFT_FRAME")
  String spacecraftFrame();

  @Comment("Instrument frame name (e.g. DART_DRACO)")
  @DefaultValue("INSTRUMENT_FRAME")
  String instrumentFrameName();
}
54
src/main/java/terrasaur/config/SPCBlock.java
Normal file
@@ -0,0 +1,54 @@
package terrasaur.config;

import jackfruit.annotations.Comment;
import jackfruit.annotations.DefaultValue;
import jackfruit.annotations.Jackfruit;

@Jackfruit(prefix = "spc")
public interface SPCBlock {

  String introLines =
      """
      ###############################################################################
      # SPC PARAMETERS
      ###############################################################################
      """;

  @Comment(introLines + """
      SPC defines the camera X axis to be increasing to the right, Y to
      be increasing down, and Z to point into the page:

            Z
           /
          /
         o---> X
         |
       Y |
         v

      SPICE may use a different convention. Use the flip parameters to
      map the camera axes to SPICE axes. flipI = J sets camera axis I
      to SPICE axis J, where I is an axis name (X, Y, or Z), and J is a
      signed integer number for an axis. The values of J can be (-1, 1,
      -2, 2, -3, or 3), indicating the (-X, X, -Y, Y, -Z, Z) axis,
      respectively. The user must take care to correctly enter the
      flipI values so that the resulting flipped axes form a
      well-defined, right-handed coordinate system.

      Examples:
      (flipX, flipY, flipZ) = ( 1, 2, 3) SPICE and camera frames coincide.
      (flipX, flipY, flipZ) = ( 2,-1, 3) SPICE frame is camera frame rotated 90 degrees about Z.
      (flipX, flipY, flipZ) = (-2, 1, 3) SPICE frame is camera frame rotated -90 degrees about Z.
      (flipX, flipY, flipZ) = ( 1,-2,-3) rotates the image 180 degrees about X.""")
  @DefaultValue("-1")
  int flipX();

  @Comment("Map the camera Y axis to a SPICE axis. See flipX for details.")
  @DefaultValue("2")
  int flipY();

  @Comment("Map the camera Z axis to a SPICE axis. See flipX for details.")
  @DefaultValue("-3")
  int flipZ();
}
89
src/main/java/terrasaur/config/TerrasaurConfig.java
Normal file
@@ -0,0 +1,89 @@
package terrasaur.config;

import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.Locale;
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.PropertiesConfigurationLayout;
import org.apache.commons.configuration2.builder.fluent.Configurations;
import org.apache.commons.configuration2.ex.ConfigurationException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import terrasaur.utils.AppVersion;

public class TerrasaurConfig {

  private static final Logger logger = LogManager.getLogger();

  private static TerrasaurConfig instance = null;

  private TerrasaurConfig() {}

  private ConfigBlock configBlock;

  public static ConfigBlock getConfig() {
    if (instance == null) {
      logger.error("Configuration has not been loaded! Returning null.");
      return null;
    }
    return instance.configBlock;
  }

  public static ConfigBlock getTemplate() {
    if (instance == null) {
      instance = new TerrasaurConfig();
      ConfigBlockFactory factory = new ConfigBlockFactory();
      instance.configBlock = factory.getTemplate();
    }
    return instance.configBlock;
  }

  public static ConfigBlock load(Path filename) {
    if (!Files.exists(filename)) {
      System.err.println("Cannot load configuration file " + filename);
      Thread.dumpStack();
      System.exit(1);
    }
    if (instance == null) {
      instance = new TerrasaurConfig();

      try {
        PropertiesConfiguration config = new Configurations().properties(filename.toFile());
        instance.configBlock = new ConfigBlockFactory().fromConfig(config);
      } catch (ConfigurationException e) {
        e.printStackTrace();
      }
    }
    return instance.configBlock;
  }

  @Override
  public String toString() {
    StringWriter string = new StringWriter();
    try (PrintWriter pw = new PrintWriter(string)) {
      PropertiesConfiguration config = new ConfigBlockFactory().toConfig(instance.configBlock);
      PropertiesConfigurationLayout layout = config.getLayout();

      String now =
          DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
              .withLocale(Locale.getDefault())
              .withZone(ZoneOffset.UTC)
              .format(Instant.now());
      layout.setHeaderComment(
          String.format(
              "Configuration file for %s\nCreated %s UTC", AppVersion.getVersionString(), now));

      config.write(pw);
    } catch (ConfigurationException | IOException e) {
      e.printStackTrace();
    }
    return string.toString();
  }
}
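A minimal usage sketch of the singleton above (the file name is illustrative):

    // Hypothetical snippet: load the configuration once, then read it anywhere.
    ConfigBlock config = TerrasaurConfig.load(Path.of("terrasaur.properties"));
    String timeFormat = config.timeFormat();

    // Later callers can use getConfig(), which logs an error and returns null
    // if load() was never called; getTemplate() returns the built-in defaults.
    ConfigBlock same = TerrasaurConfig.getConfig();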
1725
src/main/java/terrasaur/enums/AltwgDataType.java
Normal file
60
src/main/java/terrasaur/enums/FORMATS.java
Normal file
@@ -0,0 +1,60 @@
package terrasaur.enums;

import org.apache.commons.io.FilenameUtils;

public enum FORMATS {
  ASCII(true),
  BIN3(true),
  BIN4(true),
  BIN7(true),
  FITS(false),
  ICQ(false),
  OBJ(false),
  PLT(false),
  PLY(false),
  VTK(false);

  /** True if this format contains no facet information */
  public final boolean pointsOnly;

  FORMATS(boolean pointsOnly) {
    this.pointsOnly = pointsOnly;
  }

  /**
   * Guess the format from the (case-insensitive) filename extension.
   *
   * <p>ASCII: ascii, txt, xyz
   *
   * <p>BIN3: bin3, bin
   *
   * <p>FITS: fits, fit
   *
   * <p>OBJ: obj
   *
   * <p>PLT: plt
   *
   * <p>PLY: ply
   *
   * <p>VTK: vtk
   *
   * @param filename
   * @return matched format type, or null if a match is not found
   */
  public static FORMATS formatFromExtension(String filename) {
    String extension = FilenameUtils.getExtension(filename);
    for (FORMATS f : FORMATS.values()) {
      if (f.name().equalsIgnoreCase(extension)) {
        return f;
      }
    }

    switch (extension.toUpperCase()) {
      case "TXT":
      case "XYZ":
        return FORMATS.ASCII;
      case "BIN":
        return FORMATS.BIN3;
      case "FIT":
        return FORMATS.FITS;
    }

    return null;
  }
}
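The matching above tries the enum names first, then the aliases listed in the javadoc. For example (hypothetical checks, derived from formatFromExtension):

    FORMATS.formatFromExtension("model.OBJ");  // OBJ (name match, case-insensitive)
    FORMATS.formatFromExtension("model.xyz");  // ASCII (alias)
    FORMATS.formatFromExtension("model.bin");  // BIN3 (alias)
    FORMATS.formatFromExtension("model.foo");  // null (no match)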
33
src/main/java/terrasaur/enums/FitsHeaderType.java
Normal file
@@ -0,0 +1,33 @@
package terrasaur.enums;

/**
 * Enums for a given fits header format. This is used to keep fits headers for the different types
 * separately configurable.
 *
 * @author espirrc1
 */
public enum FitsHeaderType {
  ANCIGLOBALGENERIC,
  ANCILOCALGENERIC,
  ANCIGLOBALALTWG,
  ANCIG_FACETRELATION_ALTWG,
  ANCILOCALALTWG,
  DTMGLOBALALTWG,
  DTMLOCALALTWG,
  DTMGLOBALGENERIC,
  DTMLOCALGENERIC,
  NFTMLN;

  public static boolean isGLobal(FitsHeaderType hdrType) {
    switch (hdrType) {
      case ANCIGLOBALALTWG:
      case ANCIGLOBALGENERIC:
      case DTMGLOBALALTWG:
      case DTMGLOBALGENERIC:
        return true;
      default:
        return false;
    }
  }
}
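For example, only the four GLOBAL header types report true (derived from isGLobal):

    FitsHeaderType.isGLobal(FitsHeaderType.DTMGLOBALALTWG);  // true
    FitsHeaderType.isGLobal(FitsHeaderType.NFTMLN);          // false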
156
src/main/java/terrasaur/enums/PlaneInfo.java
Normal file
@@ -0,0 +1,156 @@
package terrasaur.enums;

import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import nom.tam.fits.HeaderCard;
import nom.tam.fits.HeaderCardException;

/**
 * Enumeration containing the values and comments to use for FITS tags describing data stored in
 * FITS data cubes. The enumeration name references the type of data stored in a given plane. This
 * way the user can choose their own value for the FITS keyword (i.e. "PLANE1" or "PLANE10").
 *
 * @author espirrc1
 */
public enum PlaneInfo {

  //@formatter:off
  LAT("Latitude of vertices", "[deg]", "deg"),
  LON("Longitude of vertices", "[deg]", "deg"),
  RAD("Radius of vertices", "[km]", "km"),
  X("X coordinate of vertices", "[km]", "km"),
  Y("Y coordinate of vertices", "[km]", "km"),
  Z("Z coordinate of vertices", "[km]", "km"),
  NORM_VECTOR_X("Normal vector X", null, null),
  NORM_VECTOR_Y("Normal vector Y", null, null),
  NORM_VECTOR_Z("Normal vector Z", null, null),
  GRAV_VECTOR_X("Gravity vector X", "[m/s^2]", "m/s**2"),
  GRAV_VECTOR_Y("Gravity vector Y", "[m/s^2]", "m/s**2"),
  GRAV_VECTOR_Z("Gravity vector Z", "[m/s^2]", "m/s**2"),
  GRAV_MAG("Gravitational magnitude", "[m/s^2]", "m/s**2"),
  GRAV_POT("Gravitational potential", "[J/kg]", "J/kg"),
  ELEV("Elevation", "[m]", "m"),
  AREA("Area", "[km^2]", "km**2"),

  // no longer needed! same as HEIGHT!
  // ELEV_NORM("Elevation relative to normal plane", "[m]", "m"),
  SLOPE("Slope", "[deg]", "deg"),
  SHADE("Shaded relief", null, null),
  TILT("Facet tilt", "[deg]", "deg"),
  TILT_DIRECTION("Facet tilt direction", "[deg]", "deg"),
  TILT_AVERAGE("Mean tilt", "[deg]", "deg"),
  TILT_VARIATION("Tilt variation", "[deg]", "deg"),
  TILT_AVERAGE_DIRECTION("Mean tilt direction", "[deg]", "deg"),
  TILT_DIRECTION_VARIATION("Tilt direction variation", "[deg]", "deg"),
  TILT_RELATIVE("Relative tilt", "[deg]", "deg"),
  TILT_RELATIVE_DIRECTION("Relative tilt direction", "[deg]", "deg"),
  TILT_UNCERTAINTY("Tilt Uncertainty", "[deg]", "deg"),
  FACETRAD("Facet radius", "[m]", "m"),
  HEIGHT("Height relative to normal plane", "[km]", "km"),
  RELATIVE_HEIGHT("Max relative height", "[km]", "km"),
  ALBEDO("Relative albedo", null, null),
  INTENSITY("Return Intensity", null, null),
  SIGMA("Sigma", null, null),
  QUALITY("Quality", null, null),
  SHADED("Shaded relief", null, null),
  NUMPOINTS("Number of OLA points used", null, null),
  HEIGHT_RESIDUAL("Mean of residual between points and fitted height", "[km]", "km"),
  HEIGHT_STDDEV("Std deviation of residual between points and fitted height", "[km]", "km"),
  HAZARD("Hazard", "1 indicates a hazard to the spacecraft", null);
  //@formatter:on

  private final String keyValue; // value associated with FITS keyword
  private final String comment; // comment associated with FITS keyword
  private final String units; // units associated with the plane. Usually in PDS4 nomenclature

  PlaneInfo(String keyVal, String comment, String units) {
    this.keyValue = keyVal;
    this.comment = comment;
    this.units = units;
  }

  public String value() {
    return keyValue;
  }

  public String comment() {
    return comment;
  }

  public String units() {
    return units;
  }

  /**
   * Try to parse the enum from the given keyVal string. The match must be exact, though case is
   * ignored.
   *
   * @param keyVal
   * @return matching plane, or null if no match is found
   */
  public static PlaneInfo keyVal2Plane(String keyVal) {
    for (PlaneInfo planeName : values()) {
      if ((planeName.value() != null) && (planeName.value().equalsIgnoreCase(keyVal))) {
        return planeName;
      }
    }
    return null;
  }

  public static PlaneInfo planeFromString(String plane) {
    for (PlaneInfo planeName : values()) {
      if (planeName.toString().equals(plane)) {
        return planeName;
      }
    }
    return null;
  }

  /*
   * Create enumeration set for the first 6 planes. These are the initial planes created from the
   * OSIRIS-REx netCDF file.
   */
  public static final EnumSet<PlaneInfo> first6HTags = EnumSet.range(PlaneInfo.LAT, PlaneInfo.Z);

  public static List<PlaneInfo> coreTiltPlanes() {

    List<PlaneInfo> coreTilts = new ArrayList<>();
    coreTilts.add(PlaneInfo.TILT_AVERAGE);
    coreTilts.add(PlaneInfo.TILT_VARIATION);
    coreTilts.add(PlaneInfo.TILT_AVERAGE_DIRECTION);
    coreTilts.add(PlaneInfo.TILT_DIRECTION_VARIATION);
    coreTilts.add(PlaneInfo.TILT_RELATIVE);
    coreTilts.add(PlaneInfo.TILT_RELATIVE_DIRECTION);
    coreTilts.add(PlaneInfo.RELATIVE_HEIGHT);

    return coreTilts;
  }

  /**
   * Convert a {@code List<PlaneInfo>} to a {@code List<HeaderCard>}, where each HeaderCard follows
   * the convention {@code new HeaderCard("PLANE" + cc, thisPlane.value(), thisPlane.comment())} for
   * each "thisPlane" in planeList. The order of the returned list follows the order of planeList.
   *
   * @param planeList
   * @return
   * @throws HeaderCardException
   */
  public static List<HeaderCard> planesToHeaderCard(List<PlaneInfo> planeList)
      throws HeaderCardException {
    List<HeaderCard> planeHeaders = new ArrayList<>();
    String plane = "PLANE";
    int cc = 1;
    for (PlaneInfo thisPlane : planeList) {
      planeHeaders.add(new HeaderCard(plane + cc, thisPlane.value(), thisPlane.comment()));
      cc++;
    }
    return planeHeaders;
  }
}
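A short sketch of the PLANE-keyword convention implemented by planesToHeaderCard (hypothetical snippet; the ArrayList wrapper is needed because the method takes a List, not an EnumSet):

    // The first six planes become PLANE1..PLANE6 cards.
    List<HeaderCard> cards =
        PlaneInfo.planesToHeaderCard(new ArrayList<>(PlaneInfo.first6HTags));
    // cards.get(0) is equivalent to
    // new HeaderCard("PLANE1", "Latitude of vertices", "[deg]")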
116
src/main/java/terrasaur/enums/SigmaFileType.java
Normal file
@@ -0,0 +1,116 @@
package terrasaur.enums;

import com.google.common.base.Strings;

/**
 * Enum for defining the types of sigma files that can be loaded and utilized by the Pipeline. This
 * allows the pipeline to load and parse different formats of sigma files.
 *
 * @author espirrc1
 */
public enum SigmaFileType {

  SPCSIGMA {

    @Override
    public String commentSymbol() {
      return "";
    }

    @Override
    public String stringArg() {
      return "spc";
    }

    @Override
    public int sigmaCol() {
      return 3;
    }
  },

  ERRORFROMSQLSIGMA {

    @Override
    public String commentSymbol() {
      return "#";
    }

    @Override
    public String stringArg() {
      return "errorfromsql";
    }

    // should be the Standard Deviation column in the ErrorFromSQL file.
    @Override
    public int sigmaCol() {
      return 8;
    }
  },

  NOMATCH {

    @Override
    public String commentSymbol() {
      return "NAN";
    }

    @Override
    public String stringArg() {
      return "NAN";
    }

    @Override
    public int sigmaCol() {
      return -1;
    }
  };

  /** Symbol that is used to denote comment lines. */
  public abstract String commentSymbol();

  /** Input argument to match. */
  public abstract String stringArg();

  /** Column number where sigma values are stored. */
  public abstract int sigmaCol();

  public static SigmaFileType getFileType(String sigmaFileType) {
    if (!Strings.isNullOrEmpty(sigmaFileType)) {
      for (SigmaFileType thisType : values()) {
        if (sigmaFileType.toLowerCase().equals(thisType.stringArg())) {
          return thisType;
        }
      }
    }
    return NOMATCH;
  }

  /**
   * Return the SigmaFileType associated with the SrcProductType.
   *
   * @param srcType
   * @return
   */
  public static SigmaFileType sigmaTypeFromSrcType(SrcProductType srcType) {
    switch (srcType) {
      case SPC:
        return SigmaFileType.SPCSIGMA;
      case OLA:
        return SigmaFileType.ERRORFROMSQLSIGMA;
      default:
        return SigmaFileType.NOMATCH;
    }
  }
}
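The lookup matches on stringArg(), ignoring case, and falls back to NOMATCH (derived from getFileType):

    SigmaFileType spc = SigmaFileType.getFileType("SPC");     // SPCSIGMA, sigmaCol() == 3
    SigmaFileType none = SigmaFileType.getFileType("bogus");  // NOMATCH, sigmaCol() == -1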
93
src/main/java/terrasaur/enums/SrcProductType.java
Normal file
@@ -0,0 +1,93 @@
package terrasaur.enums;

/**
 * Enumeration storing the source product type: the product type of the source data used in creation
 * of an ALTWG product.
 *
 * @author espirrc1
 */
public enum SrcProductType {

  SFM {
    @Override
    public String getAltwgFrag() {
      return "sfm";
    }
  },

  SPC {
    @Override
    public String getAltwgFrag() {
      return "spc";
    }
  },

  // OLA Altimetry
  OLA {
    @Override
    public String getAltwgFrag() {
      return "alt";
    }
  },

  // SPC-OLA
  SPO {
    @Override
    public String getAltwgFrag() {
      return "spo";
    }
  },

  TRUTH {
    @Override
    public String getAltwgFrag() {
      return "tru";
    }
  },

  UNKNOWN {
    @Override
    public String getAltwgFrag() {
      return "unk";
    }
  };

  public static SrcProductType getType(String value) {
    value = value.toUpperCase();
    for (SrcProductType srcType : values()) {
      if (srcType.toString().equals(value)) {
        return srcType;
      }
    }
    return UNKNOWN;
  }

  /**
   * Returns the string fragment associated with the source product type. Follows the ALTWG naming
   * convention.
   *
   * @return
   */
  public abstract String getAltwgFrag();

  /**
   * Return the SrcProductType whose getAltwgFrag() string matches the stringFrag. Return UNKNOWN if
   * a match is not found.
   *
   * @param stringFrag
   * @return
   */
  public static SrcProductType fromAltwgFrag(String stringFrag) {
    for (SrcProductType prodType : SrcProductType.values()) {
      if (prodType.getAltwgFrag().equals(stringFrag)) return prodType;
    }
    return UNKNOWN;
  }
}
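The two lookups use different strings: getType matches the enum name, while fromAltwgFrag matches the filename fragment (derived from the methods above):

    SrcProductType.getType("ola");        // OLA
    SrcProductType.fromAltwgFrag("alt");  // also OLA
    SrcProductType.fromAltwgFrag("ola");  // UNKNOWN: "ola" is not a fragment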
302
src/main/java/terrasaur/fits/AltPipelnEnum.java
Normal file
@@ -0,0 +1,302 @@
package terrasaur.fits;

import java.util.EnumSet;
import java.util.Map;

public enum AltPipelnEnum {

  // settings enums. Used to determine type of product to create.
  ANCIGLOBAL,

  // optional: tells code to skip generation of products for highest res shape model
  SKIPORIGINALSHP,

  // controls whether or not certain global products get created.
  // REQUIRED TO BE IN CONFIGFILE:
  DOGLOBALSHAPE, OBJTOFITS, ADDOBJCOMMENTS, GLOBALRES0, DUMBERVALUES, DOGRAVGLOBAL, DOGLOBALGRAV_ANCI, DOGLOBALTILT_ANCI, DOGLOBALMISC_ANCI, DOGLOBALTILT,

  // controls number of slots per job to use when running global distributed gravity
  // in grid engine mode. Does not apply if running in local parallel mode
  GLOBALGRAVSLOTS,

  // controls number of slots per job to use when running local distributed gravity
  // in grid engine mode. Does not apply if running in local parallel mode
  LOCALGRAVSLOTS,

  // full path to SPICE metakernel to use when generating DSK products
  DSKERNEL,

  // enable/disable the creation of global and local DSK files
  DOGLOBALDSK, DOLOCALDSK,

  // global tilt semi-major axis in km. Now a required variable
  GTILTMAJAXIS,

  // settings for every local product generated by the pipeline
  USEOLDMAPLETS, DODTMFITSOBJ, DOGRAVLOCAL, GENLOCALGRAV, DOLOCALTILT, DOLOCAL_GRAVANCI, DOLOCAL_TILTANCI, DOLOCAL_MISCANCI,

  // controls whether to use average gravitational potential for global and local gravity
  // calculations. =0 use minimum reference potential, =1 use average reference potential
  AVGGRAVPOTGLOBAL, AVGGRAVPOTLOCAL,

  // controls RunBigMap.
  // integrate slope to height required. Defaults to "n" if this enum does not exist in the config
  // file, otherwise will evaluate value. 0="n", 1="y"
  INTSLOPE,

  // use grotesque model in RunBigMap. Defaults to not using it if this enum does not exist in the
  // config file, otherwise will evaluate value. 0="do not use grotesque model", 1="do use
  // grotesque model"
  USEGROTESQUE,

  // controls the source data, product destination, naming convention,
  // and process flow.
  DATASOURCE, REPORTTYPE, INDATADIR, OUTPUTDIR, PDSNAMING, RENAMEGLOBALOBJ, USEBIGMAPS, DOREPORT,

  // shape model density and rotation rate are now required variables. This way we can easily spot
  // what we are using as defaults.
  SMDENSITY, SMRRATE,

  // stores type of naming convention, e.g. AltProduct, AltNFTMLN, DartProduct.
  NAMINGCONVENTION,

  // set values that cannot be derived from data.
  REPORTBASENAME, VERSION,

  // everything after this is not a required keyword

  // (Optional) controls whether there is an external body that needs to be accounted for when
  // running gravity code. The value should be a csv string with no spaces: mass(kg),x,y,z
  // where x,y,z are the body fixed coordinates in km,
  // e.g. 521951167,1.19,0,0
  EXTERNALBODY,

  // (Optional). If the keyword exists and value is 1 then no GLOBAL DTMs are assumed to be created.
  // For example, in the DART Derived Product set we are not creating g_*dtm*.fits files
  NOGLOBALDTM,

  // (Optional). If the keyword exists then evaluate the shapes to process by parsing the
  // comma-separated values. Ex. if values are 0,1,2 then the pipeline will assume it has to
  // process shape0, shape1, shape2. The pipeline will also disregard the values in
  // DUMBERVALUES that otherwise determine how many shape files to process.
  SHAPE2PROC,

  // (optional) controls whether or not STL files get generated. If these do not exist in the
  // pipeConfig file then they will NOT get generated!
  GLOBALSTL, LOCALSTL,

  // keywords for local products
  //
  // MAPSmallerSZ: resize local DTMs to a different half size. For pipeline we may want to generate
  // DTMs at halfsize + tilt radius then resize the DTMs to halfsize in order to have tilts
  // evaluated with the full range of points at the edges.
  //
  // MAPFILE: contains pointer to map centers file (optional). Used by TileShapeModelWithBigmaps.
  // Defaults to auto-generated tiles if this is not specified.
  // Allows for pointers to different files for 30cm and 10cm map products.
  DOLOCALMAP, MAPDIR, MAPSCALE, MAPHALFSZ, REPORT, MAPSmallerSZ, MAPFILE, ISTAGSITE, LTILTMAJAXIS, LTILTMINAXIS, LTILTPA, MAXSCALE,

  // settings for local tag sites. Note TAGSFILE is not optional:
  // it contains the tagsite name and lat,lon of tag site tile center
  TAGDIR, TAGSCALE, TAGHALFSZ, TAGSFILE, REPORTTAG,

  // pointer to OLA database. Only required if DATASOURCE is OLA
  OLADBPATH,

  // force sigma files to all be NaN
  FORCESIGMA_NAN,

  // global sigma scale factor
  SIGMA_SCALEFACTOR,

  // local sigma scale factor
  LOCAL_SIGMA_SCALEFACTOR,

  // SIGMA file type. No longer tied to DataSource!
  SIGMAFILE_TYPE,

  // force the Report page to be HTML. Default is created as PHP
  REPORTASHTML,

  /*
   * The following are used to change default values used by the pipeline: the shape model
   * density, rotation rate, gravitational algorithm, gravitational constant, global average
   * reference potential, and local average reference potential. Added values defining the tilt
   * ellipse to use when evaluating tilts. Note the different enums for global versus local tilt
   * ellipse parameters. The pipeline will use default values for these enums if they are not
   * defined in the pipeline configuration file.
   */
  GALGORITHM, GRAVCONST, GTILTMINAXIS, GTILTPA, MASSUNCERT, VSEARCHRAD_PCTGSD, FSEARCHRAD_PCTGSD, PXPERDEG,

  // The following are options to subvert normal pipeline operations or to configure the pipeline
  // for other missions

  // global objs are supplied at all resolutions as the starting point.
  // This means we can skip ICQ2PLT, ICQDUMBER, and PLT2OBJ calls
  OBJASINPUT,

  // gzip the obj files to save space
  DOGZIP,

  // specify the queue to use in the GRID ENGINE
  GRIDQUEUE,

  // default mode for local product creation is to parallelize DistributedGravity
  // for each tile. Then processing for each job is done in local mode.
  // Set this flag to 1 to submit DistributedGravity for each tile sequentially,
  // and have each job spawn to the grid engine
  DISTGRAVITY_USEGRID,

  // override grid engine mode and use local parallel mode with the specified number of cores
  LOCALPARALLEL,

  // when creating local gravity products skip creation of gravity files that already exist
  USEOLDGRAV,

  // override ancillary fits table default setting (binary). Set to ASCII instead
  ANCITXTTABLE,

  // contains pointer to fits header config file (optional)
  FITSCONFIGFILE,

  // contains pointer to OBJ comments header file (optional). Will only
  // add comments if ADDOBJCOMMENTS flag is set.
  OBJCOMHEADER,

  // identifies whether there are poisson reconstruct data products to include in the webpage report
  LOCALPOISSON, GLOBALPOISSON;

  /*
   * The following enumerations are required to exist in the altwg pipeline config file.
   */
  public static final EnumSet<AltPipelnEnum> reqTags = EnumSet.range(DOGLOBALSHAPE, VERSION);

  /*
   * The following enumerations do not have to be present in the config file. But, if they are not,
   * then the pipeline should use the default values associated with the enums.
   */
  public static final EnumSet<AltPipelnEnum> overrideTags = EnumSet.range(SMDENSITY, GTILTPA);

  public static String mapToString(Map<AltPipelnEnum, String> pipeConfig) {
    StringBuilder sb = new StringBuilder();
    for (Map.Entry<AltPipelnEnum, String> entry : pipeConfig.entrySet()) {
      sb.append(String.format("%s:%s\n", entry.getKey().toString(), entry.getValue()));
    }
    return sb.toString();
  }

  public static AltPipelnEnum fromString(String textString) {
    for (AltPipelnEnum anciType : AltPipelnEnum.values()) {
      if (anciType.toString().equals(textString)) return anciType;
    }
    return null;
  }

  /**
   * Convenience method for evaluating a configuration parameter whose integer value indicates
   * whether the parameter is true or false. Assumes 0 or less = false, 1 or more = true. If the
   * key does not exist then return false.
   *
   * @param pipeConfig
   * @param key
   * @return
   */
  public static boolean isTrue(Map<AltPipelnEnum, String> pipeConfig, AltPipelnEnum key) {
    boolean returnFlag = false;
    int parsedVal = 0;
    if (pipeConfig.containsKey(key)) {
      try {
        parsedVal = Integer.parseInt(pipeConfig.get(key));
      } catch (NumberFormatException e) {
        System.err.println("ERROR! Could not parse integer value for pipeConfig line:");
        System.err.println(key.toString() + "," + pipeConfig.get(key));
        System.err.println("Stopping with error!");
        System.exit(1);
      }

      if (parsedVal > 0) {
        returnFlag = true;
      }
    }
    return returnFlag;
  }

  /**
   * Checks to see whether key exists. If so then return value mapped to key. Otherwise return empty
   * string.
   *
   * @param pipeConfig
   * @param key
   * @return
   */
  public static String checkAndGet(Map<AltPipelnEnum, String> pipeConfig, AltPipelnEnum key) {
    String value = "";
    if (pipeConfig.containsKey(key)) {
      value = pipeConfig.get(key);
    }
    return value;
  }

  /*
   * Some enums will have a default value, e.g. the ones in the overrideTags EnumSet. It is easier
   * to keep them as string values then convert them to other primitives as needed. Sometimes other
   * executables will be called with the default values, so it is better to keep them as strings to
   * avoid double conversion.
   */
  public static String getDefault(AltPipelnEnum thisEnum) {

    if (overrideTags.contains(thisEnum)) {

      switch (thisEnum) {

        // shape model density and rotation rate must now be explicitly defined in the configuration
        // file!
        // case SMDENSITY:
        // return "1.186";
        //
        // case SMRRATE:
        // return "0.00040626";

        case GALGORITHM:
          return "werner";

        case GRAVCONST:
          return "6.67408e-11";

        case LTILTMAJAXIS:
          return "0.0125";

        case GTILTMINAXIS:
          return "0.0125";

        case GTILTPA:
        case LTILTPA:
          return "0.0";

        case MASSUNCERT:
          return "0.01";

        case VSEARCHRAD_PCTGSD:
          return "0.25";

        case FSEARCHRAD_PCTGSD:
          return "0.5";

        default:
          return "NA";
      }
    }
    return "NA";
  }
}
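A short sketch of the integer-flag convention used by isTrue and the empty-string fallback in checkAndGet (hypothetical snippet; imports omitted):

    Map<AltPipelnEnum, String> pipeConfig = new EnumMap<>(AltPipelnEnum.class);
    pipeConfig.put(AltPipelnEnum.DOGZIP, "1");

    AltPipelnEnum.isTrue(pipeConfig, AltPipelnEnum.DOGZIP);          // true  (value > 0)
    AltPipelnEnum.isTrue(pipeConfig, AltPipelnEnum.GLOBALSTL);       // false (key absent)
    AltPipelnEnum.checkAndGet(pipeConfig, AltPipelnEnum.GRIDQUEUE);  // ""    (key absent)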
60
src/main/java/terrasaur/fits/AltwgAnciGlobal.java
Normal file
@@ -0,0 +1,60 @@
package terrasaur.fits;

import java.util.ArrayList;
import java.util.List;
import nom.tam.fits.HeaderCard;
import nom.tam.fits.HeaderCardException;
import terrasaur.enums.FitsHeaderType;

public class AltwgAnciGlobal extends AnciTableFits implements AnciFitsHeader {

  public AltwgAnciGlobal(FitsHdr fitsHeader) {
    super(fitsHeader, FitsHeaderType.ANCIGLOBALALTWG);
  }

  // methods below override the concrete methods in AnciTableFits abstract class or
  // are specific to this class

  /**
   * Create fits header as a list of HeaderCard. List contains the keywords in the order of
   * appearance in the ALTWG fits header. Overrides default implementation in AnciTableFits.
   */
  @Override
  public List<HeaderCard> createFitsHeader() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<>();

    headers.addAll(getHeaderInfo("header information"));
    headers.addAll(getMissionInfo("mission information"));
    headers.addAll(getIDInfo("identification info"));
    headers.addAll(getMapDataSrc("shape data source"));
    headers.addAll(getProcInfo("processing information"));
    headers.addAll(getMapInfo("map specific information"));
    headers.addAll(getSpatialInfo("summary spatial information"));
    headers.addAll(getSpecificInfo("product specific"));

    return headers;
  }

  /** Contains OREX-SPOC specific keywords. */
  @Override
  public List<HeaderCard> getIDInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SPOC_ID));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SDPAREA));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SDPDESC));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MPHASE));

    return headers;
  }
}
74
src/main/java/terrasaur/fits/AltwgAnciGlobalFacetRelation.java
Normal file
@@ -0,0 +1,74 @@
package terrasaur.fits;

import java.util.ArrayList;
import java.util.List;
import nom.tam.fits.HeaderCard;
import nom.tam.fits.HeaderCardException;
import terrasaur.enums.FitsHeaderType;

public class AltwgAnciGlobalFacetRelation extends AnciTableFits implements AnciFitsHeader {

  public AltwgAnciGlobalFacetRelation(FitsHdr fitsHeader) {
    super(fitsHeader, FitsHeaderType.ANCIG_FACETRELATION_ALTWG);
  }

  // methods below override the concrete methods in AnciTableFits abstract class or
  // are specific to this class

  /**
   * Create fits header as a list of HeaderCard. List contains the keywords in the order of
   * appearance in the ALTWG fits header. Overrides default implementation in AnciTableFits.
   */
  @Override
  public List<HeaderCard> createFitsHeader() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<>();

    headers.addAll(getHeaderInfo("header information"));
    headers.addAll(getMissionInfo("mission information"));
    headers.addAll(getIDInfo("identification info"));
    headers.addAll(getMapDataSrc("shape data source"));
    headers.addAll(getProcInfo("processing information"));
    headers.addAll(getMapInfo("map specific information"));
    headers.addAll(getSpatialInfo("summary spatial information"));
    headers.addAll(getSpecificInfo("product specific"));

    return headers;
  }

  /**
   * Return the HeaderCards associated with a specific product. By default we use the ALTWG specific
   * product keywords.
   *
   * @param comment
   * @return
   * @throws HeaderCardException
   */
  @Override
  public List<HeaderCard> getSpecificInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }

    headers.add(fitsHdr.getHeaderCard(HeaderTag.OBJINDX));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.GSDINDX));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.GSDINDXI));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SIGMA));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SIG_DEF));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.DQUAL_1));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.DQUAL_2));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DSIG_DEF));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DENSITY));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.ROT_RATE));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.REF_POT));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.TILT_MAJ));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.TILT_MIN));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.TILT_PA));

    return headers;
  }
}
60
src/main/java/terrasaur/fits/AltwgAnciLocal.java
Normal file
@@ -0,0 +1,60 @@
package terrasaur.fits;

import java.util.ArrayList;
import java.util.List;
import nom.tam.fits.HeaderCard;
import nom.tam.fits.HeaderCardException;
import terrasaur.enums.FitsHeaderType;

public class AltwgAnciLocal extends AnciTableFits implements AnciFitsHeader {

  public AltwgAnciLocal(FitsHdr fitsHeader) {
    super(fitsHeader, FitsHeaderType.ANCILOCALALTWG);
  }

  // methods below override the concrete methods in AnciTableFits abstract class or
  // are specific to this class

  /**
   * Create fits header as a list of HeaderCard. List contains the keywords in the order of
   * appearance in the ALTWG fits header. Overrides default implementation in AnciTableFits.
   */
  @Override
  public List<HeaderCard> createFitsHeader() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<>();

    headers.addAll(getHeaderInfo("header information"));
    headers.addAll(getMissionInfo("mission information"));
    headers.addAll(getIDInfo("identification info"));
    headers.addAll(getMapDataSrc("shape data source"));
    headers.addAll(getProcInfo("processing information"));
    headers.addAll(getMapInfo("map specific information"));
    headers.addAll(getSpatialInfo("summary spatial information"));
    headers.addAll(getSpecificInfo("product specific"));

    return headers;
  }

  /** Contains OREX-SPOC specific keywords. */
  @Override
  public List<HeaderCard> getIDInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SPOC_ID));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SDPAREA));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SDPDESC));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MPHASE));

    return headers;
  }
}
87
src/main/java/terrasaur/fits/AltwgGlobalDTM.java
Normal file
@@ -0,0 +1,87 @@
package terrasaur.fits;

import java.util.ArrayList;
import java.util.List;
import nom.tam.fits.HeaderCard;
import nom.tam.fits.HeaderCardException;
import terrasaur.enums.FitsHeaderType;
import terrasaur.utils.DTMHeader;

/**
 * Contains methods for building fits header corresponding to ALTWG Global DTM. Methods that are
 * specific to the ALTWG Global DTM fits header are contained here. Default methods are contained in
 * the DTMFits class.
 *
 * @author espirrc1
 */
public class AltwgGlobalDTM extends DTMFits implements DTMHeader {

  public AltwgGlobalDTM(FitsHdr fitsHeader) {
    super(fitsHeader, FitsHeaderType.DTMGLOBALALTWG);
  }

  /**
   * Fits header block containing observation or ID related information. Includes keywords specific
   * to OREX-SPOC.
   *
   * @return
   * @throws HeaderCardException
   */
  @Override
  public List<HeaderCard> getIDInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SPOC_ID));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SDPAREA));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SDPDESC));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MPHASE));

    return headers;
  }

  /**
   * Return the Fits header block that contains information about the fits header itself. Custom to
   * OREX-SPOC.
   *
   * @return
   * @throws HeaderCardException
   */
  @Override
  public List<HeaderCard> getHeaderInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.HDRVERS));

    return headers;
  }

  /** Added GSDI - specific to OREX-SPOC. */
  @Override
  public List<HeaderCard> getMapInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_NAME));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_VER));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_TYPE));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.GSD));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.GSDI));

    return headers;
  }
}
132
src/main/java/terrasaur/fits/AltwgLocalDTM.java
Normal file
@@ -0,0 +1,132 @@
package terrasaur.fits;

import java.util.ArrayList;
import java.util.List;
import nom.tam.fits.HeaderCard;
import nom.tam.fits.HeaderCardException;
import terrasaur.enums.FitsHeaderType;
import terrasaur.utils.DTMHeader;

/**
 * Contains methods for building the FITS header corresponding to an ALTWG local DTM. Methods that
 * are specific to the ALTWG local DTM FITS header are contained here. Default methods are
 * contained in the DTMFits class.
 *
 * @author espirrc1
 */
public class AltwgLocalDTM extends DTMFits implements DTMHeader {

  public AltwgLocalDTM(FitsHdr fitsHeader) {
    super(fitsHeader, FitsHeaderType.DTMLOCALALTWG);
  }

  /**
   * FITS header block containing observation or ID related information. Includes keywords specific
   * to OREX-SPOC.
   *
   * @return
   * @throws HeaderCardException
   */
  @Override
  public List<HeaderCard> getIDInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SPOC_ID));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SDPAREA));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SDPDESC));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MPHASE));

    return headers;
  }

  @Override
  public List<HeaderCard> getSpecificInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }

    headers.add(fitsHdr.getHeaderCardD(HeaderTag.SIGMA));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SIG_DEF));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DQUAL_1));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DQUAL_2));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.PXPERDEG));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.DENSITY));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.ROT_RATE));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.REF_POT));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.TILT_MAJ));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.TILT_MIN));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.TILT_PA));

    return headers;
  }

  /**
   * Include corner points, center vector, and the ux, uy, uz vectors describing the local plane.
   */
  @Override
  public List<HeaderCard> getSpatialInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }

    headers.add(fitsHdr.getHeaderCardD(HeaderTag.CLON));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.CLAT));

    headers.addAll(getCornerCards());
    headers.addAll(getCenterVec());
    headers.addAll(getUX());
    headers.addAll(getUY());
    headers.addAll(getUZ());

    return headers;
  }

  /**
   * Return the FITS header block that contains information about the FITS header itself. Custom to
   * OREX-SPOC.
   *
   * @return
   * @throws HeaderCardException
   */
  @Override
  public List<HeaderCard> getHeaderInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.HDRVERS));

    return headers;
  }

  /**
   * Added GSDI - specific to OREX-SPOC.
   */
  @Override
  public List<HeaderCard> getMapInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_NAME));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_VER));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_TYPE));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.GSD));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.GSDI));

    return headers;
  }
}
12
src/main/java/terrasaur/fits/AnciFitsHeader.java
Normal file
@@ -0,0 +1,12 @@
package terrasaur.fits;

import java.util.List;

import nom.tam.fits.HeaderCard;
import nom.tam.fits.HeaderCardException;

public interface AnciFitsHeader {

  public List<HeaderCard> createFitsHeader() throws HeaderCardException;

}
291
src/main/java/terrasaur/fits/AnciTableFits.java
Normal file
@@ -0,0 +1,291 @@
package terrasaur.fits;

import java.util.ArrayList;
import java.util.List;
import nom.tam.fits.HeaderCard;
import nom.tam.fits.HeaderCardException;
import terrasaur.enums.FitsHeaderType;

/**
 * Abstract generic class with concrete methods and attributes for creating a FITS table with a
 * generalized FITS header. Specific implementations can be written to create custom FITS headers
 * as needed.
 *
 * @author espirrc1
 */
public abstract class AnciTableFits {

  public final String COMMENT = "COMMENT";
  FitsHdr fitsHdr;
  public final FitsHeaderType fitsHeaderType;

  public AnciTableFits(FitsHdr fitsHdr, FitsHeaderType fitsHeaderType) {
    this.fitsHdr = fitsHdr;
    this.fitsHeaderType = fitsHeaderType;
  }

  public List<HeaderCard> createFitsHeader() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();

    headers.addAll(getHeaderInfo("header information"));
    headers.addAll(getMissionInfo("mission information"));
    headers.addAll(getIDInfo("identification info"));
    headers.addAll(getMapDataSrc("shape data source"));
    headers.addAll(getProcInfo("processing information"));
    headers.addAll(getMapInfo("map specific information"));
    headers.addAll(getSpatialInfo("summary spatial information"));

    return headers;
  }

  /**
   * Return the FITS header block that contains information about the FITS header itself.
   *
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getHeaderInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.HDRVERS));

    return headers;
  }

  /**
   * FITS header block containing information about the mission.
   *
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getMissionInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MISSION));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.HOSTNAME));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.TARGET));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.ORIGIN));

    return headers;
  }

  /**
   * FITS header block containing observation or ID related information.
   *
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getIDInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MPHASE));

    return headers;
  }

  /**
   * FITS header block containing information about the source data used to create the map.
   *
   * @param comment
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getMapDataSrc(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRC));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRCF));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRCV));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRCS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRCD));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.CREATOR));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.OBJ_FILE));
    return headers;
  }

  public List<HeaderCard> getMapInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_NAME));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_VER));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_TYPE));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.GSD));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.GSDI));

    return headers;
  }

  /**
   * FITS header block containing information about the software processing done to generate the
   * product.
   *
   * @param comment
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getProcInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.PRODNAME));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATEPRD));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SOFTWARE));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SOFT_VER));

    return headers;
  }

  public List<HeaderCard> getSpatialInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }

    headers.add(fitsHdr.getHeaderCardD(HeaderTag.CLON));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.CLAT));

    headers.addAll(getCornerCards());

    // add CNTR_V_X,Y,Z
    headers.addAll(getCenterVec());

    // add UX_X,Y,Z
    headers.addAll(getUX());

    // add UY_X,Y,Z
    headers.addAll(getUY());

    // add UZ_X,Y,Z
    headers.addAll(getUZ());

    return headers;
  }

  /**
   * Return the HeaderCards associated with a specific product. By default we use the ALTWG
   * specific product keywords.
   *
   * @param comment
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getSpecificInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }

    headers.add(fitsHdr.getHeaderCard(HeaderTag.SIGMA));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SIG_DEF));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.DQUAL_1));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.DQUAL_2));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DSIG_DEF));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DENSITY));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.ROT_RATE));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.REF_POT));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.TILT_MAJ));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.TILT_MIN));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.TILT_PA));

    return headers;
  }

  /**
   * Return HeaderCards associated with the upper/lower left/right corners of the image.
   *
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getCornerCards() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    String fmtS = "%18.13f";
    headers.add(fitsHdr.getHeaderCard(HeaderTag.LLCLNG, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.LLCLAT, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.URCLNG, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.URCLAT, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.LRCLNG, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.LRCLAT, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.ULCLNG, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.ULCLAT, fmtS));

    return headers;
  }

  /**
   * Return HeaderCards for the vector to the center of the image.
   *
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getCenterVec() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.CNTR_V_X));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.CNTR_V_Y));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.CNTR_V_Z));

    return headers;
  }

  public List<HeaderCard> getUX() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UX_X));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UX_Y));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UX_Z));

    return headers;
  }

  public List<HeaderCard> getUY() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UY_X));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UY_Y));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UY_Z));

    return headers;
  }

  public List<HeaderCard> getUZ() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UZ_X));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UZ_Y));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UZ_Z));

    return headers;
  }

}
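Since every section method returns its own List of HeaderCard, a custom product only overrides the sections it needs and inherits the rest. A minimal sketch follows; the class name and keyword selection here are hypothetical illustrations, not part of the Terrasaur source:

    package terrasaur.fits;

    import java.util.ArrayList;
    import java.util.List;
    import nom.tam.fits.HeaderCard;
    import nom.tam.fits.HeaderCardException;
    import terrasaur.enums.FitsHeaderType;

    // Hypothetical subclass: overrides only the ID section, inherits all other sections.
    public class ExampleAnciFits extends AnciTableFits implements AnciFitsHeader {

      public ExampleAnciFits(FitsHdr fitsHdr) {
        // The FitsHeaderType constant is illustrative; pick the one matching the product.
        super(fitsHdr, FitsHeaderType.ANCILOCALALTWG);
      }

      @Override
      public List<HeaderCard> getIDInfo(String comment) throws HeaderCardException {
        List<HeaderCard> headers = new ArrayList<HeaderCard>();
        if (comment.length() > 0) {
          headers.add(new HeaderCard(COMMENT, comment, false));
        }
        // Keep only the mission phase keyword for this hypothetical product.
        headers.add(fitsHdr.getHeaderCard(HeaderTag.MPHASE));
        return headers;
      }
    }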
351
src/main/java/terrasaur/fits/DTMFits.java
Normal file
@@ -0,0 +1,351 @@
package terrasaur.fits;

import java.util.ArrayList;
import java.util.List;
import nom.tam.fits.HeaderCard;
import nom.tam.fits.HeaderCardException;
import terrasaur.enums.FitsHeaderType;

/**
 * Abstract generic class with concrete methods and attributes for creating a FITS DTM cube with a
 * generalized FITS header. Specific implementations can be written to create custom FITS headers
 * as needed.
 *
 * @author espirrc1
 */
public abstract class DTMFits {

  public final String COMMENT = "COMMENT";
  final FitsHdr fitsHdr;
  private FitsData fitsData;
  private boolean dataContained = false;
  public final FitsHeaderType fitsHeaderType;

  public DTMFits(FitsHdr fitsHdr, FitsHeaderType fitsHeaderType) {
    this.fitsHdr = fitsHdr;
    this.fitsHeaderType = fitsHeaderType;
  }

  public void setData(FitsData fitsData) {
    this.fitsData = fitsData;
    dataContained = true;
  }

  public List<HeaderCard> createFitsHeader(List<HeaderCard> planeList) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();

    headers.addAll(getHeaderInfo("header information"));
    headers.addAll(getMissionInfo("mission information"));
    headers.addAll(getIDInfo("identification info"));
    headers.addAll(getMapDataSrc("data source"));
    headers.addAll(getProcInfo("processing information"));
    headers.addAll(getMapInfo("map specific information"));
    headers.addAll(getSpatialInfo("summary spatial information"));
    headers.addAll(getPlaneInfo("plane information", planeList));
    headers.addAll(getSpecificInfo("product specific"));

    // END keyword
    headers.add(getEnd());

    return headers;
  }

  /**
   * Return the FITS header block that contains information about the FITS header itself. No string
   * is passed, so no comment appears in the header.
   *
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getHeaderInfo() throws HeaderCardException {
    return getHeaderInfo("");
  }

  /**
   * Return the FITS header block that contains information about the FITS header itself. This is a
   * custom section and so is left empty here. It can be defined in the concrete classes that
   * extend this class.
   *
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getHeaderInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    return headers;
  }

  /**
   * FITS header block containing information about the mission.
   *
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getMissionInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MISSION));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.HOSTNAME));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.TARGET));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.ORIGIN));

    return headers;
  }

  /**
   * FITS header block containing observation or ID related information.
   *
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getIDInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MPHASE));

    return headers;
  }

  /**
   * FITS header block containing information about the source data used to create the map.
   *
   * @param comment
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getMapDataSrc(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRC));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRCF));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRCV));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRCS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRCD));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.OBJ_FILE));

    return headers;
  }

  public List<HeaderCard> getMapInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_NAME));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_VER));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_TYPE));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.GSD));

    return headers;
  }

  /**
   * FITS header block containing information about the software processing done to generate the
   * product.
   *
   * @param comment
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getProcInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.add(fitsHdr.getHeaderCard(HeaderTag.PRODNAME));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATEPRD));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SOFTWARE));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SOFT_VER));

    return headers;
  }

  /**
   * Creates the header block containing spatial information for the DTM, e.g. corner locations,
   * vector to center, Ux, Uy, Uz.
   *
   * @param comment
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getSpatialInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }

    headers.add(fitsHdr.getHeaderCardD(HeaderTag.CLON));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.CLAT));
    headers.addAll(getCornerCards());

    // remove these keywords; they are specific to local products and MLNs
    // headers.addAll(getCenterVec());
    // headers.addAll(getUX());
    // headers.addAll(getUY());
    // headers.addAll(getUZ());

    return headers;
  }

  /**
   * Return the HeaderCards describing each DTM plane. Used to build the portion of the FITS header
   * that contains information about the planes in the DTM cube. Checks that all data planes are
   * described by comparing the size of planeList against the length of the FITS data.
   *
   * @param comment
   * @param planeList
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getPlaneInfo(String comment, List<HeaderCard> planeList)
      throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }
    headers.addAll(planeList);

    if (!dataContained) {
      String errMesg = "ERROR! Cannot return keywords describing the DTM cube without "
          + "having the actual data!";
      throw new RuntimeException(errMesg);
    }

    // check whether planeList describes all the planes in data; throw a runtime exception if not
    if (planeList.size() != fitsData.getData().length) {
      System.out.println("Error: plane List has " + planeList.size() + " planes but datacube has "
          + fitsData.getData().length + " planes");
      for (HeaderCard thisPlane : planeList) {
        System.out.println(thisPlane.getKey() + ":" + thisPlane.getValue());
      }
      String errMesg = "Error: plane List has " + planeList.size() + " planes but datacube has "
          + fitsData.getData().length + " planes";
      throw new RuntimeException(errMesg);
    }

    return headers;
  }

  /**
   * Return the HeaderCards associated with a specific product.
   *
   * @param comment
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getSpecificInfo(String comment) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    if (comment.length() > 0) {
      headers.add(new HeaderCard(COMMENT, comment, false));
    }

    headers.add(fitsHdr.getHeaderCardD(HeaderTag.SIGMA));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SIG_DEF));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.PXPERDEG));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.DENSITY));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.ROT_RATE));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.REF_POT));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.TILT_MAJ));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.TILT_MIN));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.TILT_PA));

    return headers;
  }

  /**
   * Return HeaderCards associated with the upper/lower left/right corners of the image.
   *
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getCornerCards() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    String fmtS = "%18.13f";
    headers.add(fitsHdr.getHeaderCard(HeaderTag.LLCLNG, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.LLCLAT, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.URCLNG, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.URCLAT, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.LRCLNG, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.LRCLAT, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.ULCLNG, fmtS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.ULCLAT, fmtS));

    return headers;
  }

  /**
   * Return HeaderCards for the vector to the center of the image.
   *
   * @return
   * @throws HeaderCardException
   */
  public List<HeaderCard> getCenterVec() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.CNTR_V_X));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.CNTR_V_Y));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.CNTR_V_Z));

    return headers;
  }

  public List<HeaderCard> getUX() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UX_X));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UX_Y));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UX_Z));

    return headers;
  }

  public List<HeaderCard> getUY() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UY_X));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UY_Y));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UY_Z));

    return headers;
  }

  public List<HeaderCard> getUZ() throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UZ_X));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UZ_Y));
    headers.add(fitsHdr.getHeaderCardD(HeaderTag.UZ_Z));

    return headers;
  }

  public HeaderCard getEnd() throws HeaderCardException {
    return new HeaderCard(HeaderTag.END.toString(), HeaderTag.END.value(), HeaderTag.END.comment());
  }

}
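A minimal sketch of driving a DTM header build, showing the setData/plane-count contract that getPlaneInfo enforces. The demo class, and in particular the "PLANEn" keyword naming, are assumptions for illustration; only the FitsData builder and DTMFits calls come from the source above:

    package terrasaur.fits;

    import java.util.ArrayList;
    import java.util.List;
    import nom.tam.fits.HeaderCard;
    import nom.tam.fits.HeaderCardException;

    public class DtmHeaderDemo {
      // data is the [planes][rows][cols] cube; fitsHdr is an already-built FitsHdr.
      static List<HeaderCard> buildHeader(FitsHdr fitsHdr, double[][][] data)
          throws HeaderCardException {
        DTMFits dtm = new AltwgGlobalDTM(fitsHdr);
        // setData must be called first: getPlaneInfo throws if the cube is absent,
        // or if the plane list length does not match data.length.
        dtm.setData(new FitsData.FitsDataBuilder(data, true).build());

        List<HeaderCard> planeCards = new ArrayList<HeaderCard>();
        for (int i = 0; i < data.length; i++) {
          // One descriptive card per plane; the keyword scheme here is hypothetical.
          planeCards.add(new HeaderCard("PLANE" + (i + 1), "plane " + (i + 1), null));
        }
        return dtm.createFitsHeader(planeCards);
      }
    }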
190
src/main/java/terrasaur/fits/FitsData.java
Normal file
@@ -0,0 +1,190 @@
package terrasaur.fits;

import terrasaur.enums.AltwgDataType;
import terrasaur.enums.SrcProductType;

public class FitsData {

  private final double[][][] data;
  private final double[] V;
  private final double[] ux;
  private final double[] uy;
  private final double[] uz;
  private final double scale;
  private final double gsd;
  private final boolean hasV;
  private final boolean hasUnitv;
  private final boolean hasGsd;
  private final boolean isGlobal;
  private final boolean hasAltType;
  private final AltwgDataType altProd;
  private final String dataSource;

  private FitsData(FitsDataBuilder b) {
    this.data = b.data;
    this.V = b.V;
    this.ux = b.ux;
    this.uy = b.uy;
    this.uz = b.uz;
    this.scale = b.scale;
    this.gsd = b.gsd;
    this.hasV = b.hasV;
    this.hasUnitv = b.hasUnitv;
    this.hasGsd = b.hasGsd;
    this.hasAltType = b.hasAltType;
    this.isGlobal = b.isGlobal;
    this.altProd = b.altProd;
    this.dataSource = b.dataSource;
  }

  public AltwgDataType getAltProdType() {
    return this.altProd;
  }

  public String getSrcProdType() {
    return this.dataSource;
  }

  public double[][][] getData() {
    return this.data;
  }

  public boolean hasV() {
    return this.hasV;
  }

  public double[] getV() {
    return this.V;
  }

  public boolean hasUnitv() {
    return this.hasUnitv;
  }

  public double[] getUnit(UnitDir udir) {
    switch (udir) {
      case UX:
        return this.ux;

      case UY:
        return this.uy;

      case UZ:
        return this.uz;

      default:
        throw new RuntimeException();
    }
  }

  public double getScale() {
    return this.scale;
  }

  public boolean hasGsd() {
    return this.hasGsd;
  }

  public boolean hasAltType() {
    return this.hasAltType;
  }

  public boolean isGlobal() {
    return this.isGlobal;
  }

  public double getGSD() {
    if (this.hasGsd) {
      return this.gsd;
    } else {
      String errMesg = "ERROR! fitsData does not have gsd!";
      throw new RuntimeException(errMesg);
    }
  }

  public static class FitsDataBuilder {
    private final double[][][] data;
    private double[] V = null;
    private double[] ux = null;
    private double[] uy = null;
    private double[] uz = null;
    private boolean hasV = false;
    private boolean hasUnitv = false;
    private boolean hasGsd = false;
    private boolean isGlobal = false;
    private boolean hasAltType = false;
    private double scale = Double.NaN;
    private double gsd = Double.NaN;
    private AltwgDataType altProd = null;
    private String dataSource = SrcProductType.UNKNOWN.toString();

    /**
     * Constructor. isGlobal is used to fill out the FITS keyword describing whether the data is
     * local or global. It may also be used for the FITS naming convention.
     *
     * @param data
     * @param isGlobal
     */
    public FitsDataBuilder(double[][][] data, boolean isGlobal) {
      this.data = data;
      this.isGlobal = isGlobal;
    }

    public FitsDataBuilder setAltProdType(AltwgDataType altProd) {
      this.altProd = altProd;
      this.hasAltType = true;
      return this;
    }

    public FitsDataBuilder setDataSource(String dataSource) {
      this.dataSource = dataSource;
      return this;
    }

    public FitsDataBuilder setV(double[] V) {
      this.V = V;
      this.hasV = true;
      return this;
    }

    public FitsDataBuilder setU(double[] uvec, UnitDir udir) {
      switch (udir) {
        case UX:
          this.ux = uvec;
          this.hasUnitv = true;
          break;

        case UY:
          this.uy = uvec;
          this.hasUnitv = true;
          break;

        case UZ:
          this.uz = uvec;
          this.hasUnitv = true;
          break;

        default:
          throw new RuntimeException();
      }
      return this;
    }

    public FitsDataBuilder setScale(double scale) {
      this.scale = scale;
      return this;
    }

    public FitsDataBuilder setGSD(double gsd) {
      this.gsd = gsd;
      this.hasGsd = true;
      return this;
    }

    public FitsData build() {
      return new FitsData(this);
    }
  }
}
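The builder makes the optional fields explicit: each setter flips its corresponding has* flag, and getGSD throws if the GSD was never set. A minimal usage sketch (the numeric values and demo class are placeholders):

    package terrasaur.fits;

    public class FitsDataDemo {
      public static void main(String[] args) {
        double[][][] cube = new double[3][1024][1024];  // planes x rows x cols

        FitsData fd = new FitsData.FitsDataBuilder(cube, false)  // false = local product
            .setV(new double[] {1.0, 0.0, 0.0})                  // vector to plane center
            .setU(new double[] {0.0, 1.0, 0.0}, UnitDir.UY)      // one local-plane unit vector
            .setGSD(0.05)                                        // ground sample distance
            .build();

        System.out.println("global? " + fd.isGlobal() + ", GSD = " + fd.getGSD());
      }
    }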
1120
src/main/java/terrasaur/fits/FitsHdr.java
Normal file
684
src/main/java/terrasaur/fits/FitsHeader.java
Normal file
@@ -0,0 +1,684 @@
package terrasaur.fits;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import nom.tam.fits.FitsException;
import nom.tam.fits.HeaderCard;
import nom.tam.fits.HeaderCardException;
import terrasaur.utils.StringUtil;

/**
 * @deprecated this class has been replaced by the FitsHdr class and the FitsHeaderFactory
 *             implementation of the FitsHdr class. USE THIS CLASS AT YOUR PERIL. THE KEYWORDS HAVE
 *             NOT BEEN UPDATED SINCE THIS CLASS WAS DEPRECATED.
 * @author espirrc1
 */
@Deprecated
public class FitsHeader {

  public final FitsValCom bitPix;
  public final FitsValCom nAxis1;
  public final FitsValCom nAxis2;
  public final FitsValCom nAxis3;

  // FITS keywords common to all ALTWG products
  public final FitsValCom hdrVers;
  public final FitsValCom mission;
  public final FitsValCom hostName;
  public final FitsValCom target;
  public final FitsValCom origin;
  public final FitsValCom spocid;
  public final FitsValCom sdparea;
  public final FitsValCom sdpdesc;
  public final FitsValCom missionPhase;
  public final FitsValCom dataSource;
  public final FitsValCom dataSourceV;
  public final FitsValCom dataSourceS;
  public final FitsValCom dataSourceFile;
  public final FitsValCom datasrcd;
  public final FitsValCom productName;
  public final FitsValCom dateprd;
  public final FitsValCom productType;
  public final FitsValCom productVer;
  public final FitsValCom author;

  // for ancillary FITS products
  // map name (description): tilt, gravity, slope, etc.
  // map type: global or local
  public final FitsValCom objFile;
  public final FitsValCom mapName;
  public final FitsValCom mapType;

  // center lon, lat; common to both local and global ancillary FITS
  public final FitsValCom clon;
  public final FitsValCom clat;

  // global ancillary FITS geometry
  public final FitsValCom minlon;
  public final FitsValCom maxlon;
  public final FitsValCom minlat;
  public final FitsValCom maxlat;

  // local ancillary FITS geometry
  public final FitsValCom ulcLon;
  public final FitsValCom ulcLat;
  public final FitsValCom llcLon;
  public final FitsValCom llcLat;
  public final FitsValCom lrcLon;
  public final FitsValCom lrcLat;
  public final FitsValCom urcLon;
  public final FitsValCom urcLat;
  public final FitsValCom cntr_v_x;
  public final FitsValCom cntr_v_y;
  public final FitsValCom cntr_v_z;
  public final FitsValCom ux_x;
  public final FitsValCom ux_y;
  public final FitsValCom ux_z;
  public final FitsValCom uy_x;
  public final FitsValCom uy_y;
  public final FitsValCom uy_z;
  public final FitsValCom uz_x;
  public final FitsValCom uz_y;
  public final FitsValCom uz_z;
  public final FitsValCom gsd;
  public final FitsValCom gsdi;

  // common to both local and global ancillary FITS
  public final FitsValCom sigma;
  public final FitsValCom sigDef;
  public final FitsValCom dqual1;
  public final FitsValCom dqual2;
  public final FitsValCom dsigDef;
  public final FitsValCom density;
  public final FitsValCom rotRate;
  public final FitsValCom refPot;
  public final FitsValCom tiltRad;
  public final FitsValCom tiltMaj;
  public final FitsValCom tiltMin;
  public final FitsValCom tiltPa;
  public final FitsValCom mapVer;

  public final EnumMap<HeaderTag, FitsValCom> tag2valcom;

  private FitsHeader(FitsHeaderBuilder b) {
    this.bitPix = b.bitPix;
    this.nAxis1 = b.nAxis1;
    this.nAxis2 = b.nAxis2;
    this.nAxis3 = b.nAxis3;
    this.hdrVers = b.hdrVers;
    this.mission = b.mission;
    this.hostName = b.hostName;
    this.target = b.target;
    this.origin = b.origin;
    this.spocid = b.spocid;
    this.sdparea = b.sdparea;
    this.sdpdesc = b.sdpdesc;
    this.missionPhase = b.missionPhase;
    this.dataSource = b.dataSource;
    this.dataSourceFile = b.dataSourceFile;
    this.dataSourceV = b.dataSourceV;
    this.datasrcd = b.datasrcd;
    this.dataSourceS = b.dataSourceS;
    this.productName = b.productName;
    this.dateprd = b.dateprd;
    this.productType = b.productType;
    this.productVer = b.productVer;
    this.objFile = b.objFile;
    this.author = b.author;
    this.mapName = b.mapName;
    this.mapType = b.mapType;
    this.clon = b.clon;
    this.clat = b.clat;
    this.minlon = b.minlon;
    this.maxlon = b.maxlon;
    this.minlat = b.minlat;
    this.maxlat = b.maxlat;
    this.ulcLon = b.ulcLon;
    this.ulcLat = b.ulcLat;
    this.llcLon = b.llcLon;
    this.llcLat = b.llcLat;
    this.lrcLon = b.lrcLon;
    this.lrcLat = b.lrcLat;
    this.urcLon = b.urcLon;
    this.urcLat = b.urcLat;
    this.cntr_v_x = b.cntr_v_x;
    this.cntr_v_y = b.cntr_v_y;
    this.cntr_v_z = b.cntr_v_z;
    this.ux_x = b.ux_x;
    this.ux_y = b.ux_y;
    this.ux_z = b.ux_z;
    this.uy_x = b.uy_x;
    this.uy_y = b.uy_y;
    this.uy_z = b.uy_z;
    this.uz_x = b.uz_x;
    this.uz_y = b.uz_y;
    this.uz_z = b.uz_z;
    this.gsd = b.gsd;
    this.gsdi = b.gsdi;
    this.sigma = b.sigma;
    this.sigDef = b.sigDef;
    this.dqual1 = b.dqual1;
    this.dqual2 = b.dqual2;
    this.dsigDef = b.dsigDef;
    this.mapVer = b.mapVer;
    this.density = b.density;
    this.rotRate = b.rotRate;
    this.refPot = b.refPot;
    this.tiltRad = b.tiltRad;

    // hardcode semi-major and semi-minor axis = radius,
    // since we only deal with circles for now
    this.tiltMaj = b.tiltRad;
    this.tiltMin = b.tiltRad;
    this.tiltPa = b.tiltPa;
    this.tag2valcom = b.tag2valcom;
  }

  public static class FitsHeaderBuilder {

    // Initialize the FITS keywords. Some of them may not change during the mission, but
    // the option to change them is given via the public methods.
    private FitsValCom bitPix = new FitsValCom("32", null);
    private FitsValCom nAxis1 = new FitsValCom("1024", null);
    private FitsValCom nAxis2 = new FitsValCom("1024", null);
    private FitsValCom nAxis3 = new FitsValCom("numberPlanesNotSet", null);

    private FitsValCom hdrVers =
        new FitsValCom(HeaderTag.HDRVERS.value(), HeaderTag.HDRVERS.comment());
    private FitsValCom mission =
        new FitsValCom(HeaderTag.MISSION.value(), HeaderTag.MISSION.comment());
    private FitsValCom hostName = new FitsValCom(HeaderTag.HOSTNAME.value(), null);
    private FitsValCom target = new FitsValCom(HeaderTag.TARGET.value(), null);
    private FitsValCom origin =
        new FitsValCom(HeaderTag.ORIGIN.value(), HeaderTag.ORIGIN.comment());
    private FitsValCom spocid =
        new FitsValCom(HeaderTag.SPOC_ID.value(), HeaderTag.SPOC_ID.comment());
    private FitsValCom sdparea =
        new FitsValCom(HeaderTag.SDPAREA.value(), HeaderTag.SDPAREA.comment());
    private FitsValCom sdpdesc =
        new FitsValCom(HeaderTag.SDPDESC.value(), HeaderTag.SDPDESC.comment());
    private FitsValCom missionPhase =
        new FitsValCom(HeaderTag.MPHASE.value(), HeaderTag.MPHASE.comment());
    private FitsValCom dataSource =
        new FitsValCom(HeaderTag.DATASRC.value(), HeaderTag.DATASRC.comment());
    private FitsValCom dataSourceFile =
        new FitsValCom(HeaderTag.DATASRCF.value(), HeaderTag.DATASRCF.comment());
    private FitsValCom dataSourceS =
        new FitsValCom(HeaderTag.DATASRCS.value(), HeaderTag.DATASRCS.comment());
    private FitsValCom dataSourceV =
        new FitsValCom(HeaderTag.DATASRCV.value(), HeaderTag.DATASRCV.comment());
    private FitsValCom software =
        new FitsValCom(HeaderTag.SOFTWARE.value(), HeaderTag.SOFTWARE.comment());
    private FitsValCom softver =
        new FitsValCom(HeaderTag.SOFT_VER.value(), HeaderTag.SOFT_VER.comment());
    private FitsValCom datasrcd =
        new FitsValCom(HeaderTag.DATASRCD.value(), HeaderTag.DATASRCD.comment());
    private FitsValCom productName =
        new FitsValCom(HeaderTag.PRODNAME.value(), HeaderTag.PRODNAME.comment());
    private FitsValCom dateprd =
        new FitsValCom(HeaderTag.DATEPRD.value(), HeaderTag.DATEPRD.comment());
    private FitsValCom productType = new FitsValCom("productTypeNotSet", null);
    private FitsValCom productVer =
        new FitsValCom(HeaderTag.PRODVERS.value(), HeaderTag.PRODVERS.comment());
    private FitsValCom mapVer =
        new FitsValCom(HeaderTag.MAP_VER.value(), HeaderTag.MAP_VER.comment());

    private FitsValCom objFile =
        new FitsValCom(HeaderTag.OBJ_FILE.value(), HeaderTag.OBJ_FILE.comment());
    private FitsValCom author =
        new FitsValCom(HeaderTag.CREATOR.value(), HeaderTag.CREATOR.comment());

    private FitsValCom mapName =
        new FitsValCom(HeaderTag.MAP_NAME.value(), HeaderTag.MAP_NAME.comment());
    private FitsValCom mapType =
        new FitsValCom(HeaderTag.MAP_TYPE.value(), HeaderTag.MAP_TYPE.comment());
    private FitsValCom clon = new FitsValCom("-999", HeaderTag.CLON.comment());
    private FitsValCom clat = new FitsValCom("-999", HeaderTag.CLAT.comment());
    private FitsValCom minlon = new FitsValCom("-999", HeaderTag.MINLON.comment());
    private FitsValCom maxlon = new FitsValCom("-999", HeaderTag.MAXLON.comment());
    private FitsValCom minlat = new FitsValCom("-999", HeaderTag.MINLAT.comment());
    private FitsValCom maxlat = new FitsValCom("-999", HeaderTag.MAXLAT.comment());
    private FitsValCom pxperdeg = new FitsValCom("-999", HeaderTag.PXPERDEG.comment());
    private FitsValCom ulcLon = new FitsValCom("-999", HeaderTag.ULCLNG.comment());
    private FitsValCom ulcLat = new FitsValCom("-999", HeaderTag.ULCLAT.comment());
    private FitsValCom llcLon = new FitsValCom("-999", HeaderTag.LLCLNG.comment());
    private FitsValCom llcLat = new FitsValCom("-999", HeaderTag.LLCLAT.comment());
    private FitsValCom lrcLon = new FitsValCom("-999", HeaderTag.LRCLNG.comment());
    private FitsValCom lrcLat = new FitsValCom("-999", HeaderTag.LRCLAT.comment());
    private FitsValCom urcLon = new FitsValCom("-999", HeaderTag.URCLNG.comment());
    private FitsValCom urcLat = new FitsValCom("-999", HeaderTag.URCLAT.comment());
    private FitsValCom cntr_v_x = new FitsValCom("-999", HeaderTag.CNTR_V_X.comment());
    private FitsValCom cntr_v_y = new FitsValCom("-999", HeaderTag.CNTR_V_Y.comment());
    private FitsValCom cntr_v_z = new FitsValCom("-999", HeaderTag.CNTR_V_Z.comment());
    private FitsValCom ux_x = new FitsValCom("-999", HeaderTag.UX_X.comment());
    private FitsValCom ux_y = new FitsValCom("-999", HeaderTag.UX_Y.comment());
    private FitsValCom ux_z = new FitsValCom("-999", HeaderTag.UX_Z.comment());
    private FitsValCom uy_x = new FitsValCom("-999", HeaderTag.UY_X.comment());
    private FitsValCom uy_y = new FitsValCom("-999", HeaderTag.UY_Y.comment());
    private FitsValCom uy_z = new FitsValCom("-999", HeaderTag.UY_Z.comment());
    private FitsValCom uz_x = new FitsValCom("-999", HeaderTag.UZ_X.comment());
    private FitsValCom uz_y = new FitsValCom("-999", HeaderTag.UZ_Y.comment());
    private FitsValCom uz_z = new FitsValCom("-999", HeaderTag.UZ_Z.comment());
    private FitsValCom gsd = new FitsValCom("-999", HeaderTag.GSD.comment());
    private FitsValCom gsdi = new FitsValCom("-999", HeaderTag.GSDI.comment());
    private FitsValCom sigma = new FitsValCom("-999", "N/A");
    private FitsValCom sigDef = new FitsValCom(HeaderTag.SIGMA.value(), HeaderTag.SIGMA.comment());
    private FitsValCom dqual1 = new FitsValCom("-999", HeaderTag.DQUAL_1.comment());
    private FitsValCom dqual2 = new FitsValCom("-999", HeaderTag.DQUAL_2.comment());
    private FitsValCom dsigDef =
        new FitsValCom(HeaderTag.DSIG_DEF.value(), HeaderTag.DSIG_DEF.comment());
    private FitsValCom density = new FitsValCom("-999", HeaderTag.DENSITY.comment());
    private FitsValCom rotRate = new FitsValCom("-999", HeaderTag.ROT_RATE.comment());
    private FitsValCom refPot = new FitsValCom("-999", HeaderTag.REF_POT.comment());
    private FitsValCom tiltRad = new FitsValCom("-999", HeaderTag.TILT_RAD.comment());
    private FitsValCom tiltMaj = new FitsValCom("-999", HeaderTag.TILT_MAJ.comment());
    private FitsValCom tiltMin = new FitsValCom("-999", HeaderTag.TILT_MIN.comment());
    private FitsValCom tiltPa = new FitsValCom("0", HeaderTag.TILT_PA.comment());

    private EnumMap<HeaderTag, FitsValCom> tag2valcom =
        new EnumMap<HeaderTag, FitsValCom>(HeaderTag.class);

    public FitsHeaderBuilder() {

      /*
       * Initialize the map between header tags and the FitsValCom variables. This allows us to
       * use enumeration to select which of the FitsValCom variables we want to update, eliminating
       * the need for specific 'set' statements for each variable.
       */
      tag2valcom.put(HeaderTag.HDRVERS, hdrVers);
      tag2valcom.put(HeaderTag.MISSION, mission);
      tag2valcom.put(HeaderTag.HOSTNAME, hostName);
      tag2valcom.put(HeaderTag.TARGET, target);
      tag2valcom.put(HeaderTag.ORIGIN, origin);
      tag2valcom.put(HeaderTag.SPOC_ID, spocid);
      tag2valcom.put(HeaderTag.SDPAREA, sdparea);
      tag2valcom.put(HeaderTag.SDPDESC, sdpdesc);
      tag2valcom.put(HeaderTag.MPHASE, missionPhase);
      tag2valcom.put(HeaderTag.DATASRC, dataSource);
      tag2valcom.put(HeaderTag.DATASRCV, dataSourceV);
      tag2valcom.put(HeaderTag.DATASRCF, dataSourceFile);
      tag2valcom.put(HeaderTag.DATASRCS, dataSourceS);
      // removed from ALTWG keywords per Map Format SIS draft v2
      tag2valcom.put(HeaderTag.DATASRCD, datasrcd);
      tag2valcom.put(HeaderTag.SOFTWARE, software);
      tag2valcom.put(HeaderTag.SOFT_VER, softver);
      tag2valcom.put(HeaderTag.PRODNAME, productName);
      tag2valcom.put(HeaderTag.DATEPRD, dateprd);
      tag2valcom.put(HeaderTag.PRODVERS, productVer);
      tag2valcom.put(HeaderTag.MAP_VER, mapVer);
      tag2valcom.put(HeaderTag.CREATOR, author);
      tag2valcom.put(HeaderTag.OBJ_FILE, objFile);
      tag2valcom.put(HeaderTag.CLON, clon);
      tag2valcom.put(HeaderTag.CLAT, clat);
      tag2valcom.put(HeaderTag.MINLON, minlon);
      tag2valcom.put(HeaderTag.MAXLON, maxlon);
      tag2valcom.put(HeaderTag.MINLAT, minlat);
      tag2valcom.put(HeaderTag.MAXLAT, maxlat);
      tag2valcom.put(HeaderTag.PXPERDEG, pxperdeg);
      tag2valcom.put(HeaderTag.LLCLNG, llcLon);
      tag2valcom.put(HeaderTag.LLCLAT, llcLat);
      tag2valcom.put(HeaderTag.LRCLNG, lrcLon);
      tag2valcom.put(HeaderTag.LRCLAT, lrcLat);
      tag2valcom.put(HeaderTag.URCLNG, urcLon);
      tag2valcom.put(HeaderTag.URCLAT, urcLat);
      tag2valcom.put(HeaderTag.ULCLNG, ulcLon);
      tag2valcom.put(HeaderTag.ULCLAT, ulcLat);
      tag2valcom.put(HeaderTag.CNTR_V_X, cntr_v_x);
      tag2valcom.put(HeaderTag.CNTR_V_Y, cntr_v_y);
      tag2valcom.put(HeaderTag.CNTR_V_Z, cntr_v_z);
      tag2valcom.put(HeaderTag.UX_X, ux_x);
      tag2valcom.put(HeaderTag.UX_Y, ux_y);
      tag2valcom.put(HeaderTag.UX_Z, ux_z);
      tag2valcom.put(HeaderTag.UY_X, uy_x);
      tag2valcom.put(HeaderTag.UY_Y, uy_y);
      tag2valcom.put(HeaderTag.UY_Z, uy_z);
      // map the UZ tags to the uz fields (not the ux fields)
      tag2valcom.put(HeaderTag.UZ_X, uz_x);
      tag2valcom.put(HeaderTag.UZ_Y, uz_y);
      tag2valcom.put(HeaderTag.UZ_Z, uz_z);
      tag2valcom.put(HeaderTag.GSD, gsd);
      tag2valcom.put(HeaderTag.GSDI, gsdi);
      tag2valcom.put(HeaderTag.SIGMA, sigma);
      tag2valcom.put(HeaderTag.SIG_DEF, sigDef);
      tag2valcom.put(HeaderTag.DQUAL_1, dqual1);
      tag2valcom.put(HeaderTag.DQUAL_2, dqual2);
      tag2valcom.put(HeaderTag.DSIG_DEF, dsigDef);
      tag2valcom.put(HeaderTag.DENSITY, density);
      tag2valcom.put(HeaderTag.ROT_RATE, rotRate);
      tag2valcom.put(HeaderTag.REF_POT, refPot);
      tag2valcom.put(HeaderTag.TILT_RAD, tiltRad);
      tag2valcom.put(HeaderTag.TILT_MAJ, tiltMaj);
      tag2valcom.put(HeaderTag.TILT_MIN, tiltMin);
      tag2valcom.put(HeaderTag.TILT_PA, tiltPa);
      tag2valcom.put(HeaderTag.MAP_NAME, mapName);
      tag2valcom.put(HeaderTag.MAP_TYPE, mapType);
      tag2valcom.put(HeaderTag.MAP_VER, mapVer);

    }

    public FitsHeaderBuilder setTarget(String val, String comment) {
      this.target.setV(val);
      this.target.setC(comment);
      return this;
    }

    public FitsHeaderBuilder setBitPix(String val, String comment) {
      this.bitPix.setV(val);
      this.bitPix.setC(comment);
      return this;
    }

    public FitsHeaderBuilder setNAx1(String val, String comment) {
      this.nAxis1.setV(val);
      this.nAxis1.setC(comment);
      return this;
    }

    public FitsHeaderBuilder setNAx2(String val, String comment) {
      this.nAxis2.setV(val);
      this.nAxis2.setC(comment);
      return this;
    }

    public FitsHeaderBuilder setNAx3(String val, String comment) {
      this.nAxis3.setV(val);
      this.nAxis3.setC(comment);
      return this;
    }

    public FitsHeaderBuilder setVCbyHeaderTag(HeaderTag hdrTag, String value, String comment) {

      if (tag2valcom.containsKey(hdrTag)) {
        tag2valcom.get(hdrTag).setVC(value, comment);
      }
      return this;
    }

    public FitsHeaderBuilder setVbyHeaderTag(HeaderTag hdrTag, String value) {
      if (tag2valcom.containsKey(hdrTag)) {
        tag2valcom.get(hdrTag).setV(value);
      }
      return this;
    }

    public FitsHeaderBuilder setCbyHeaderTag(HeaderTag hdrTag, String comment) {
      if (tag2valcom.containsKey(hdrTag)) {
        tag2valcom.get(hdrTag).setC(comment);
      }
      return this;
    }

    /**
     * Set values in the ancillary header builder class by parsing the HeaderCard. Parse the
     * appropriate values as determined by the value of the HeaderCard key.
     *
     * @param headerCard
     * @return
     */
    public FitsHeaderBuilder setbyHeaderCard(HeaderCard headerCard) {
      HeaderTag hdrTag = HeaderTag.END;
      try {
        hdrTag = HeaderTag.valueOf(headerCard.getKey());
        setVCbyHeaderTag(hdrTag, headerCard.getValue(), headerCard.getComment());
      } catch (IllegalArgumentException e) {
        // COMMENT and PLANE cards are expected to have no matching HeaderTag; stay quiet for those
        if (!headerCard.getKey().contains("COMMENT") && !headerCard.getKey().contains("PLANE")) {
          System.out.println(headerCard.getKey() + " not a HeaderTag");
        }
      } catch (NullPointerException ne) {
        System.out.println("null pointer exception for:" + headerCard.getKey());
      }

      return this;
    }

    public FitsHeader build() {

      return new FitsHeader(this);
    }

  }
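A minimal sketch of the enum-keyed setters in use. The demo class and values are hypothetical, and note that this whole class is deprecated in favor of FitsHdr:

    package terrasaur.fits;

    public class FitsHeaderBuilderDemo {
      @SuppressWarnings("deprecation")
      public static void main(String[] args) {
        // Values are placeholders; the enum-keyed setters replace per-field set methods.
        FitsHeader hdr = new FitsHeader.FitsHeaderBuilder()
            .setVbyHeaderTag(HeaderTag.TARGET, "BENNU")
            .setVCbyHeaderTag(HeaderTag.DENSITY, "1.19", "bulk density")
            .build();
        System.out.println("built deprecated FitsHeader: " + hdr);
      }
    }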
/**
|
||||
* Loops through map of fits header cards and tries to parse values relevant to the ALTWG Fits
|
||||
* file.
|
||||
*
|
||||
* @param map
|
||||
*/
|
||||
public static FitsHeaderBuilder copyFitsHeader(Map<String, HeaderCard> map) {
|
||||
|
||||
FitsHeaderBuilder hdrBuilder = new FitsHeaderBuilder();
|
||||
|
||||
// loop through each of the HeaderCards in the map and see if any will help build the altwg
|
||||
// header
|
||||
for (Map.Entry<String, HeaderCard> entry : map.entrySet()) {
|
||||
HeaderCard thisCard = entry.getValue();
|
||||
hdrBuilder.setbyHeaderCard(thisCard);
|
||||
}
|
||||
return hdrBuilder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy the fits header from fits file and use it to populate and return FitsHeaderBuilder.
|
||||
*
|
||||
* @param fitsFile
|
||||
* @return
|
||||
*/
|
||||
public static FitsHeaderBuilder copyFitsHeader(File fitsFile) {
|
||||
|
||||
FitsHeaderBuilder hdrBuilder = new FitsHeaderBuilder();
|
||||
try {
|
||||
Map<String, HeaderCard> map = FitsUtil.getFitsHeaderAsMap(fitsFile.getCanonicalPath());
|
||||
hdrBuilder = copyFitsHeader(map);
|
||||
return hdrBuilder;
|
||||
} catch (FitsException | IOException e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
String errmesg = "ERROR in FitsHeader.copyFitsHeader()! " + " Unable to parse fits file:"
|
||||
+ fitsFile.toString() + " for fits header!";
|
||||
System.err.println(errmesg);
|
||||
System.exit(1);
|
||||
|
||||
}
|
||||
return hdrBuilder;
|
||||
}

  /**
   * Parse a FITS configuration file and update a FitsHeaderBuilder. The builder is used to
   * generate a FITS header. Each config line has the form KEYWORD#value or KEYWORD#value#comment,
   * with '#' as the field separator.
   *
   * @param configFile path to the config file
   * @param hdrBuilder can either be an existing builder or null. If null, a new builder is
   *        created and returned.
   * @return the updated (or newly created) builder
   * @throws IOException
   */
  public static FitsHeaderBuilder configHdrBuilder(String configFile, FitsHeaderBuilder hdrBuilder)
      throws IOException {

    File checkF = new File(configFile);
    if (!checkF.exists()) {
      String errMesg = "ERROR:FITS header configuration file:" + configFile + " does not exist!";
      throw new RuntimeException(errMesg);
    }

    if (hdrBuilder == null) {
      System.out.println("builder passed to FitsHeader.configHdrBuilder() is null. Generating"
          + " new FitsHeaderBuilder");
      hdrBuilder = new FitsHeaderBuilder();
    }
    List<String> content = FileUtils.readLines(new File(configFile), Charset.defaultCharset());
    boolean separatorFound = false;
    for (String line : content) {

      String[] keyval = line.split("#");
      if (keyval.length > 1) {

        separatorFound = true;
        // check if there is a match w/ HeaderTags
        HeaderTag thisTag = HeaderTag.tagFromString(keyval[0]);

        if (thisTag != HeaderTag.NOMATCH) {
          // pass to fits header builder and see if it matches on a fits keyword

          if (keyval.length == 2) {
            // assume user only wants to overwrite the value portion. Leave the comment alone.
            System.out.println("setting " + thisTag.toString() + " to " + keyval[1]);
            hdrBuilder.setVbyHeaderTag(thisTag, keyval[1]);
          } else if (keyval.length == 3) {
            if (keyval[2].contains("null")) {
              // user explicitly wants to override any comment in this header with null
              hdrBuilder.setVCbyHeaderTag(thisTag, keyval[1], null);
            } else {
              System.out.println("setting " + thisTag.toString() + " to " + keyval[1]
                  + ", comment to " + keyval[2]);
              hdrBuilder.setVCbyHeaderTag(thisTag, keyval[1], keyval[2]);
            }
          } else {
            System.out.println(
                "Warning: the following config file line has more than 2 '#' separators:");
            System.out.println(line);
            System.out.println("Cannot parse. Skipping this line.");
          }
        }
      }
    }
    if (!separatorFound) {
      System.out.println("WARNING! The fits config file:" + configFile
          + " does not appear to be a valid config file! There is no '#' separator!");
    }
    return hdrBuilder;
  }
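
  /*
   * Example config file contents (keywords must match HeaderTag names; the values shown are
   * hypothetical). One keyword per line, fields separated by '#':
   *
   *   MISSION#OSIRIS-REx#mission name
   *   TARGET#BENNU
   *   DATASRC#SPC#data source method
   */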

  /**
   * Given a FITS keyword, return the HeaderCard containing the keyword value and comment.
   *
   * @param tag FITS keyword as enumeration
   * @return HeaderCard
   * @throws HeaderCardException
   */
  public HeaderCard getHeaderCard(HeaderTag tag) throws HeaderCardException {

    // format for double values
    String fmtS = "%18.13f";
    if (tag2valcom.containsKey(tag)) {
      FitsValCom valcom = tag2valcom.get(tag);

      /*
       * FitsValCom stores the values as String because extracting the value from the source fits
       * file returns it as a string. Geometry values need to be converted to double and stored in
       * the HeaderCard as a double; then, when written to the fits file, the value will not have
       * quotes around it.
       */

      /*
       * Type of value to store in the HeaderCard:
       *   0 = string (single quotes around value)
       *   1 = formatted double
       *   2 = free-form double
       */
      int returnType = 0;

      switch (tag) {

        case PXPERDEG:
        case CLON:
        case CLAT:
        case MINLON:
        case MAXLON:
        case MINLAT:
        case MAXLAT:
        case URCLNG:
        case URCLAT:
        case LRCLNG:
        case LRCLAT:
        case LLCLNG:
        case LLCLAT:
        case ULCLNG:
        case ULCLAT:
        case GSDI:
          returnType = 1;
          break;

        case CNTR_V_X:
        case CNTR_V_Y:
        case CNTR_V_Z:
        case UX_X:
        case UX_Y:
        case UX_Z:
        case UY_X:
        case UY_Y:
        case UY_Z:
        case UZ_X:
        case UZ_Y:
        case UZ_Z:
        case DENSITY:
        case ROT_RATE:
        case REF_POT:
        case TILT_RAD:
        case TILT_MAJ:
        case TILT_MIN:
        case TILT_PA:
        case GSD:
        case SIGMA:
        case DQUAL_1:
        case DQUAL_2:
          returnType = 2;
          break;

        default:
          returnType = 0;
          break;
      }

      switch (returnType) {
        case 0:
          return new HeaderCard(tag.toString(), valcom.getV(), valcom.getC());

        case 1:
          return new HeaderCard(tag.toString(), StringUtil.str2fmtD(fmtS, valcom.getV()),
              valcom.getC());

        case 2:
          return new HeaderCard(tag.toString(), StringUtil.parseSafeD(valcom.getV()),
              valcom.getC());
      }
    }

    String errMesg = "ERROR! Cannot find fits keyword:" + tag.toString();
    throw new RuntimeException(errMesg);
  }
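
  /*
   * For example (values and comments hypothetical), a geometry keyword such as CLAT is written as
   * an unquoted formatted double, while a string keyword such as MISSION keeps its quotes:
   *
   *   CLAT    =   -0.1234567890123 / center latitude
   *   MISSION = 'OSIRIS-REx'       / mission name
   */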

  /**
   * Initialize a FITS header builder using keyword/values from a FITS config file. Can be used to
   * initialize a headerBuilder for any FITS file.
   *
   * @param configFile path to the config file, or null to skip configuration
   * @return new builder, modified by the config file if one was given
   */
  public static FitsHeaderBuilder initHdrBuilder(String configFile) {
    FitsHeaderBuilder hdrBuilder = new FitsHeaderBuilder();

    if (configFile != null) {
      // try to load config file if it exists and modify fits header builder with it.
      try {
        configHdrBuilder(configFile, hdrBuilder);
      } catch (IOException e) {
        e.printStackTrace();
        System.out.println("Error trying to read config file:" + configFile);
      }
    }
    return hdrBuilder;
  }

}
213
src/main/java/terrasaur/fits/FitsHeaderFactory.java
Normal file
@@ -0,0 +1,213 @@
package terrasaur.fits;

import java.util.ArrayList;
import java.util.List;
import nom.tam.fits.HeaderCard;
import nom.tam.fits.HeaderCardException;
import terrasaur.enums.FitsHeaderType;
import terrasaur.utils.DTMHeader;

/**
 * Factory class that returns a {@code List<HeaderCard>} in which the HeaderCard objects are in
 * the correct order for writing to a FITS header. Also contains methods for creating a
 * {@code List<HeaderCard>} for each section of a FITS header.
 *
 * @author espirrc1
 *
 */
public class FitsHeaderFactory {

  private static final String PLANE = "PLANE";
  private static final String COMMENT = "COMMENT";

  /**
   * Return the DTM header implementation matching the given header type, or null if the type has
   * no DTM header.
   */
  public static DTMHeader getDTMHeader(FitsHdr fitsHdr, FitsHeaderType headerType) {

    switch (headerType) {

      case NFTMLN:
        return new NFTmln(fitsHdr);

      case DTMLOCALALTWG:
        return new AltwgLocalDTM(fitsHdr);

      case DTMGLOBALALTWG:
        return new AltwgGlobalDTM(fitsHdr);

      case DTMGLOBALGENERIC:
        return new GenericGlobalDTM(fitsHdr);

      case DTMLOCALGENERIC:
        return new GenericLocalDTM(fitsHdr);

      default:
        return null;
    }
  }

  /**
   * Return the ancillary header implementation matching the given header type, or null if the
   * type has no ancillary header.
   */
  public static AnciFitsHeader getAnciHeader(FitsHdr fitsHdr, FitsHeaderType headerType) {

    switch (headerType) {
      case ANCIGLOBALGENERIC:
        return new GenericAnciGlobal(fitsHdr);

      case ANCILOCALGENERIC:
        return new GenericAnciLocal(fitsHdr);

      case ANCIGLOBALALTWG:
        return new AltwgAnciGlobal(fitsHdr);

      case ANCIG_FACETRELATION_ALTWG:
        return new AltwgAnciGlobalFacetRelation(fitsHdr);

      case ANCILOCALALTWG:
        return new AltwgAnciLocal(fitsHdr);

      default:
        return null;
    }
  }
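
  /*
   * Usage sketch (illustrative; construction of the FitsHdr argument is elided, and the
   * FitsHeaderType values are those handled by the switches above):
   *
   *   DTMHeader dtmHeader = FitsHeaderFactory.getDTMHeader(fitsHdr, FitsHeaderType.DTMGLOBALALTWG);
   *   AnciFitsHeader anciHeader =
   *       FitsHeaderFactory.getAnciHeader(fitsHdr, FitsHeaderType.ANCIGLOBALALTWG);
   */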

  /**
   * FITS header block that contains information about the FITS header itself, e.g. header version
   * number.
   *
   * @param fitsHdr source of the header cards
   * @return header cards for this block, in write order
   * @throws HeaderCardException
   */
  public static List<HeaderCard> getHeaderInfo(FitsHeader fitsHdr) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(new HeaderCard(COMMENT, "header information", false));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.HDRVERS));

    return headers;
  }

  /**
   * FITS header block that contains information about the mission, e.g. MISSION name, HOST name,
   * TARGET name.
   *
   * @param fitsHdr source of the header cards
   * @return header cards for this block, in write order
   * @throws HeaderCardException
   */
  public static List<HeaderCard> getMissionInfo(FitsHeader fitsHdr) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(new HeaderCard(COMMENT, "mission information", false));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MISSION));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.HOSTNAME));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.TARGET));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.ORIGIN));

    return headers;
  }

  /**
   * FITS header block that contains ID information, i.e. information that uniquely identifies the
   * data product.
   *
   * @param fitsHdr source of the header cards
   * @return header cards for this block, in write order
   * @throws HeaderCardException
   */
  public static List<HeaderCard> getIdInfo(FitsHeader fitsHdr) throws HeaderCardException {
    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(new HeaderCard(COMMENT, "identification info", false));

    // check latest Map Format SIS revision to see if SPOC handles these keywords
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SPOC_ID));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SDPAREA));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SDPDESC));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MPHASE));

    return headers;
  }

  /**
   * FITS header block that contains information about the source shape model used to create the
   * FITS file.
   *
   * @param fitsHdr source of the header cards
   * @return header cards for this block, in write order
   * @throws HeaderCardException
   */
  public static List<HeaderCard> getShapeSourceInfo(FitsHeader fitsHdr) throws HeaderCardException {
    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(new HeaderCard(COMMENT, "shape data source", false));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRC));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRCF));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRCV));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRCS));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATASRCD));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.OBJ_FILE));

    return headers;
  }

  /**
   * FITS header block that contains processing information, e.g. product name, production date,
   * and software version.
   */
  public static List<HeaderCard> getProcInfo(FitsHeader fitsHdr) throws HeaderCardException {
    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(new HeaderCard(COMMENT, "processing information", false));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.PRODNAME));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.DATEPRD));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SOFTWARE));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.SOFT_VER));

    return headers;
  }

  /**
   * FITS header block that contains map specific information, e.g. map name, version, and type.
   */
  public static List<HeaderCard> getMapSpecificInfo(FitsHeader fitsHdr) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();
    headers.add(new HeaderCard(COMMENT, "map specific information", false));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_NAME));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_VER));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.MAP_TYPE));

    // check latest Map Format SIS revision to see if SPOC handles these keywords:
    // MAP_PROJ*, GSD*, GSDI*

    return headers;
  }

  /**
   * FITS header block that contains summary spatial information. Center longitude/latitude are
   * written for all header types; corner coordinates are added for local ancillary products.
   */
  public static List<HeaderCard> getSummarySpatialInfo(FitsHeader fitsHdr,
      FitsHeaderType fitsHeaderType) throws HeaderCardException {

    List<HeaderCard> headers = new ArrayList<HeaderCard>();

    // this section is common to all fitsHeaderTypes
    headers.add(new HeaderCard(COMMENT, "summary spatial information", false));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.CLON));
    headers.add(fitsHdr.getHeaderCard(HeaderTag.CLAT));

    switch (fitsHeaderType) {
      case ANCILOCALGENERIC:
        headers.add(fitsHdr.getHeaderCard(HeaderTag.LLCLNG));
        headers.add(fitsHdr.getHeaderCard(HeaderTag.LLCLAT));
        headers.add(fitsHdr.getHeaderCard(HeaderTag.URCLNG));
        headers.add(fitsHdr.getHeaderCard(HeaderTag.URCLAT));
        headers.add(fitsHdr.getHeaderCard(HeaderTag.LRCLNG));
        headers.add(fitsHdr.getHeaderCard(HeaderTag.LRCLAT));
        headers.add(fitsHdr.getHeaderCard(HeaderTag.ULCLNG));
        headers.add(fitsHdr.getHeaderCard(HeaderTag.ULCLAT));
        break;

      default:
        // default does nothing because the switch only handles specific cases.
        break;
    }
    return headers;
  }
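
  /*
   * Usage sketch (illustrative; assumes a populated FitsHeader named fitsHdr): assemble the
   * blocks in write order before adding them to a fits header.
   *
   *   List<HeaderCard> cards = new ArrayList<>();
   *   cards.addAll(FitsHeaderFactory.getHeaderInfo(fitsHdr));
   *   cards.addAll(FitsHeaderFactory.getMissionInfo(fitsHdr));
   *   cards.addAll(FitsHeaderFactory.getProcInfo(fitsHdr));
   */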

}