Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • orekit/rugged
  • sdinot/rugged
  • yzokras/rugged
  • youngcle/rugged-mod
4 results
Show changes
Showing
with 3605 additions and 818 deletions
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.adjustment;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.hipparchus.analysis.differentiation.Gradient;
import org.hipparchus.linear.Array2DRowRealMatrix;
import org.hipparchus.linear.ArrayRealVector;
import org.hipparchus.linear.RealMatrix;
import org.hipparchus.linear.RealVector;
import org.hipparchus.optim.ConvergenceChecker;
import org.hipparchus.optim.nonlinear.vector.leastsquares.LeastSquaresBuilder;
import org.hipparchus.optim.nonlinear.vector.leastsquares.LeastSquaresProblem;
import org.hipparchus.optim.nonlinear.vector.leastsquares.MultivariateJacobianFunction;
import org.hipparchus.optim.nonlinear.vector.leastsquares.ParameterValidator;
import org.hipparchus.util.Pair;
import org.orekit.rugged.adjustment.measurements.Observables;
import org.orekit.rugged.adjustment.measurements.SensorToSensorMapping;
import org.orekit.rugged.api.Rugged;
import org.orekit.rugged.errors.RuggedException;
import org.orekit.rugged.errors.RuggedMessages;
import org.orekit.rugged.linesensor.LineSensor;
import org.orekit.rugged.linesensor.SensorPixel;
import org.orekit.rugged.utils.SpacecraftToObservedBody;
import org.orekit.time.AbsoluteDate;
import org.orekit.utils.ParameterDriver;
/** Constructs the optimization problem for a list of tie points.
 * @author Guylaine Prat
 * @author Lucie Labat Allee
 * @author Jonathan Guinet
 * @author Luc Maisonobe
 * @since 2.0
 */
public class InterSensorsOptimizationProblemBuilder extends OptimizationProblemBuilder {

    /** Key for target. */
    private static final String TARGET = "Target";

    /** Key for weight. */
    private static final String WEIGHT = "Weight";

    /** Rugged instances to refine, indexed by Rugged name (insertion order preserved). */
    private Map<String, Rugged> ruggedMap;

    /** Sensor to sensor mappings used to generate the target tab for optimization. */
    private List<SensorToSensorMapping> sensorToSensorMappings;

    /** Targets and weights of the optimization problem, under keys {@link #TARGET} and {@link #WEIGHT}. */
    private HashMap<String, double[]> targetAndWeight;

    /** Constructor.
     * @param sensors list of sensors to refine
     * @param measurements set of observables
     * @param ruggedList Rugged instances to refine
     */
    public InterSensorsOptimizationProblemBuilder(final List<LineSensor> sensors,
                                                  final Observables measurements,
                                                  final Collection<Rugged> ruggedList) {
        super(sensors, measurements);
        // index the Rugged instances by name, keeping insertion order
        this.ruggedMap = new LinkedHashMap<>();
        for (final Rugged rugged : ruggedList) {
            this.ruggedMap.put(rugged.getName(), rugged);
        }
        this.initMapping();
    }

    /** {@inheritDoc} */
    @Override
    protected void initMapping() {
        // collect every inter-sensor mapping declared in the measurements,
        // probing all (ruggedA, sensorA) x (ruggedB, sensorB) combinations
        this.sensorToSensorMappings = new ArrayList<>();
        for (final String ruggedNameA : this.ruggedMap.keySet()) {
            for (final String ruggedNameB : this.ruggedMap.keySet()) {
                for (final LineSensor sensorA : this.getSensors()) {
                    for (final LineSensor sensorB : this.getSensors()) {
                        final String sensorNameA = sensorA.getName();
                        final String sensorNameB = sensorB.getName();
                        final SensorToSensorMapping mapping =
                            this.getMeasurements().getInterMapping(ruggedNameA, sensorNameA, ruggedNameB, sensorNameB);
                        if (mapping != null) {
                            this.sensorToSensorMappings.add(mapping);
                        }
                    }
                }
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    protected void createTargetAndWeight() {

        // count the mapped pixel pairs over all references
        int n = 0;
        for (final SensorToSensorMapping reference : this.sensorToSensorMappings) {
            n += reference.getMapping().size();
        }
        if (n == 0) {
            throw new RuggedException(RuggedMessages.NO_REFERENCE_MAPPINGS);
        }

        // each mapped pair contributes two equations: LOS distance and central body distance
        n = 2 * n;
        final double[] target = new double[n];
        final double[] weight = new double[n];

        int k = 0;
        for (final SensorToSensorMapping reference : this.sensorToSensorMappings) {

            // Get central body constraint weight
            final double bodyConstraintWeight = reference.getBodyConstraintWeight();

            // distances are stored per mapping index; a plain index loop replaces the
            // previous Iterator-based loop whose next() was never called (it relied
            // solely on an "i == size" break guard to terminate)
            for (int i = 0; i < reference.getMapping().size(); i++) {

                // LOS distance, weighted by the complement of the body constraint weight
                final Double losDistance = reference.getLosDistance(i);
                weight[k] = 1.0 - bodyConstraintWeight;
                target[k++] = losDistance.doubleValue();

                // central body distance (constraint)
                final Double bodyDistance = reference.getBodyDistance(i);
                weight[k] = bodyConstraintWeight;
                target[k++] = bodyDistance.doubleValue();
            }
        }

        this.targetAndWeight = new HashMap<>();
        this.targetAndWeight.put(TARGET, target);
        this.targetAndWeight.put(WEIGHT, weight);
    }

    /** {@inheritDoc} */
    @Override
    protected MultivariateJacobianFunction createFunction() {

        // model function: maps the normalized parameter vector to the vector of
        // (LOS distance, body distance) pairs and its Jacobian
        final MultivariateJacobianFunction model = point -> {

            // set the current parameters values
            int i = 0;
            for (final ParameterDriver driver : this.getDrivers()) {
                driver.setNormalizedValue(point.getEntry(i++));
            }

            final double[] target = this.targetAndWeight.get(TARGET);

            // compute distances and their partial derivatives
            final RealVector value = new ArrayRealVector(target.length);
            final RealMatrix jacobian = new Array2DRowRealMatrix(target.length, this.getNbParams());

            int l = 0;
            for (final SensorToSensorMapping reference : this.sensorToSensorMappings) {

                final String ruggedNameA = reference.getRuggedNameA();
                final String ruggedNameB = reference.getRuggedNameB();
                final Rugged ruggedA = this.ruggedMap.get(ruggedNameA);
                if (ruggedA == null) {
                    throw new RuggedException(RuggedMessages.INVALID_RUGGED_NAME);
                }
                final Rugged ruggedB = this.ruggedMap.get(ruggedNameB);
                if (ruggedB == null) {
                    throw new RuggedException(RuggedMessages.INVALID_RUGGED_NAME);
                }

                // sensors and spacecraft-to-body transforms are constant for a given
                // reference: look them up once, outside the per-pair loop
                final LineSensor lineSensorA = ruggedA.getLineSensor(reference.getSensorNameA());
                final LineSensor lineSensorB = ruggedB.getLineSensor(reference.getSensorNameB());
                final SpacecraftToObservedBody scToBodyA = ruggedA.getScToBody();

                for (final Map.Entry<SensorPixel, SensorPixel> mapping : reference.getMapping()) {

                    final SensorPixel spA = mapping.getKey();
                    final SensorPixel spB = mapping.getValue();
                    final AbsoluteDate dateA = lineSensorA.getDate(spA.getLineNumber());
                    final AbsoluteDate dateB = lineSensorB.getDate(spB.getLineNumber());
                    final double pixelA = spA.getPixelNumber();
                    final double pixelB = spB.getPixelNumber();

                    // LOS distance and body distance with derivatives w.r.t. the selected drivers
                    final Gradient[] ilResult =
                        ruggedB.distanceBetweenLOSderivatives(lineSensorA, dateA, pixelA, scToBodyA,
                                                              lineSensorB, dateB, pixelB, this.getGenerator());

                    // extract the value
                    value.setEntry(l, ilResult[0].getValue());
                    value.setEntry(l + 1, ilResult[1].getValue());

                    // extract the Jacobian, un-normalizing each derivative by the driver scale
                    final int[] orders = new int[this.getNbParams()];
                    int m = 0;
                    for (final ParameterDriver driver : this.getDrivers()) {
                        final double scale = driver.getScale();
                        orders[m] = 1;
                        jacobian.setEntry(l, m, ilResult[0].getPartialDerivative(orders) * scale);
                        jacobian.setEntry(l + 1, m, ilResult[1].getPartialDerivative(orders) * scale);
                        orders[m] = 0;
                        m++;
                    }

                    // pass to the next pair of evaluations
                    l += 2;
                }
            }

            // distance result with Jacobian for all reference points
            return new Pair<RealVector, RealMatrix>(value, jacobian);
        };

        return model;
    }

    /** Least square problem builder.
     * @param maxEvaluations maximum number of iterations and evaluations
     * @param convergenceThreshold parameter convergence threshold
     * @return the least square problem
     */
    @Override
    public final LeastSquaresProblem build(final int maxEvaluations, final double convergenceThreshold) {

        this.createTargetAndWeight();
        final double[] target = this.targetAndWeight.get(TARGET);
        final double[] start = this.createStartTab();
        final ParameterValidator validator = this.createParameterValidator();
        final ConvergenceChecker<LeastSquaresProblem.Evaluation> checker = this.createChecker(convergenceThreshold);
        final MultivariateJacobianFunction model = this.createFunction();

        return new LeastSquaresBuilder()
                        .lazyEvaluation(false).maxIterations(maxEvaluations)
                        .maxEvaluations(maxEvaluations).weight(null).start(start)
                        .target(target).parameterValidator(validator).checker(checker)
                        .model(model).build();
    }
}
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.adjustment;
import org.hipparchus.linear.LUDecomposer;
import org.hipparchus.linear.QRDecomposer;
import org.hipparchus.optim.nonlinear.vector.leastsquares.GaussNewtonOptimizer;
import org.hipparchus.optim.nonlinear.vector.leastsquares.LeastSquaresOptimizer;
import org.hipparchus.optim.nonlinear.vector.leastsquares.LeastSquaresOptimizer.Optimum;
import org.hipparchus.optim.nonlinear.vector.leastsquares.LeastSquaresProblem;
import org.hipparchus.optim.nonlinear.vector.leastsquares.LevenbergMarquardtOptimizer;
import org.orekit.rugged.errors.RuggedInternalError;
/** LeastSquareAdjuster
 * Class for setting least square algorithm chosen for solving optimization problem.
 * @author Jonathan Guinet
 * @author Lucie Labat-Allee
 * @author Guylaine Prat
 * @since 2.0
 */
public class LeastSquareAdjuster {

    /** Underlying least square optimizer. */
    private final LeastSquaresOptimizer adjuster;

    /** Identifier of the chosen least square optimizer. */
    private final OptimizerId optimizerID;

    /** Constructor.
     * @param optimizerID optimizer choice
     */
    public LeastSquareAdjuster(final OptimizerId optimizerID) {
        this.optimizerID = optimizerID;
        this.adjuster = this.selectOptimizer();
    }

    /** Default constructor: Gauss Newton with QR decomposition algorithm. */
    public LeastSquareAdjuster() {
        this(OptimizerId.GAUSS_NEWTON_QR);
    }

    /** Solve the least square problem.
     * @param problem the least square problem
     * @return the solution
     */
    public Optimum optimize(final LeastSquaresProblem problem) {
        return this.adjuster.optimize(problem);
    }

    /** Create the optimizer matching the chosen identifier.
     * @return the least square optimizer
     */
    private LeastSquaresOptimizer selectOptimizer() {
        if (this.optimizerID == OptimizerId.LEVENBERG_MARQUADT) {
            return new LevenbergMarquardtOptimizer();
        } else if (this.optimizerID == OptimizerId.GAUSS_NEWTON_LU) {
            return new GaussNewtonOptimizer(new LUDecomposer(1e-11), true);
        } else if (this.optimizerID == OptimizerId.GAUSS_NEWTON_QR) {
            return new GaussNewtonOptimizer(new QRDecomposer(1e-11), false);
        } else {
            // this should never happen
            throw new RuggedInternalError(null);
        }
    }
}
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.adjustment;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.hipparchus.Field;
import org.hipparchus.analysis.differentiation.Gradient;
import org.hipparchus.analysis.differentiation.GradientField;
import org.hipparchus.optim.ConvergenceChecker;
import org.hipparchus.optim.nonlinear.vector.leastsquares.LeastSquaresProblem;
import org.hipparchus.optim.nonlinear.vector.leastsquares.MultivariateJacobianFunction;
import org.hipparchus.optim.nonlinear.vector.leastsquares.ParameterValidator;
import org.orekit.rugged.adjustment.measurements.Observables;
import org.orekit.rugged.errors.RuggedException;
import org.orekit.rugged.errors.RuggedMessages;
import org.orekit.rugged.linesensor.LineSensor;
import org.orekit.rugged.utils.DerivativeGenerator;
import org.orekit.utils.ParameterDriver;
/**
 * Builder for optimization problem.
 * <p>
 * Constructs the optimization problem defined by a set of measurement and sensors.
 * </p>
 * @author Jonathan Guinet
 * @author Guylaine Prat
 * @since 2.0
 */
abstract class OptimizationProblemBuilder {

    /** Margin used in parameters estimation for the inverse location lines range. */
    protected static final int ESTIMATION_LINE_RANGE_MARGIN = 100;

    /** Gradient generator. */
    private final DerivativeGenerator<Gradient> generator;

    /** Selected parameter drivers list. */
    private final List<ParameterDriver> drivers;

    /** Number of parameters to refine. */
    private final int nbParams;

    /** Measurements. */
    private final Observables measurements;

    /** Sensors list. */
    private final List<LineSensor> sensors;

    /** Constructor.
     * @param sensors list of sensors to refine
     * @param measurements set of observables
     */
    OptimizationProblemBuilder(final List<LineSensor> sensors, final Observables measurements) {
        this.generator = this.createGenerator(sensors);
        this.drivers = this.generator.getSelected();
        this.nbParams = this.drivers.size();
        if (this.nbParams == 0) {
            // nothing to estimate: refining would be meaningless
            throw new RuggedException(RuggedMessages.NO_PARAMETERS_SELECTED);
        }
        this.measurements = measurements;
        this.sensors = sensors;
    }

    /** Least squares problem builder.
     * @param maxEvaluations maximum number of evaluations
     * @param convergenceThreshold convergence threshold
     * @return the least squares problem
     */
    public abstract LeastSquaresProblem build(int maxEvaluations, double convergenceThreshold);

    /** Create the convergence check.
     * <p>
     * check LInf distance of parameters variation between previous and current iteration
     * </p>
     * @param parametersConvergenceThreshold convergence threshold
     * @return the checker
     */
    final ConvergenceChecker<LeastSquaresProblem.Evaluation>
        createChecker(final double parametersConvergenceThreshold) {

        final ConvergenceChecker<LeastSquaresProblem.Evaluation> checker = (iteration, previous, current)
            -> current.getPoint().getLInfDistance(previous.getPoint()) <= parametersConvergenceThreshold;
        return checker;
    }

    /** Create start points for optimization algorithm.
     * @return start parameters values (normalized)
     */
    final double[] createStartTab() {
        // Get start points (as a normalized value)
        final double[] start = new double[this.nbParams];
        int iStart = 0;
        for (final ParameterDriver driver : this.drivers) {
            start[iStart++] = driver.getNormalizedValue();
        }
        return start;
    }

    /** Create targets and weights of optimization problem. */
    protected abstract void createTargetAndWeight();

    /** Create the model function value and its Jacobian.
     * @return the model function value and its Jacobian
     */
    protected abstract MultivariateJacobianFunction createFunction();

    /** Parse the observables to select mapping. */
    protected abstract void initMapping();

    /** Create parameter validator.
     * @return parameter validator
     */
    final ParameterValidator createParameterValidator() {
        // Prevent parameters to exceed their prescribed bounds
        final ParameterValidator validator = params -> {
            int i = 0;
            for (final ParameterDriver driver : this.drivers) {
                // let the parameter driver handle min/max clipping, then read the
                // (possibly clipped) value back into the parameter vector
                driver.setNormalizedValue(params.getEntry(i));
                params.setEntry(i++, driver.getNormalizedValue());
            }
            return params;
        };
        return validator;
    }

    /** Create the generator for {@link Gradient} instances.
     * @param selectedSensors list of sensors referencing the parameters drivers
     * @return a new generator
     */
    private DerivativeGenerator<Gradient> createGenerator(final List<LineSensor> selectedSensors) {

        // Collect the selected drivers, registering each driver name only once;
        // the map assigns each distinct name its index in the gradient
        final List<ParameterDriver> selected = new ArrayList<>();
        final Map<String, Integer> map = new HashMap<>();
        for (final LineSensor sensor : selectedSensors) {
            sensor.getParametersDrivers().filter(ParameterDriver::isSelected).forEach(driver -> {
                if (!map.containsKey(driver.getName())) {
                    map.put(driver.getName(), map.size());
                    selected.add(driver);
                }
            });
        }

        // gradient generator over the selected parameters
        final GradientField field = GradientField.getField(map.size());
        return new DerivativeGenerator<Gradient>() {

            /** {@inheritDoc} */
            @Override
            public List<ParameterDriver> getSelected() {
                return selected;
            }

            /** {@inheritDoc} */
            @Override
            public Gradient constant(final double value) {
                return Gradient.constant(map.size(), value);
            }

            /** {@inheritDoc} */
            @Override
            public Gradient variable(final ParameterDriver driver) {
                final Integer index = map.get(driver.getName());
                if (index == null) {
                    // driver not selected: treat its value as a constant
                    return constant(driver.getValue());
                } else {
                    return Gradient.variable(map.size(), index.intValue(), driver.getValue());
                }
            }

            /** {@inheritDoc} */
            @Override
            public Field<Gradient> getField() {
                return field;
            }
        };
    }

    /** Get the sensors list.
     * @return the sensors list
     */
    protected List<LineSensor> getSensors() {
        return sensors;
    }

    /** Get the number of parameters to refine.
     * @return the number of parameters to refine
     */
    protected final int getNbParams() {
        return this.nbParams;
    }

    /**
     * Get the parameters drivers list.
     * @return the selected list of parameters driver
     */
    protected final List<ParameterDriver> getDrivers() {
        return this.drivers;
    }

    /**
     * Get the derivative structure generator.
     * @return the derivative structure generator.
     */
    protected final DerivativeGenerator<Gradient> getGenerator() {
        return this.generator;
    }

    /** Get the measurements.
     * @return the measurements
     */
    protected Observables getMeasurements() {
        return measurements;
    }
}
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.adjustment;
/** Enumerate for Optimizer used in Least square optimization.
 * @author Jonathan Guinet
 * @since 2.0
 */
public enum OptimizerId {

    /** Levenberg-Marquardt optimizer. */
    LEVENBERG_MARQUADT,

    /** Gauss-Newton optimizer backed by an LU decomposition. */
    GAUSS_NEWTON_LU,

    /** Gauss-Newton optimizer backed by a QR decomposition. */
    GAUSS_NEWTON_QR
}
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.adjustment.measurements;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
/** Class for measurements generation.
 * @see SensorToSensorMapping
 * @see SensorToGroundMapping
 * @author Lucie Labat-Allee
 * @author Guylaine Prat
 * @since 2.0
 */
public class Observables {

    /** Separator between Rugged name and sensor name. */
    private static final String RUGGED_SENSOR_SEPARATOR = "_";

    /** Separator between sensors. */
    private static final String SENSORS_SEPARATOR = "__";

    /** Sensor to ground mappings (example: Ground Control Points GCP points), keyed by "rugged_sensor". */
    private final Map<String, SensorToGroundMapping> groundMappings;

    /** Sensor to sensor mappings (tie points), keyed by "ruggedA_sensorA__ruggedB_sensorB". */
    private final Map<String, SensorToSensorMapping> interMappings;

    /** Number of viewing models to map. */
    private final int nbModels;

    /** Build a new instance.
     * @param nbModels number of viewing models to map
     */
    public Observables(final int nbModels) {
        this.groundMappings = new LinkedHashMap<>();
        this.interMappings = new LinkedHashMap<>();
        this.nbModels = nbModels;
    }

    /** Add a mapping between two viewing models.
     * @param interMapping sensor to sensor mapping
     */
    public void addInterMapping(final SensorToSensorMapping interMapping) {
        this.interMappings.put(interKey(interMapping), interMapping);
    }

    /** Add a ground mapping.
     * <p>
     * A ground mapping is defined by a set of GCPs.
     * </p>
     * @param groundMapping sensor to ground mapping
     */
    public void addGroundMapping(final SensorToGroundMapping groundMapping) {
        this.groundMappings.put(pairKey(groundMapping.getRuggedName(), groundMapping.getSensorName()),
                                groundMapping);
    }

    /** Get all the ground mapping entries.
     * @return an unmodifiable view of all mapping entries
     */
    public Collection<SensorToGroundMapping> getGroundMappings() {
        return this.groundMappings.values();
    }

    /**
     * Get a ground Mapping for a sensor.
     * @param ruggedName Rugged name
     * @param sensorName sensor name
     * @return selected ground mapping or null if sensor is not found
     */
    public SensorToGroundMapping getGroundMapping(final String ruggedName, final String sensorName) {
        return this.groundMappings.get(pairKey(ruggedName, sensorName));
    }

    /** Get the sensor to sensor values.
     * @return the inter-mappings
     */
    public Collection<SensorToSensorMapping> getInterMappings() {
        return this.interMappings.values();
    }

    /** Get the number of viewing models to map.
     * @return the number of viewing models to map
     */
    public int getNbModels() {
        return this.nbModels;
    }

    /**
     * Get a sensor mapping for a sensor.
     * <p>
     * returns sensor to sensor mapping associated with specific sensors and related rugged instance.
     * </p>
     * @param ruggedNameA Rugged name A
     * @param sensorNameA sensor name A
     * @param ruggedNameB Rugged name B
     * @param sensorNameB sensor name B
     * @return selected mapping or null if a sensor is not found
     */
    public SensorToSensorMapping getInterMapping(final String ruggedNameA, final String sensorNameA,
                                                 final String ruggedNameB, final String sensorNameB) {
        return this.interMappings.get(pairKey(ruggedNameA, sensorNameA) +
                                      SENSORS_SEPARATOR +
                                      pairKey(ruggedNameB, sensorNameB));
    }

    /** Build the "rugged_sensor" key fragment shared by both map key formats.
     * @param ruggedName Rugged name
     * @param sensorName sensor name
     * @return the key fragment
     */
    private static String pairKey(final String ruggedName, final String sensorName) {
        return ruggedName + RUGGED_SENSOR_SEPARATOR + sensorName;
    }

    /** Create the key for the SensorToSensorMapping map.
     * @param sensorMapping the inter mapping
     * @return the key
     */
    private static String interKey(final SensorToSensorMapping sensorMapping) {
        return pairKey(sensorMapping.getRuggedNameA(), sensorMapping.getSensorNameA()) +
               SENSORS_SEPARATOR +
               pairKey(sensorMapping.getRuggedNameB(), sensorMapping.getSensorNameB());
    }
}
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.adjustment.measurements;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import org.orekit.rugged.linesensor.SensorPixel;
/** Container for mapping sensor pixels with sensor pixels or ground points.
 * @author Lucie Labat-Allee
 * @author Guylaine Prat
 * @since 2.0
 */
public class SensorMapping<T> {

    /** Default name for Rugged. */
    private static final String RUGGED = "Rugged";

    /** Name of the sensor to which mapping applies. */
    private final String sensorName;

    /** Name of the Rugged to which mapping applies. */
    private final String ruggedName;

    /** Correspondences from sensor pixels to mapped objects (sensor pixels or ground points). */
    private final Map<SensorPixel, T> mapping;

    /** Build a new instance using the default Rugged name.
     * @param sensorName name of the sensor to which mapping applies
     */
    public SensorMapping(final String sensorName) {
        this(sensorName, RUGGED);
    }

    /** Build a new instance with a specific Rugged name.
     * @param sensorName name of the sensor to which mapping applies
     * @param ruggedName name of the Rugged to which mapping applies
     */
    public SensorMapping(final String sensorName, final String ruggedName) {
        this.sensorName = sensorName;
        this.ruggedName = ruggedName;
        // insertion order is preserved so mapping entries iterate in the order they were added
        this.mapping = new LinkedHashMap<>();
    }

    /** Get the name of the sensor to which mapping applies.
     * @return name of the sensor to which mapping applies
     */
    public String getSensorName() {
        return this.sensorName;
    }

    /** Get the name of the Rugged to which mapping applies.
     * @return name of the Rugged to which mapping applies
     */
    public String getRuggedName() {
        return this.ruggedName;
    }

    /** Add a mapping between a sensor pixel and another point (sensor pixel or ground point).
     * @param pixel sensor pixel
     * @param point sensor pixel or ground point corresponding to the sensor pixel
     */
    public void addMapping(final SensorPixel pixel, final T point) {
        this.mapping.put(pixel, point);
    }

    /** Get all the mapping entries.
     * @return an unmodifiable view of all mapping entries
     */
    public Set<Map.Entry<SensorPixel, T>> getMapping() {
        return Collections.unmodifiableSet(this.mapping.entrySet());
    }
}
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.adjustment.measurements;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import org.orekit.bodies.GeodeticPoint;
import org.orekit.rugged.linesensor.SensorPixel;
/** Container for mapping between sensor pixels and ground points.
 * @see SensorMapping
 * @author Luc Maisonobe
 * @author Lucie Labat-Allee
 * @author Guylaine Prat
 * @since 2.0
 */
public class SensorToGroundMapping {

    /** Default name for Rugged. */
    private static final String RUGGED = "Rugged";

    /** Underlying mapping from sensor pixels to ground points; also carries the sensor and Rugged names. */
    private final SensorMapping<GeodeticPoint> groundMapping;

    /** Build a new instance (with default Rugged name).
     * @param sensorName name of the sensor to which mapping applies
     */
    public SensorToGroundMapping(final String sensorName) {
        this(RUGGED, sensorName);
    }

    /** Build a new instance with a specific Rugged name.
     * @param ruggedName name of the Rugged to which mapping applies
     * @param sensorName name of the sensor to which mapping applies
     */
    public SensorToGroundMapping(final String ruggedName, final String sensorName) {
        this.groundMapping = new SensorMapping<>(sensorName, ruggedName);
    }

    /** Get the name of the sensor to which mapping applies.
     * @return name of the sensor to which mapping applies
     */
    public String getSensorName() {
        // the wrapped mapping stores the sensor name passed at construction
        return this.groundMapping.getSensorName();
    }

    /** Get the name of the Rugged to which mapping applies.
     * @return name of the Rugged to which mapping applies
     */
    public String getRuggedName() {
        return this.groundMapping.getRuggedName();
    }

    /** Add a mapping between one sensor pixel and one ground point.
     * @param pixel sensor pixel
     * @param groundPoint ground point corresponding to the sensor pixel
     */
    public void addMapping(final SensorPixel pixel, final GeodeticPoint groundPoint) {
        this.groundMapping.addMapping(pixel, groundPoint);
    }

    /** Get all the mapping entries.
     * @return an unmodifiable view of all mapping entries
     */
    public Set<Map.Entry<SensorPixel, GeodeticPoint>> getMapping() {
        return Collections.unmodifiableSet(this.groundMapping.getMapping());
    }
}
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.adjustment.measurements;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.orekit.rugged.linesensor.SensorPixel;
/** Container for mapping sensors pixels of two viewing models.
* Store the distance between both lines of sight computed with
* {@link org.orekit.rugged.api.Rugged#distanceBetweenLOS(org.orekit.rugged.linesensor.LineSensor, org.orekit.time.AbsoluteDate, double, org.orekit.rugged.utils.SpacecraftToObservedBody, org.orekit.rugged.linesensor.LineSensor, org.orekit.time.AbsoluteDate, double)}
* <p> Constraints in relation to central body distance can be added.
* @see SensorMapping
* @author Lucie LabatAllee
* @author Guylaine Prat
* @since 2.0
*/
public class SensorToSensorMapping {

    /** Default name for Rugged. */
    private static final String RUGGED = "Rugged";

    /** Name of the sensor B to which mapping applies. */
    private final String sensorNameB;

    /** Name of the Rugged B to which mapping applies. */
    private final String ruggedNameB;

    /** Mapping from sensor A to sensor B. */
    private final SensorMapping<SensorPixel> interMapping;

    /** Distances between the two lines of sight, one entry per mapping entry. */
    private final List<Double> losDistances;

    /** Central body distances associated with pixel A (filled only when constraints are used). */
    private final List<Double> bodyDistances;

    /** Weight given to the central body distance constraint (0 means no constraint). */
    private double bodyConstraintWeight;

    /** Build a new instance without central body constraint (with default Rugged names).
     * @param sensorNameA name of the sensor A to which mapping applies
     * @param sensorNameB name of the sensor B to which mapping applies
     */
    public SensorToSensorMapping(final String sensorNameA, final String sensorNameB) {
        this(sensorNameA, RUGGED, sensorNameB, RUGGED, 0.0);
    }

    /** Build a new instance with central body constraint.
     * @param sensorNameA name of the sensor A to which mapping applies
     * @param ruggedNameA name of the Rugged A to which mapping applies
     * @param sensorNameB name of the sensor B to which mapping applies
     * @param ruggedNameB name of the Rugged B to which mapping applies
     * @param bodyConstraintWeight weight given to the central body distance constraint
     * with respect to the LOS distance (between 0 and 1).
     * <br>Weighting will be applied as follow :
     * <ul>
     * <li>(1 - bodyConstraintWeight) for LOS distance weighting</li>
     * <li>bodyConstraintWeight for central body distance weighting</li>
     * </ul>
     */
    public SensorToSensorMapping(final String sensorNameA, final String ruggedNameA,
                                 final String sensorNameB, final String ruggedNameB,
                                 final double bodyConstraintWeight) {
        this.interMapping = new SensorMapping<>(sensorNameA, ruggedNameA);
        this.sensorNameB = sensorNameB;
        this.ruggedNameB = ruggedNameB;
        this.losDistances = new ArrayList<>();
        this.bodyDistances = new ArrayList<>();
        this.bodyConstraintWeight = bodyConstraintWeight;
    }

    /** Build a new instance without central body constraints.
     * @param sensorNameA name of the sensor A to which mapping applies
     * @param ruggedNameA name of the Rugged A to which mapping applies
     * @param sensorNameB name of the sensor B to which mapping applies
     * @param ruggedNameB name of the Rugged B to which mapping applies
     */
    public SensorToSensorMapping(final String sensorNameA, final String ruggedNameA,
                                 final String sensorNameB, final String ruggedNameB) {
        this(sensorNameA, ruggedNameA, sensorNameB, ruggedNameB, 0.0);
    }

    /** Build a new instance with central body constraints (with default Rugged names):
     * we want to minimize the distance between pixel A and central body.
     * @param sensorNameA name of the sensor A to which mapping applies
     * @param sensorNameB name of the sensor B to which mapping applies
     * @param bodyConstraintWeight weight given to the central body distance constraint
     * with respect to the LOS distance (between 0 and 1).
     * <br>Weighting will be applied as follow :
     * <ul>
     * <li>(1 - bodyConstraintWeight) for LOS distance weighting</li>
     * <li>bodyConstraintWeight for central body distance weighting</li>
     * </ul>
     */
    public SensorToSensorMapping(final String sensorNameA, final String sensorNameB,
                                 final double bodyConstraintWeight) {
        this(sensorNameA, RUGGED, sensorNameB, RUGGED, bodyConstraintWeight);
    }

    /** Get the name of the sensor B to which mapping applies.
     * @return name of the sensor B to which mapping applies
     */
    public String getSensorNameB() {
        return sensorNameB;
    }

    /** Get the name of the sensor A to which mapping applies.
     * @return name of the sensor A to which mapping applies
     */
    public String getSensorNameA() {
        return interMapping.getSensorName();
    }

    /** Get the name of the Rugged B to which mapping applies.
     * @return name of the Rugged B to which mapping applies
     */
    public String getRuggedNameB() {
        return ruggedNameB;
    }

    /** Get the name of the Rugged A to which mapping applies.
     * @return name of the Rugged A to which mapping applies
     */
    public String getRuggedNameA() {
        return interMapping.getRuggedName();
    }

    /** Get all the inter-mapping entries.
     * @return an unmodifiable view of all mapping entries
     */
    public Set<Map.Entry<SensorPixel, SensorPixel>> getMapping() {
        // wrap in an unmodifiable view so the documented contract ("unmodifiable view")
        // is enforced, consistently with SensorToGroundMapping.getMapping()
        return Collections.unmodifiableSet(interMapping.getMapping());
    }

    /** Get distances between lines of sight (from both view).
     * @return an unmodifiable view of the LOS distances
     */
    public List<Double> getLosDistances() {
        // expose a read-only view: distances must only be added through addMapping
        return Collections.unmodifiableList(losDistances);
    }

    /** Get distances between central body and pixel A (mapping with constraints).
     * @return an unmodifiable view of the central body distances
     */
    public List<Double> getBodyDistances() {
        // expose a read-only view: distances must only be added through addMapping
        return Collections.unmodifiableList(bodyDistances);
    }

    /** Get the weight given to the central body distance constraint with respect to the LOS distance.
     * @return the central body constraint weight
     */
    public double getBodyConstraintWeight() {
        return bodyConstraintWeight;
    }

    /** Get distance between central body and pixel A, corresponding to the inter-mapping index.
     * @param idx inter-mapping index
     * @return the central body distances at index idx
     */
    public Double getBodyDistance(final int idx) {
        return getBodyDistances().get(idx);
    }

    /** Get distance between LOS, corresponding to the inter-mapping index.
     * @param idx inter-mapping index
     * @return the LOS distance at index idx
     */
    public Double getLosDistance(final int idx) {
        return getLosDistances().get(idx);
    }

    /** Add a mapping between two sensor pixels (A and B) and corresponding distance between the LOS.
     * @param pixelA sensor pixel A
     * @param pixelB sensor pixel B corresponding to the sensor pixel A (by direct then inverse location)
     * @param losDistance distance between the two lines of sight
     */
    public void addMapping(final SensorPixel pixelA, final SensorPixel pixelB, final Double losDistance) {
        interMapping.addMapping(pixelA, pixelB);
        losDistances.add(losDistance);
    }

    /** Add a mapping between two sensor pixels (A and B) and corresponding distance between the LOS
     * and the central body distance constraint associated with pixel A.
     * @param pixelA sensor pixel A
     * @param pixelB sensor pixel B corresponding to the sensor pixel A (by direct then inverse location)
     * @param losDistance distance between the two lines of sight
     * @param bodyDistance elevation to central body
     */
    public void addMapping(final SensorPixel pixelA, final SensorPixel pixelB,
                           final Double losDistance, final Double bodyDistance) {
        interMapping.addMapping(pixelA, pixelB);
        losDistances.add(losDistance);
        bodyDistances.add(bodyDistance);
    }

    /** Set the central body constraint weight.
     * @param bodyConstraintWeight the central body constraint weight to set
     */
    public void setBodyConstraintWeight(final double bodyConstraintWeight) {
        this.bodyConstraintWeight = bodyConstraintWeight;
    }
}
/* Copyright 2013-2022 CS GROUP
 * Licensed to CS GROUP (CS) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * CS licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.core;
import org.orekit.rugged.core.raster.IntersectionAlgorithm;
public class BasicScanAlgorithmTest extends AbstractAlgorithmTest {
public IntersectionAlgorithm createAlgorithm() {
return new BasicScanAlgorithm();
}
}
/**
*
* This package provides tools for measurements generation.
*
* @author Luc Maisonobe
* @author Guylaine Prat
* @author Jonathan Guinet
* @author Lucie Labat-Allee
*
*/
package org.orekit.rugged.adjustment.measurements;
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* This package provides tools to deal with viewing model refining.
*
* @author Luc Maisonobe
* @author Guylaine Prat
* @author Jonathan Guinet
*
*/
package org.orekit.rugged.adjustment;
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.api;
/** Enumerate for Digital Elevation Model intersection.
 * <p>
 * Identifies which algorithm is used to compute the intersection between
 * lines of sight and the Digital Elevation Model.
 * </p>
 * @author Luc Maisonobe
 */
public enum AlgorithmId {
/** Fast algorithm due to Bernardt Duvenhage.
 * <p>
 * The algorithm is described in the 2009 paper:
 * <a href="http://researchspace.csir.co.za/dspace/bitstream/10204/3041/1/Duvenhage_2009.pdf">Using
 * An Implicit Min/Max KD-Tree for Doing Efficient Terrain Line of Sight Calculations</a>.
 * </p>
 * <p>
 * This is the recommended algorithm for operational use.
 * </p>
 */
DUVENHAGE,
/** Fast algorithm due to Bernardt Duvenhage.
 * <p>
 * The algorithm is described in the 2009 paper:
 * <a href="http://researchspace.csir.co.za/dspace/bitstream/10204/3041/1/Duvenhage_2009.pdf">Using
 * An Implicit Min/Max KD-Tree for Doing Efficient Terrain Line of Sight Calculations</a>.
 * </p>
 * <p>
 * This version of the duvenhage's algorithm considers the body to be flat, i.e. lines computed
 * from entry/exit points in the Digital Elevation Model are considered to be straight lines
 * also in geodetic coordinates. The sagitta resulting from real ellipsoid curvature is therefore
 * <em>not</em> corrected in this case. As this computation is not costly (a few percents overhead),
 * the full {@link #DUVENHAGE} is recommended instead of this one. This choice is mainly intended
 * for comparison purposes with other systems.
 * </p>
 */
DUVENHAGE_FLAT_BODY,
/** Basic, <em>very slow</em> algorithm, designed only for tests and validation purposes.
 * <p>
 * The algorithm simply computes entry and exit points at high and low altitudes,
 * and scans all Digital Elevation Models in the sub-tiles defined by these two
 * corner points. It is not designed for operational use.
 * </p>
 */
BASIC_SLOW_EXHAUSTIVE_SCAN_FOR_TESTS_ONLY,
/** Algorithm that simply uses a constant elevation over ellipsoid.
 * <p>
 * Intersections are computed only with respect to the reference ellipsoid
 * and a user-specified elevation. If the user-specified elevation is 0.0,
 * then this algorithm is equivalent to {@link #IGNORE_DEM_USE_ELLIPSOID},
 * only slower.
 * </p>
 */
CONSTANT_ELEVATION_OVER_ELLIPSOID,
/** Dummy algorithm that simply ignores the Digital Elevation Model.
 * <p>
 * Intersections are computed only with respect to the reference ellipsoid.
 * </p>
 * <p>
 * This algorithm is equivalent to {@link #CONSTANT_ELEVATION_OVER_ELLIPSOID}
 * when the elevation is set to 0.0, but this one is much faster in this
 * specific case.
 * </p>
 */
IGNORE_DEM_USE_ELLIPSOID
}
/* Copyright 2013-2022 CS GROUP
 * Licensed to CS GROUP (CS) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * CS licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package org.orekit.rugged.api;
/** Enumerate for body rotating frames.
 * <p>
 * Identifiers for the body rotating frames in which ground coordinates
 * are expressed.
 * </p>
 * @author Luc Maisonobe
 */
public enum BodyRotatingFrameId {

    /** Constant for International Terrestrial Reference Frame. */
    ITRF,

    /** Constant for International Terrestrial Reference Frame based on older equinox paradigm. */
    ITRF_EQUINOX,

    /** Constant for Geocentric True Of Date frame. */
    GTOD

}
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.api;
/** Enumerate for ellipsoid.
 * <p>
 * Each constant identifies a predefined reference ellipsoid model.
 * </p>
 * @author Luc Maisonobe
 */
public enum EllipsoidId {
/** Constant for GRS 80 ellipsoid. */
GRS80,
/** Constant for WGS 84 ellipsoid. */
WGS84,
/** Constant for IERS 96 ellipsoid. */
IERS96,
/** Constant for IERS 2003 ellipsoid. */
IERS2003
}
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.api;
/** Enumerate for inertial frames.
 * <p>
 * Each constant identifies a predefined inertial frame.
 * </p>
 * @author Luc Maisonobe
 */
public enum InertialFrameId {
/** Constant for Geocentric Celestial Reference Frame. */
GCRF,
/** Constant for Earth Mean Equator 2000 frame (aka J2000). */
EME2000,
/** Constant for Mean Of Date frame, with IERS 96 conventions (Lieske precession). */
MOD,
/** Constant for True Of Date frame, with IERS 96 conventions (Wahr nutation). */
TOD,
/** Constant for Veis 1950 frame. */
VEIS1950
}
/* Copyright 2013-2014 CS Systèmes d'Information
* Licensed to CS Systèmes d'Information (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.api;
import java.io.Serializable;
/** Container for pixels line of sight.
 * <p>
 * A pixel line of sight is a point (the pixel position) and a direction,
 * both expressed as plain double coordinates.
 * </p>
 * <p>
 * Instances of this class are guaranteed to be immutable.
 * </p>
 * @author Luc Maisonobe
 */
public class PixelLOS implements Serializable {
/** Serializable UID. */
private static final long serialVersionUID = 20140311L;
/** Position along x axis(m). */
private final double px;
/** Position along y axis(m). */
private final double py;
/** Position along z axis(m). */
private final double pz;
/** Line of sight direction along x axis. */
private final double dx;
/** Line of sight direction along y axis. */
private final double dy;
/** Line of sight direction along z axis. */
private final double dz;
/**
 * Build a new instance.
 *
 * @param px position along x axis (m)
 * @param py position along y axis (m)
 * @param pz position along z axis (m)
 * @param dx line of sight direction along x axis
 * @param dy line of sight direction along y axis
 * @param dz line of sight direction along z axis
 */
public PixelLOS(final double px, final double py, final double pz,
final double dx, final double dy, final double dz) {
this.px = px;
this.py = py;
this.pz = pz;
this.dx = dx;
this.dy = dy;
this.dz = dz;
}
/** Get the position along x axis.
 * @return position along x axis (m)
 */
public double getPx() {
return px;
}
/** Get the position along y axis.
 * @return position along y axis (m)
 */
public double getPy() {
return py;
}
/** Get the position along z axis.
 * @return position along z axis (m)
 */
public double getPz() {
return pz;
}
/** Get the line of sight direction along x axis.
 * @return line of sight direction along x axis
 */
public double getDx() {
return dx;
}
/** Get the line of sight direction along y axis.
 * @return line of sight direction along y axis
 */
public double getDy() {
return dy;
}
/** Get the line of sight direction along z axis.
 * @return line of sight direction along z axis
 */
public double getDz() {
return dz;
}
}
/* Copyright 2013-2014 CS Systèmes d'Information
* Licensed to CS Systèmes d'Information (CS) under one or more
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
......@@ -16,151 +16,1207 @@
*/
package org.orekit.rugged.api;
import java.io.File;
import java.util.List;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/** Main interface to Rugged library.
import org.hipparchus.analysis.differentiation.Derivative;
import org.hipparchus.analysis.differentiation.DerivativeStructure;
import org.hipparchus.geometry.euclidean.threed.FieldVector3D;
import org.hipparchus.geometry.euclidean.threed.Vector3D;
import org.hipparchus.util.FastMath;
import org.hipparchus.util.MathArrays;
import org.orekit.bodies.GeodeticPoint;
import org.orekit.frames.Transform;
import org.orekit.rugged.errors.DumpManager;
import org.orekit.rugged.errors.RuggedException;
import org.orekit.rugged.errors.RuggedInternalError;
import org.orekit.rugged.errors.RuggedMessages;
import org.orekit.rugged.intersection.IntersectionAlgorithm;
import org.orekit.rugged.linesensor.LineSensor;
import org.orekit.rugged.linesensor.SensorMeanPlaneCrossing;
import org.orekit.rugged.linesensor.SensorPixel;
import org.orekit.rugged.linesensor.SensorPixelCrossing;
import org.orekit.rugged.refraction.AtmosphericRefraction;
import org.orekit.rugged.utils.DerivativeGenerator;
import org.orekit.rugged.utils.ExtendedEllipsoid;
import org.orekit.rugged.utils.NormalizedGeodeticPoint;
import org.orekit.rugged.utils.SpacecraftToObservedBody;
import org.orekit.time.AbsoluteDate;
import org.orekit.utils.Constants;
import org.orekit.utils.PVCoordinates;
/** Main class of Rugged library API.
* @see RuggedBuilder
* @author Luc Maisonobe
* @author Guylaine Prat
* @author Jonathan Guinet
* @author Lucie LabatAllee
*/
public interface Rugged {
public class Rugged {
/** Accuracy to use in the first stage of inverse location.
* <p>
* This accuracy is only used to locate the point within one
* pixel, hence there is no point in choosing a too small value here.
* </p>
*/
private static final double COARSE_INVERSE_LOCATION_ACCURACY = 0.01;
/** Enumerate for ellipsoid. */
enum Ellipsoid {
/** Maximum number of evaluations for crossing algorithms. */
private static final int MAX_EVAL = 50;
/** Constant for GRS 80 ellipsoid. */
GRS80,
/** Threshold for pixel convergence in fixed point method
* (for inverse location with atmospheric refraction correction). */
private static final double PIXEL_CV_THRESHOLD = 1.e-4;
/** Constant for WGS 84 ellipsoid. */
WGS84,
/** Threshold for line convergence in fixed point method
* (for inverse location with atmospheric refraction correction). */
private static final double LINE_CV_THRESHOLD = 1.e-4;
/** Constant for IERS 96 ellipsoid. */
IERS96,
/** Reference ellipsoid. */
private final ExtendedEllipsoid ellipsoid;
/** Constant for IERS 2003 ellipsoid. */
IERS2003
/** Converter between spacecraft and body. */
private final SpacecraftToObservedBody scToBody;
}
/** Sensors. */
private final Map<String, LineSensor> sensors;
/** Mean plane crossing finders. */
private final Map<String, SensorMeanPlaneCrossing> finders;
/** DEM intersection algorithm. */
private final IntersectionAlgorithm algorithm;
/** Flag for light time correction. */
private boolean lightTimeCorrection;
/** Flag for aberration of light correction. */
private boolean aberrationOfLightCorrection;
/** Rugged name. */
private final String name;
/** Atmospheric refraction for line of sight correction. */
private AtmosphericRefraction atmosphericRefraction;
/** Build a configured instance.
* <p>
* By default, the instance performs both light time correction (which refers
* to ground point motion with respect to inertial frame) and aberration of
* light correction (which refers to spacecraft proper velocity). Explicit calls
* to {@link #setLightTimeCorrection(boolean) setLightTimeCorrection}
* and {@link #setAberrationOfLightCorrection(boolean) setAberrationOfLightCorrection}
* can be made after construction if these phenomena should not be corrected.
* </p>
* @param algorithm algorithm to use for Digital Elevation Model intersection
* @param ellipsoid f reference ellipsoid
* @param lightTimeCorrection if true, the light travel time between ground
* @param aberrationOfLightCorrection if true, the aberration of light
* is corrected for more accurate location
* and spacecraft is compensated for more accurate location
* @param atmosphericRefraction the atmospheric refraction model to be used for more accurate location
* @param scToBody transforms interpolator
* @param sensors sensors
* @param name Rugged name
*/
Rugged(final IntersectionAlgorithm algorithm, final ExtendedEllipsoid ellipsoid, final boolean lightTimeCorrection,
final boolean aberrationOfLightCorrection, final AtmosphericRefraction atmosphericRefraction,
final SpacecraftToObservedBody scToBody, final Collection<LineSensor> sensors, final String name) {
/** Enumerate for inertial frames. */
enum InertialFrame {
/** Constant for Geocentric Celestial Reference Frame. */
GCRF,
// space reference
this.ellipsoid = ellipsoid;
/** Constant for Earth Mean Equator 2000 frame (aka J2000). */
EME2000,
// orbit/attitude to body converter
this.scToBody = scToBody;
/** Constant for Mean Of Date frame, with IERS 96 conventions (Lieske precession). */
MOD,
// intersection algorithm
this.algorithm = algorithm;
/** Constant for True Of Date frame, with IERS 96 conventions (Wahr nutation). */
TOD,
// Rugged name
// @since 2.0
this.name = name;
/** Constant for Veis 1950 frame. */
VEIS1950
this.sensors = new HashMap<String, LineSensor>();
for (final LineSensor s : sensors) {
this.sensors.put(s.getName(), s);
}
this.finders = new HashMap<String, SensorMeanPlaneCrossing>();
this.lightTimeCorrection = lightTimeCorrection;
this.aberrationOfLightCorrection = aberrationOfLightCorrection;
this.atmosphericRefraction = atmosphericRefraction;
}
/** Enumerate for body rotating frames. */
enum BodyRotatingFrame {
/** Get the Rugged name.
* @return Rugged name
* @since 2.0
*/
public String getName() {
return name;
}
/** Constant for International Terrestrial Reference Frame. */
ITRF,
/** Get the DEM intersection algorithm.
* @return DEM intersection algorithm
*/
public IntersectionAlgorithm getAlgorithm() {
return algorithm;
}
/** Constant for Geocentric True Of Date frame. */
GTOD
/** Get the DEM intersection algorithm identifier.
* @return DEM intersection algorithm Id
* @since 2.2
*/
public AlgorithmId getAlgorithmId() {
return algorithm.getAlgorithmId();
}
/** Get flag for light time correction.
* @return true if the light time between ground and spacecraft is
* compensated for more accurate location
*/
public boolean isLightTimeCorrected() {
return lightTimeCorrection;
}
/** Enumerate for Digital Elevation Model intersection. */
enum Algorithm {
/** Get flag for aberration of light correction.
* @return true if the aberration of light time is corrected
* for more accurate location
*/
public boolean isAberrationOfLightCorrected() {
return aberrationOfLightCorrection;
}
/** Fast algorithm due to Bernardt Duvenhage.
* <p>
* The algorithm is described in the 2009 paper:
* <a href="http://researchspace.csir.co.za/dspace/bitstream/10204/3041/1/Duvenhage_2009.pdf">Using
* An Implicit Min/Max KD-Tree for Doing Efficient Terrain Line of Sight Calculations</a>.
* </p>
*/
DUVENHAGE,
/** Get the atmospheric refraction model.
* @return atmospheric refraction model
* @since 2.0
*/
public AtmosphericRefraction getRefractionCorrection() {
return atmosphericRefraction;
}
/** Basic, <em>very slow</em> algorithm, designed only for tests and validation purposes.
* <p>
* The algorithm simply computes entry and exit points at high and low altitudes,
* and scans all Digital Elevation Models in the sub-tiles defined by these two
* corner points. It is not designed for operational use.
* </p>
*/
BASIC_SLOW_EXHAUSTIVE_SCAN_FOR_TESTS_ONLY,
/** Get the line sensors.
* @return line sensors
*/
public Collection<LineSensor> getLineSensors() {
return sensors.values();
}
/** Dummy algorithm that simply ignores the Digital Elevation Model.
* <p>
* Intersections are computed only with respect to the reference ellipsoid.
* </p>
*/
IGNORE_DEM_USE_ELLIPSOID
/** Get the start of search time span.
* @return start of search time span
*/
public AbsoluteDate getMinDate() {
return scToBody.getMinDate();
}
/** Get the end of search time span.
* @return end of search time span
*/
public AbsoluteDate getMaxDate() {
return scToBody.getMaxDate();
}
/** Set up general context.
/** Check if a date is in the supported range.
* <p>
* This method is the first one that must be called, otherwise the
* other methods will fail due to uninitialized context.
* The support range is given by the {@code minDate} and
* {@code maxDate} construction parameters, with an {@code
* overshootTolerance} margin accepted (i.e. a date slightly
* before {@code minDate} or slightly after {@code maxDate}
* will be considered in range if the overshoot does not exceed
* the tolerance set at construction).
* </p>
* @param orekitDataDir top directory for Orekit data
* @param referenceDate reference date from which all other dates are computed
* @param algorithmID identifier of algorithm to use for Digital Elevation Model intersection
* @param ellipsoidID identifier of reference ellipsoid
* @param inertialFrameID identifier of inertial frame
* @param bodyRotatingFrameID identifier of body rotating frame
* @param positionsVelocities satellite position and velocity
* @param pvInterpolationOrder order to use for position/velocity interpolation
* @param quaternions satellite quaternions
* @param aInterpolationOrder order to use for attitude interpolation
* @exception RuggedException if data needed for some frame cannot be loaded
*/
void setGeneralContext(File orekitDataDir, String referenceDate,
Algorithm algorithmID, Ellipsoid ellipsoidID,
InertialFrame inertialFrameID, BodyRotatingFrame bodyRotatingFrameID,
List<SatellitePV> positionsVelocities, int pvInterpolationOrder,
List<SatelliteQ> quaternions, int aInterpolationOrder)
throws RuggedException;
/** Set up the tiles management.
* @param updater updater used to load Digital Elevation Model tiles
* @param maxCachedTiles maximum number of tiles stored in the cache
*/
void setUpTilesManagement(TileUpdater updater, int maxCachedTiles);
/** Set up line sensor model.
* @param name name of the line sensor.
* @param pixels lines of sight for each pixels
* @param datationModel model to use for dating sensor lines
*/
void setLineSensor(String name, List<PixelLOS> pixels, LineDatation datationModel);
/** Direct localization of a sensor line.
* @param name name of the line sensor
* @param date date to check
* @return true if date is in the supported range
*/
public boolean isInRange(final AbsoluteDate date) {
return scToBody.isInRange(date);
}
/** Get the observed body ellipsoid.
* @return observed body ellipsoid
*/
public ExtendedEllipsoid getEllipsoid() {
return ellipsoid;
}
/** Direct location of a sensor line.
* @param sensorName name of the line sensor
* @param lineNumber number of the line to localize on ground
* @return ground position of all pixels of the specified sensor line
* @exception RuggedException if line cannot be localized,
* if {@link #setGeneralContext(File, InertialFrame, BodyRotatingFrame, Ellipsoid)} has
* not been called beforehand, or if {@link #setOrbitAndAttitude(List, List)} has not
* been called beforehand, or sensor is unknown
*/
GroundPoint[] directLocalization(String name, double lineNumber)
throws RuggedException;
/** Inverse localization of a ground point.
* @param name name of the line sensor
* @param groundPoint ground point to localize
* @return sensor pixel seeing ground point
* @exception RuggedException if line cannot be localized,
* if {@link #setGeneralContext(File, InertialFrame, BodyRotatingFrame, Ellipsoid)} has
* not been called beforehand, or if {@link #setOrbitAndAttitude(List, List)} has not
* been called beforehand, or sensor is unknown
*/
SensorPixel inverseLocalization(String name, GroundPoint groundPoint)
throws RuggedException;
/** Direct location of a sensor line.
 * <p>
 * For each pixel of the line, the pixel line of sight is intersected with the
 * Digital Elevation Model, applying the configured corrections (light time,
 * aberration of light, atmospheric refraction) when enabled.
 * </p>
 * @param sensorName name of the line sensor
 * @param lineNumber number of the line to locate on ground
 * @return ground position of all pixels of the specified sensor line
 */
public GeodeticPoint[] directLocation(final String sensorName, final double lineNumber) {
    final LineSensor sensor = getLineSensor(sensorName);
    final Vector3D sensorPosition = sensor.getPosition();
    final AbsoluteDate date = sensor.getDate(lineNumber);
    // Compute the transform for the date
    // from spacecraft to inertial
    final Transform scToInert = scToBody.getScToInertial(date);
    // from inertial to body
    final Transform inertToBody = scToBody.getInertialToBody(date);
    // Compute spacecraft velocity in inertial frame
    final Vector3D spacecraftVelocity = scToInert.transformPVCoordinates(PVCoordinates.ZERO).getVelocity();
    // Compute sensor position in inertial frame
    // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
    final Vector3D pInert = scToInert.transformPosition(sensorPosition);
    // Compute location of each pixel
    final GeodeticPoint[] gp = new GeodeticPoint[sensor.getNbPixels()];
    for (int i = 0; i < sensor.getNbPixels(); ++i) {
        final Vector3D los = sensor.getLOS(date, i);
        DumpManager.dumpDirectLocation(date, sensorPosition, los, lightTimeCorrection,
                                       aberrationOfLightCorrection, atmosphericRefraction != null);
        // compute the line of sight in inertial frame (without correction)
        final Vector3D obsLInert = scToInert.transformVector(los);
        final Vector3D lInert;
        if (aberrationOfLightCorrection) {
            // apply aberration of light correction on LOS
            lInert = applyAberrationOfLightCorrection(obsLInert, spacecraftVelocity);
        } else {
            // don't apply aberration of light correction on LOS
            lInert = obsLInert;
        }
        if (lightTimeCorrection) {
            // compute DEM intersection with light time correction
            // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
            gp[i] = computeWithLightTimeCorrection(date, sensorPosition, los, scToInert, inertToBody, pInert, lInert);
        } else {
            // compute DEM intersection without light time correction
            final Vector3D pBody = inertToBody.transformPosition(pInert);
            final Vector3D lBody = inertToBody.transformVector(lInert);
            gp[i] = algorithm.refineIntersection(ellipsoid, pBody, lBody,
                                                 algorithm.intersection(ellipsoid, pBody, lBody));
        }
        // compute with atmospheric refraction correction if necessary
        // (the refraction correction refines the intersection found above)
        if (atmosphericRefraction != null && atmosphericRefraction.mustBeComputed()) {
            // apply atmospheric refraction correction
            final Vector3D pBody = inertToBody.transformPosition(pInert);
            final Vector3D lBody = inertToBody.transformVector(lInert);
            gp[i] = atmosphericRefraction.applyCorrection(pBody, lBody, (NormalizedGeodeticPoint) gp[i], algorithm);
        }
        DumpManager.dumpDirectLocationResult(gp[i]);
    }
    return gp;
}
/** Direct location of a single line-of-sight.
 * @param date date of the location
 * @param sensorPosition sensor position in spacecraft frame. For simplicity, due to the size of sensor,
 * we consider each pixel to be at sensor position
 * @param los normalized line-of-sight in spacecraft frame
 * @return ground position of intersection point between specified los and ground
 */
public GeodeticPoint directLocation(final AbsoluteDate date, final Vector3D sensorPosition, final Vector3D los) {
    DumpManager.dumpDirectLocation(date, sensorPosition, los, lightTimeCorrection, aberrationOfLightCorrection,
                                   atmosphericRefraction != null);
    // Compute the transforms for the date
    // from spacecraft to inertial
    final Transform scToInert = scToBody.getScToInertial(date);
    // from inertial to body
    final Transform inertToBody = scToBody.getInertialToBody(date);
    // Compute spacecraft velocity in inertial frame
    final Vector3D spacecraftVelocity = scToInert.transformPVCoordinates(PVCoordinates.ZERO).getVelocity();
    // Compute sensor position in inertial frame
    // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
    final Vector3D pInert = scToInert.transformPosition(sensorPosition);
    // Compute the line of sight in inertial frame (without correction)
    final Vector3D obsLInert = scToInert.transformVector(los);
    final Vector3D lInert;
    if (aberrationOfLightCorrection) {
        // apply aberration of light correction on LOS
        lInert = applyAberrationOfLightCorrection(obsLInert, spacecraftVelocity);
    } else {
        // don't apply aberration of light correction on LOS
        lInert = obsLInert;
    }
    // Compute ground location of specified pixel
    final NormalizedGeodeticPoint gp;
    if (lightTimeCorrection) {
        // compute DEM intersection with light time correction
        // TBN: for simplicity, due to the size of sensor, we consider each pixel to be at sensor position
        gp = computeWithLightTimeCorrection(date, sensorPosition, los, scToInert, inertToBody, pInert, lInert);
    } else {
        // compute DEM intersection without light time correction
        final Vector3D pBody = inertToBody.transformPosition(pInert);
        final Vector3D lBody = inertToBody.transformVector(lInert);
        gp = algorithm.refineIntersection(ellipsoid, pBody, lBody,
                                          algorithm.intersection(ellipsoid, pBody, lBody));
    }
    NormalizedGeodeticPoint result = gp;
    // compute the ground location with atmospheric correction if asked for
    if (atmosphericRefraction != null && atmosphericRefraction.mustBeComputed()) {
        // apply atmospheric refraction correction
        final Vector3D pBody = inertToBody.transformPosition(pInert);
        final Vector3D lBody = inertToBody.transformVector(lInert);
        result = atmosphericRefraction.applyCorrection(pBody, lBody, gp, algorithm);
    } // end test on atmosphericRefraction != null
    DumpManager.dumpDirectLocationResult(result);
    return result;
}
/** Find the date at which sensor sees a ground point.
 * <p>
 * This method is a partial {@link #inverseLocation(String, GeodeticPoint, int, int) inverse location}
 * computing only the date. The point is given by its latitude and longitude only;
 * its elevation is read from the Digital Elevation Model.
 * </p>
 * <p>
 * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
 * are cached, because they induce costly frames computation. They should therefore
 * not be tuned very finely and changed at each call, but should rather be kept a few
 * thousand lines wide and refreshed only when needed. If for example an inverse
 * location is roughly estimated to occur near line 53764 (for example using
 * {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine} and
 * {@code maxLine} could be set to 50000 and 60000, which would also remain valid if
 * the next inverse location is expected near line 53780, and so on. The setting could
 * be changed for example to 55000 and 65000 when an inverse location is expected to
 * occur after 55750. Of course, these values are only an example and should be
 * adjusted depending on mission needs.
 * </p>
 * @param sensorName name of the line sensor
 * @param latitude ground point latitude (rad)
 * @param longitude ground point longitude (rad)
 * @param minLine minimum line number
 * @param maxLine maximum line number
 * @return date at which ground point is seen by line sensor
 * @see #inverseLocation(String, double, double, int, int)
 * @see org.orekit.rugged.utils.RoughVisibilityEstimator
 */
public AbsoluteDate dateLocation(final String sensorName,
                                 final double latitude, final double longitude,
                                 final int minLine, final int maxLine) {
    // complete the point with the DEM elevation, then delegate to the geodetic-point variant
    final double elevation = algorithm.getElevation(latitude, longitude);
    final GeodeticPoint target = new GeodeticPoint(latitude, longitude, elevation);
    return dateLocation(sensorName, target, minLine, maxLine);
}
/** Find the date at which sensor sees a ground point.
 * <p>
 * This method is a partial {@link #inverseLocation(String, GeodeticPoint, int, int)
 * inverse location} computing only the date.
 * </p>
 * <p>
 * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
 * are cached, because they induce costly frames computation. They should therefore
 * not be tuned very finely and changed at each call, but should rather be kept a few
 * thousand lines wide and refreshed only when needed. If for example an inverse
 * location is roughly estimated to occur near line 53764 (for example using
 * {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine} and
 * {@code maxLine} could be set to 50000 and 60000, which would also remain valid if
 * the next inverse location is expected near line 53780, and so on. The setting could
 * be changed for example to 55000 and 65000 when an inverse location is expected to
 * occur after 55750. Of course, these values are only an example and should be
 * adjusted depending on mission needs.
 * </p>
 * @param sensorName name of the line sensor
 * @param point point to localize
 * @param minLine minimum line number
 * @param maxLine maximum line number
 * @return date at which ground point is seen by line sensor
 * @see #inverseLocation(String, GeodeticPoint, int, int)
 * @see org.orekit.rugged.utils.RoughVisibilityEstimator
 */
public AbsoluteDate dateLocation(final String sensorName, final GeodeticPoint point,
                                 final int minLine, final int maxLine) {
    final LineSensor sensor = getLineSensor(sensorName);
    final SensorMeanPlaneCrossing planeCrossing = getPlaneCrossing(sensorName, minLine, maxLine);
    // search for the sensor line at which the ground point crosses the sensor mean plane
    final Vector3D targetPosition = ellipsoid.transform(point);
    final SensorMeanPlaneCrossing.CrossingResult crossing = planeCrossing.find(targetPosition);
    if (crossing == null) {
        // the target never crosses the mean plane within the search interval
        return null;
    }
    return sensor.getDate(crossing.getLine());
}
/** Inverse location of a ground point.
 * <p>
 * The point is given by its latitude and longitude only; its elevation is read
 * from the Digital Elevation Model.
 * </p>
 * <p>
 * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
 * are cached, because they induce costly frames computation. They should therefore
 * not be tuned very finely and changed at each call, but should rather be kept a few
 * thousand lines wide and refreshed only when needed. If for example an inverse
 * location is roughly estimated to occur near line 53764 (for example using
 * {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine} and
 * {@code maxLine} could be set to 50000 and 60000, which would also remain valid if
 * the next inverse location is expected near line 53780, and so on. The setting could
 * be changed for example to 55000 and 65000 when an inverse location is expected to
 * occur after 55750. Of course, these values are only an example and should be
 * adjusted depending on mission needs.
 * </p>
 * @param sensorName name of the line sensor
 * @param latitude ground point latitude (rad)
 * @param longitude ground point longitude (rad)
 * @param minLine minimum line number
 * @param maxLine maximum line number
 * @return sensor pixel seeing ground point, or null if ground point cannot
 * be seen between the prescribed line numbers
 * @see org.orekit.rugged.utils.RoughVisibilityEstimator
 */
public SensorPixel inverseLocation(final String sensorName,
                                   final double latitude, final double longitude,
                                   final int minLine, final int maxLine) {
    // complete the point with the DEM elevation, then delegate to the geodetic-point variant
    final double elevation = algorithm.getElevation(latitude, longitude);
    return inverseLocation(sensorName, new GeodeticPoint(latitude, longitude, elevation), minLine, maxLine);
}
/** Inverse location of a point.
 * <p>
 * Note that for each sensor name, the {@code minLine} and {@code maxLine} settings
 * are cached, because they induce costly frames computation. They should therefore
 * not be tuned very finely and changed at each call, but should rather be kept a few
 * thousand lines wide and refreshed only when needed. If for example an inverse
 * location is roughly estimated to occur near line 53764 (for example using
 * {@link org.orekit.rugged.utils.RoughVisibilityEstimator}), {@code minLine} and
 * {@code maxLine} could be set to 50000 and 60000, which would also remain valid if
 * the next inverse location is expected near line 53780, and so on. The setting could
 * be changed for example to 55000 and 65000 when an inverse location is expected to
 * occur after 55750. Of course, these values are only an example and should be
 * adjusted depending on mission needs.
 * </p>
 * @param sensorName name of the line sensor
 * @param point geodetic point to localize
 * @param minLine minimum line number where the search will be performed
 * @param maxLine maximum line number where the search will be performed
 * @return sensor pixel seeing point, or null if point cannot be seen between the
 * prescribed line numbers
 * @see #dateLocation(String, GeodeticPoint, int, int)
 * @see org.orekit.rugged.utils.RoughVisibilityEstimator
 */
public SensorPixel inverseLocation(final String sensorName, final GeodeticPoint point,
                                   final int minLine, final int maxLine) {
    final LineSensor sensor = getLineSensor(sensorName);
    DumpManager.dumpInverseLocation(sensor, point, ellipsoid, minLine, maxLine, lightTimeCorrection,
                                    aberrationOfLightCorrection, atmosphericRefraction != null);
    final SensorMeanPlaneCrossing planeCrossing = getPlaneCrossing(sensorName, minLine, maxLine);
    DumpManager.dumpSensorMeanPlane(planeCrossing);
    // dispatch according to the atmospheric refraction configuration
    final boolean refractionActive = atmosphericRefraction != null && atmosphericRefraction.mustBeComputed();
    return refractionActive ?
           findSensorPixelWithAtmosphere(point, sensor, minLine, maxLine) :
           findSensorPixelWithoutAtmosphere(point, sensor, planeCrossing);
}
/** Apply aberration of light correction (for direct location).
 * @param obsLInert apparent line of sight in inertial frame
 * @param spacecraftVelocity spacecraft velocity in inertial frame
 * @return line of sight corrected for aberration of light
 */
private Vector3D applyAberrationOfLightCorrection(final Vector3D obsLInert, final Vector3D spacecraftVelocity) {
    // As the spacecraft velocity is small with respect to speed of light,
    // we use classical velocity addition and not relativistic velocity addition
    // we look for a positive k such that: c * lInert + vsat = k * obsLInert
    // with lInert normalized
    final double a = obsLInert.getNormSq();
    final double b = -Vector3D.dotProduct(obsLInert, spacecraftVelocity);
    final double c = spacecraftVelocity.getNormSq() - Constants.SPEED_OF_LIGHT * Constants.SPEED_OF_LIGHT;
    // a > 0 and c < 0, so the discriminant b² - a*c is always positive
    final double s = FastMath.sqrt(b * b - a * c);
    // Only the k > 0 are kept as solutions (the solutions: -(s+b)/a and c/(s-b) are useless)
    // the form is chosen depending on the sign of b to avoid cancellation
    final double k = (b > 0) ? -c / (s + b) : (s - b) / a;
    // lInert = (k * obsLInert - vsat) / c
    final Vector3D lInert = new Vector3D( k / Constants.SPEED_OF_LIGHT, obsLInert, -1.0 / Constants.SPEED_OF_LIGHT, spacecraftVelocity);
    return lInert;
}
/** Compute the DEM intersection with light time correction.
 * <p>
 * Two successive estimates of the light time are used: a first delay is computed
 * from the intersection with the ellipsoid surface (altitude 0), then the body
 * transform is shifted by that delay and the DEM intersection gives a refined
 * delay used for the final refined intersection.
 * </p>
 * @param date date of the los
 * @param sensorPosition sensor position in spacecraft frame
 * @param los los in spacecraft frame
 * @param scToInert transform for the date from spacecraft to inertial
 * @param inertToBody transform for the date from inertial to body
 * @param pInert sensor position in inertial frame
 * @param lInert line of sight in inertial frame
 * @return geodetic point with light time correction
 */
private NormalizedGeodeticPoint computeWithLightTimeCorrection(final AbsoluteDate date,
                                                               final Vector3D sensorPosition, final Vector3D los,
                                                               final Transform scToInert, final Transform inertToBody,
                                                               final Vector3D pInert, final Vector3D lInert) {
    // compute the approximate transform between spacecraft and observed body
    final Transform approximate = new Transform(date, scToInert, inertToBody);
    final Vector3D sL = approximate.transformVector(los);
    final Vector3D sP = approximate.transformPosition(sensorPosition);
    // first estimate: intersection with the ellipsoid at altitude 0
    final Vector3D eP1 = ellipsoid.transform(ellipsoid.pointOnGround(sP, sL, 0.0));
    final double deltaT1 = eP1.distance(sP) / Constants.SPEED_OF_LIGHT;
    // shift the body frame backwards by the first light time estimate
    final Transform shifted1 = inertToBody.shiftedBy(-deltaT1);
    final NormalizedGeodeticPoint gp1 = algorithm.intersection(ellipsoid,
                                                               shifted1.transformPosition(pInert),
                                                               shifted1.transformVector(lInert));
    // second estimate: light time to the DEM intersection found above
    final Vector3D eP2 = ellipsoid.transform(gp1);
    final double deltaT2 = eP2.distance(sP) / Constants.SPEED_OF_LIGHT;
    final Transform shifted2 = inertToBody.shiftedBy(-deltaT2);
    // final refined intersection using the second light time estimate
    return algorithm.refineIntersection(ellipsoid,
                                        shifted2.transformPosition(pInert),
                                        shifted2.transformVector(lInert),
                                        gp1);
}
/**
 * Find the sensor pixel WITHOUT atmospheric refraction correction.
 * <p>
 * The search proceeds in three steps: find the line at which the target crosses
 * the sensor mean plane, locate the pixel along that line, then fix both line
 * and pixel by accounting for the actual line-of-sight of the closest pixels
 * (which may point slightly off the mean plane).
 * </p>
 * @param point geodetic point to localize
 * @param sensor the line sensor
 * @param planeCrossing the sensor mean plane crossing
 * @return the sensor pixel crossing or null if cannot be found
 * @since 2.1
 */
private SensorPixel findSensorPixelWithoutAtmosphere(final GeodeticPoint point,
                                                     final LineSensor sensor, final SensorMeanPlaneCrossing planeCrossing) {
    // find approximately the sensor line at which ground point crosses sensor mean plane
    final Vector3D target = ellipsoid.transform(point);
    final SensorMeanPlaneCrossing.CrossingResult crossingResult = planeCrossing.find(target);
    if (crossingResult == null) {
        // target is out of search interval
        return null;
    }
    // find approximately the pixel along this sensor line
    final SensorPixelCrossing pixelCrossing =
            new SensorPixelCrossing(sensor, planeCrossing.getMeanPlaneNormal(),
                                    crossingResult.getTargetDirection(),
                                    MAX_EVAL, COARSE_INVERSE_LOCATION_ACCURACY);
    final double coarsePixel = pixelCrossing.locatePixel(crossingResult.getDate());
    if (Double.isNaN(coarsePixel)) {
        // target is out of search interval
        return null;
    }
    // fix line by considering the closest pixel exact position and line-of-sight
    // (this pixel might point towards a direction slightly above or below the mean sensor plane)
    final int lowIndex = FastMath.max(0, FastMath.min(sensor.getNbPixels() - 2, (int) FastMath.floor(coarsePixel)));
    final Vector3D lowLOS = sensor.getLOS(crossingResult.getDate(), lowIndex);
    final Vector3D highLOS = sensor.getLOS(crossingResult.getDate(), lowIndex + 1);
    // localZ is normal to the plane defined by the two neighbouring pixel LOS
    final Vector3D localZ = Vector3D.crossProduct(lowLOS, highLOS).normalize();
    // beta is the angle between the target direction and localZ; solve for the
    // line shift deltaL that brings beta to 90 degrees (first-order correction)
    final double beta = FastMath.acos(Vector3D.dotProduct(crossingResult.getTargetDirection(), localZ));
    final double s = Vector3D.dotProduct(crossingResult.getTargetDirectionDerivative(), localZ);
    final double betaDer = -s / FastMath.sqrt(1 - s * s);
    final double deltaL = (0.5 * FastMath.PI - beta) / betaDer;
    final double fixedLine = crossingResult.getLine() + deltaL;
    final Vector3D fixedDirection = new Vector3D(1, crossingResult.getTargetDirection(),
                                                 deltaL, crossingResult.getTargetDirectionDerivative()).normalize();
    // fix neighbouring pixels: rebuild the local frame at the fixed date
    final AbsoluteDate fixedDate = sensor.getDate(fixedLine);
    final Vector3D fixedX = sensor.getLOS(fixedDate, lowIndex);
    final Vector3D fixedZ = Vector3D.crossProduct(fixedX, sensor.getLOS(fixedDate, lowIndex + 1));
    final Vector3D fixedY = Vector3D.crossProduct(fixedZ, fixedX);
    // fix pixel: interpolate the pixel index from the angles in the local frame
    final double pixelWidth = FastMath.atan2(Vector3D.dotProduct(highLOS, fixedY),
                                             Vector3D.dotProduct(highLOS, fixedX));
    final double alpha = FastMath.atan2(Vector3D.dotProduct(fixedDirection, fixedY),
                                        Vector3D.dotProduct(fixedDirection, fixedX));
    final double fixedPixel = lowIndex + alpha / pixelWidth;
    final SensorPixel result = new SensorPixel(fixedLine, fixedPixel);
    DumpManager.dumpInverseLocationResult(result);
    return result;
}
/**
 * Find the sensor pixel WITH atmospheric refraction correction.
 * <p>
 * There is no direct way to compute this inverse location; the method relies on
 * an interpolation grid of atmospheric corrections (built lazily, once per
 * sensor/line-range context) combined with a fixed point iteration.
 * </p>
 * @param point geodetic point to localize
 * @param sensor the line sensor
 * @param minLine minimum line number where the search will be performed
 * @param maxLine maximum line number where the search will be performed
 * @return the sensor pixel crossing or null if cannot be found
 * @since 2.1
 */
private SensorPixel findSensorPixelWithAtmosphere(final GeodeticPoint point,
                                                  final LineSensor sensor, final int minLine, final int maxLine) {
    // TBN: there is no direct way to compute the inverse location.
    // The method is based on an interpolation grid associated with the fixed point method
    final String sensorName = sensor.getName();
    // Compute a correction grid (at sensor level)
    // ===========================================
    // Need to be computed only once for a given sensor (with the same minLine and maxLine)
    if (atmosphericRefraction.getBifPixel() == null || atmosphericRefraction.getBifLine() == null || // lazy evaluation
        !atmosphericRefraction.isSameContext(sensorName, minLine, maxLine)) { // Must be recomputed if the context changed
        // Definition of a regular grid (at sensor level)
        atmosphericRefraction.configureCorrectionGrid(sensor, minLine, maxLine);
        // Get the grid nodes
        final int nbPixelGrid = atmosphericRefraction.getComputationParameters().getNbPixelGrid();
        final int nbLineGrid = atmosphericRefraction.getComputationParameters().getNbLineGrid();
        final double[] pixelGrid = atmosphericRefraction.getComputationParameters().getUgrid();
        final double[] lineGrid = atmosphericRefraction.getComputationParameters().getVgrid();
        // Computation, for the sensor grid, of the direct location WITH atmospheric refraction
        // (full computation)
        atmosphericRefraction.reactivateComputation();
        final GeodeticPoint[][] geodeticGridWithAtmosphere = computeDirectLocOnGridWithAtmosphere(pixelGrid, lineGrid, sensor);
        // pixelGrid and lineGrid are the nodes where the direct loc is computed WITH atmosphere
        // Computation of the inverse location WITHOUT atmospheric refraction for the grid nodes
        atmosphericRefraction.deactivateComputation();
        final SensorPixel[][] sensorPixelGridInverseWithout = computeInverseLocOnGridWithoutAtmosphere(geodeticGridWithAtmosphere,
                                            nbPixelGrid, nbLineGrid, sensor, minLine, maxLine);
        atmosphericRefraction.reactivateComputation();
        // Compute the grid correction functions (for pixel and line)
        atmosphericRefraction.computeGridCorrectionFunctions(sensorPixelGridInverseWithout);
    }
    // Fixed point method
    // ==================
    // Initialization
    // --------------
    // Deactivate the dump because no need to keep intermediate computations of inverse loc (can be regenerate)
    final Boolean wasSuspended = DumpManager.suspend();
    // compute the sensor pixel on the desired ground point WITHOUT atmosphere
    atmosphericRefraction.deactivateComputation();
    final SensorPixel sp0 = inverseLocation(sensorName, point, minLine, maxLine);
    atmosphericRefraction.reactivateComputation();
    // Reactivate the dump
    DumpManager.resume(wasSuspended);
    if (sp0 == null) {
        // In order for the dump to end nicely
        DumpManager.endNicely();
        // Impossible to find the sensor pixel in the given range lines (without atmosphere)
        throw new RuggedException(RuggedMessages.SENSOR_PIXEL_NOT_FOUND_IN_RANGE_LINES, minLine, maxLine);
    }
    // set up the starting point of the fixed point method
    final double pixel0 = sp0.getPixelNumber();
    final double line0 = sp0.getLineNumber();
    // Needed data for the dump
    sensor.dumpRate(line0);
    // Apply fixed point method until convergence in pixel and line
    // ------------------------------------------------------------
    // compute the first (pixel, line) value:
    // initial sensor pixel value + correction due to atmosphere at this same sensor pixel
    double corrPixelPrevious = pixel0 + atmosphericRefraction.getBifPixel().value(pixel0, line0);
    double corrLinePrevious = line0 + atmosphericRefraction.getBifLine().value(pixel0, line0);
    double deltaCorrPixel = Double.POSITIVE_INFINITY;
    double deltaCorrLine = Double.POSITIVE_INFINITY;
    // NOTE(review): with '&&' the loop stops as soon as EITHER delta drops below its
    // threshold; strict convergence "in pixel and line" would require '||' — confirm intent
    while (deltaCorrPixel > PIXEL_CV_THRESHOLD && deltaCorrLine > LINE_CV_THRESHOLD) {
        // Compute the current (pixel, line) value =
        // initial sensor pixel value + correction due to atmosphere on the previous sensor pixel
        final double corrPixelCurrent = pixel0 + atmosphericRefraction.getBifPixel().value(corrPixelPrevious, corrLinePrevious);
        final double corrLineCurrent = line0 + atmosphericRefraction.getBifLine().value(corrPixelPrevious, corrLinePrevious);
        // Compute the delta in pixel and line to check the convergence
        deltaCorrPixel = FastMath.abs(corrPixelCurrent - corrPixelPrevious);
        deltaCorrLine = FastMath.abs(corrLineCurrent - corrLinePrevious);
        // Store the (pixel, line) for next loop
        corrPixelPrevious = corrPixelCurrent;
        corrLinePrevious = corrLineCurrent;
    }
    // The sensor pixel is found !
    final SensorPixel sensorPixelWithAtmosphere = new SensorPixel(corrLinePrevious, corrPixelPrevious);
    // Dump the found sensorPixel
    DumpManager.dumpInverseLocationResult(sensorPixelWithAtmosphere);
    return sensorPixelWithAtmosphere;
}
/** Compute the inverse location WITHOUT atmospheric refraction for the geodetic points
 * associated to the sensor grid nodes.
 * <p>
 * The dump is suspended for the whole grid computation (intermediate results can
 * be regenerated); it is forcibly ended before throwing if a node cannot be located.
 * </p>
 * @param groundGridWithAtmosphere ground grid found for sensor grid nodes with atmosphere
 * @param nbPixelGrid size of the pixel grid
 * @param nbLineGrid size of the line grid
 * @param sensor the line sensor
 * @param minLine minimum line number where the search will be performed
 * @param maxLine maximum line number where the search will be performed
 * @return the sensor pixel grid computed without atmosphere (entries may be null
 * where the corresponding ground point was undefined)
 * @since 2.1
 */
private SensorPixel[][] computeInverseLocOnGridWithoutAtmosphere(final GeodeticPoint[][] groundGridWithAtmosphere,
                                                                 final int nbPixelGrid, final int nbLineGrid,
                                                                 final LineSensor sensor, final int minLine, final int maxLine) {
    // Deactivate the dump because no need to keep intermediate computations of inverse loc (can be regenerate)
    final Boolean wasSuspended = DumpManager.suspend();
    final SensorPixel[][] sensorPixelGrid = new SensorPixel[nbPixelGrid][nbLineGrid];
    final String sensorName = sensor.getName();
    for (int uIndex = 0; uIndex < nbPixelGrid; uIndex++) {
        for (int vIndex = 0; vIndex < nbLineGrid; vIndex++) {
            // Check if the geodetic point exists
            if (groundGridWithAtmosphere[uIndex][vIndex] != null) {
                final GeodeticPoint groundPoint = groundGridWithAtmosphere[uIndex][vIndex];
                final double currentLat = groundPoint.getLatitude();
                final double currentLon = groundPoint.getLongitude();
                try {
                    // Compute the inverse location for the current node
                    sensorPixelGrid[uIndex][vIndex] = inverseLocation(sensorName, currentLat, currentLon, minLine, maxLine);
                } catch (RuggedException re) { // This should never happen
                    // In order for the dump to end nicely
                    DumpManager.endNicely();
                    throw new RuggedInternalError(re);
                }
                // Check if the pixel is inside the sensor (with a margin) OR if the inverse location was impossible (null result)
                if (!pixelIsInside(sensorPixelGrid[uIndex][vIndex], sensor)) {
                    // In order for the dump to end nicely
                    DumpManager.endNicely();
                    if (sensorPixelGrid[uIndex][vIndex] == null) {
                        // Impossible to find the sensor pixel in the given range lines
                        throw new RuggedException(RuggedMessages.SENSOR_PIXEL_NOT_FOUND_IN_RANGE_LINES, minLine, maxLine);
                    } else {
                        // Impossible to find the sensor pixel
                        final double invLocationMargin = atmosphericRefraction.getComputationParameters().getInverseLocMargin();
                        throw new RuggedException(RuggedMessages.SENSOR_PIXEL_NOT_FOUND_IN_PIXELS_LINE, sensorPixelGrid[uIndex][vIndex].getPixelNumber(),
                                                  -invLocationMargin, invLocationMargin + sensor.getNbPixels() - 1, invLocationMargin);
                    }
                }
            } else { // groundGrid[uIndex][vIndex] == null: impossible to compute inverse loc because ground point not defined
                sensorPixelGrid[uIndex][vIndex] = null;
            } // groundGrid[uIndex][vIndex] != null
        } // end loop vIndex
    } // end loop uIndex
    // Reactivate the dump
    DumpManager.resume(wasSuspended);
    // The sensor grid computed WITHOUT atmospheric refraction correction
    return sensorPixelGrid;
}
/** Check if a pixel lies inside the sensor range, extended by the inverse location margin.
 * @param pixel pixel to check (may be null if not found)
 * @param sensor the line sensor
 * @return true if the pixel is non-null and within [-margin, nbPixels - 1 + margin)
 */
private boolean pixelIsInside(final SensorPixel pixel, final LineSensor sensor) {
    // tolerated margin around the nominal pixel range
    final double margin = atmosphericRefraction.getComputationParameters().getInverseLocMargin();
    if (pixel == null) {
        return false;
    }
    final double pixelNumber = pixel.getPixelNumber();
    return pixelNumber >= -margin && pixelNumber < margin + sensor.getNbPixels() - 1;
}
/** Computation, for the sensor pixels grid, of the direct location WITH atmospheric refraction.
 * (full computation)
 * <p>
 * The dump is suspended for the whole grid computation (intermediate results can
 * be regenerated); it is forcibly ended before rethrowing on unexpected errors.
 * </p>
 * @param pixelGrid the pixel grid
 * @param lineGrid the line grid
 * @param sensor the line sensor
 * @return the ground grid computed with atmosphere, indexed as [pixel][line]
 * @since 2.1
 */
private GeodeticPoint[][] computeDirectLocOnGridWithAtmosphere(final double[] pixelGrid, final double[] lineGrid,
                                                               final LineSensor sensor) {
    // Deactivate the dump because no need to keep intermediate computations of direct loc (can be regenerate)
    final Boolean wasSuspended = DumpManager.suspend();
    final int nbPixelGrid = pixelGrid.length;
    final int nbLineGrid = lineGrid.length;
    final GeodeticPoint[][] groundGridWithAtmosphere = new GeodeticPoint[nbPixelGrid][nbLineGrid];
    final Vector3D sensorPosition = sensor.getPosition();
    for (int uIndex = 0; uIndex < nbPixelGrid; uIndex++) {
        final double pixelNumber = pixelGrid[uIndex];
        for (int vIndex = 0; vIndex < nbLineGrid; vIndex++) {
            final double lineNumber = lineGrid[vIndex];
            final AbsoluteDate date = sensor.getDate(lineNumber);
            final Vector3D los = sensor.getLOS(date, pixelNumber);
            try {
                // Compute the direct location for the current node
                groundGridWithAtmosphere[uIndex][vIndex] = directLocation(date, sensorPosition, los);
            } catch (RuggedException re) { // This should never happen
                // In order for the dump to end nicely
                DumpManager.endNicely();
                throw new RuggedInternalError(re);
            }
        } // end loop vIndex
    } // end loop uIndex
    // Reactivate the dump
    DumpManager.resume(wasSuspended);
    // The ground grid computed WITH atmospheric refraction correction
    return groundGridWithAtmosphere;
}
/** Compute distances between two line sensors.
 * <p>
 * The two lines of sight are modeled as straight lines in body frame; the points
 * M_a and M_b achieving the minimum distance between the two lines are computed
 * analytically.
 * </p>
 * @param sensorA line sensor A
 * @param dateA current date for sensor A
 * @param pixelA pixel index for sensor A
 * @param scToBodyA spacecraft to body transform for sensor A
 * @param sensorB line sensor B
 * @param dateB current date for sensor B
 * @param pixelB pixel index for sensor B
 * @return distances computed between LOS and to the ground
 * @since 2.0
 */
public double[] distanceBetweenLOS(final LineSensor sensorA, final AbsoluteDate dateA, final double pixelA,
                                   final SpacecraftToObservedBody scToBodyA,
                                   final LineSensor sensorB, final AbsoluteDate dateB, final double pixelB) {
    // Compute the approximate transform between spacecraft and observed body
    // from Rugged instance A
    final Transform scToInertA = scToBodyA.getScToInertial(dateA);
    final Transform inertToBodyA = scToBodyA.getInertialToBody(dateA);
    final Transform transformScToBodyA = new Transform(dateA, scToInertA, inertToBodyA);
    // from (current) Rugged instance B
    final Transform scToInertB = scToBody.getScToInertial(dateB);
    final Transform inertToBodyB = scToBody.getInertialToBody(dateB);
    final Transform transformScToBodyB = new Transform(dateB, scToInertB, inertToBodyB);
    // Get sensors LOS into local frame
    final Vector3D vALocal = sensorA.getLOS(dateA, pixelA);
    final Vector3D vBLocal = sensorB.getLOS(dateB, pixelB);
    // Position of sensors into local frame
    final Vector3D sALocal = sensorA.getPosition(); // S_a : sensorA 's position
    final Vector3D sBLocal = sensorB.getPosition(); // S_b : sensorB 's position
    // Get sensors position and LOS into body frame
    final Vector3D sA = transformScToBodyA.transformPosition(sALocal); // S_a : sensorA's position
    final Vector3D vA = transformScToBodyA.transformVector(vALocal);   // V_a : line of sight's vectorA
    final Vector3D sB = transformScToBodyB.transformPosition(sBLocal); // S_b : sensorB's position
    final Vector3D vB = transformScToBodyB.transformVector(vBLocal);   // V_b : line of sight's vectorB
    // Compute distance
    final Vector3D vBase = sB.subtract(sA);            // S_b - S_a
    final double svA = Vector3D.dotProduct(vBase, vA); // SV_a = (S_b - S_a).V_a
    final double svB = Vector3D.dotProduct(vBase, vB); // SV_b = (S_b - S_a).V_b
    final double vAvB = Vector3D.dotProduct(vA, vB);   // V_a.V_b
    // Compute lambda_b = (SV_a * V_a.V_b - SV_b) / (1 - (V_a.V_b)²)
    final double lambdaB = (svA * vAvB - svB) / (1 - vAvB * vAvB);
    // Compute lambda_a = SV_a + lambdaB * V_a.V_b
    final double lambdaA = svA + lambdaB * vAvB;
    // Compute vector M_a = S_a + lambda_a * V_a
    final Vector3D mA = sA.add(vA.scalarMultiply(lambdaA));
    // Compute vector M_b = S_b + lambda_b * V_b
    final Vector3D mB = sB.add(vB.scalarMultiply(lambdaB));
    // Compute vector M_a -> M_B for which distance between LOS is minimum
    final Vector3D vDistanceMin = mB.subtract(mA); // M_b - M_a
    // Compute vector from mid point of vector M_a -> M_B to the ground (corresponds to minimum elevation)
    final Vector3D midPoint = (mB.add(mA)).scalarMultiply(0.5);
    // Get the euclidean norms to compute the minimum distances: between LOS and to the ground
    // NOTE(review): midPoint.getNorm() is the distance from the body frame origin (body center)
    // to the mid point, not a height above the ellipsoid — confirm this is the intended metric
    final double[] distances = {vDistanceMin.getNorm(), midPoint.getNorm()};
    return distances;
}
/** Compute distances between two line sensors with derivatives.
 * <p>
 * Derivative-aware counterpart of the plain distance computation: it evaluates
 * both the minimum distance between the two lines of sight and the distance
 * from the central body center to the mid-point of the shortest segment
 * joining them, carrying the partial derivatives built by the given generator.
 * </p>
 * @param <T> derivative type
 * @param sensorA line sensor A
 * @param dateA current date for sensor A
 * @param pixelA pixel index for sensor A
 * @param scToBodyA spacecraftToBody transform for sensor A
 * @param sensorB line sensor B
 * @param dateB current date for sensor B
 * @param pixelB pixel index for sensor B
 * @param generator generator to use for building {@link Derivative} instances
 * @return distances computed, with derivatives: index 0 is the minimum distance
 * between the two LOS, index 1 is the norm of the mid-point position in body frame
 * @see #distanceBetweenLOS(LineSensor, AbsoluteDate, double, SpacecraftToObservedBody, LineSensor, AbsoluteDate, double)
 */
public <T extends Derivative<T>> T[] distanceBetweenLOSderivatives(
final LineSensor sensorA, final AbsoluteDate dateA, final double pixelA,
final SpacecraftToObservedBody scToBodyA,
final LineSensor sensorB, final AbsoluteDate dateB, final double pixelB,
final DerivativeGenerator<T> generator) {
// Compute the approximate transforms between spacecraft and observed body
// from Rugged instance A (the converter passed as argument)
final Transform scToInertA = scToBodyA.getScToInertial(dateA);
final Transform inertToBodyA = scToBodyA.getInertialToBody(dateA);
final Transform transformScToBodyA = new Transform(dateA, scToInertA, inertToBodyA);
// from (current) Rugged instance B (this instance's own scToBody converter)
final Transform scToInertB = scToBody.getScToInertial(dateB);
final Transform inertToBodyB = scToBody.getInertialToBody(dateB);
final Transform transformScToBodyB = new Transform(dateB, scToInertB, inertToBodyB);
// Get sensors LOS into local (spacecraft) frame, with derivatives
final FieldVector3D<T> vALocal = sensorA.getLOSDerivatives(dateA, pixelA, generator);
final FieldVector3D<T> vBLocal = sensorB.getLOSDerivatives(dateB, pixelB, generator);
// Get sensors LOS into body frame
final FieldVector3D<T> vA = transformScToBodyA.transformVector(vALocal); // V_a : line of sight's vectorA
final FieldVector3D<T> vB = transformScToBodyB.transformVector(vBLocal); // V_b : line of sight's vectorB
// Position of sensors into local frame (constant vectors, no derivatives yet)
final Vector3D sAtmp = sensorA.getPosition();
final Vector3D sBtmp = sensorB.getPosition();
final T scaleFactor = FieldVector3D.dotProduct(vA.normalize(), vA.normalize()); // V_a.V_a=1
// Build a vector from the position and a scale factor (equals to 1).
// The scale factor's only purpose is to promote the constant positions to the
// field type T so they take part in the derivative propagation.
// The vector built will be scaleFactor * sAtmp for example.
final FieldVector3D<T> sALocal = new FieldVector3D<>(scaleFactor, sAtmp);
final FieldVector3D<T> sBLocal = new FieldVector3D<>(scaleFactor, sBtmp);
// Get sensors position into body frame
final FieldVector3D<T> sA = transformScToBodyA.transformPosition(sALocal); // S_a : sensorA 's position
final FieldVector3D<T> sB = transformScToBodyB.transformPosition(sBLocal); // S_b : sensorB 's position
// Compute distance using the classical closest-approach formulas for two lines
final FieldVector3D<T> vBase = sB.subtract(sA); // S_b - S_a
final T svA = FieldVector3D.dotProduct(vBase, vA); // SV_a = (S_b - S_a).V_a
final T svB = FieldVector3D.dotProduct(vBase, vB); // SV_b = (S_b - S_a).V_b
final T vAvB = FieldVector3D.dotProduct(vA, vB); // V_a.V_b
// Compute lambda_b = (SV_a * V_a.V_b - SV_b) / (1 - (V_a.V_b)²)
final T lambdaB = (svA.multiply(vAvB).subtract(svB)).divide(vAvB.multiply(vAvB).subtract(1).negate());
// Compute lambda_a = SV_a + lambdaB * V_a.V_b
final T lambdaA = vAvB.multiply(lambdaB).add(svA);
// Compute vector M_a:
final FieldVector3D<T> mA = sA.add(vA.scalarMultiply(lambdaA)); // M_a = S_a + lambda_a * V_a
// Compute vector M_b
final FieldVector3D<T> mB = sB.add(vB.scalarMultiply(lambdaB)); // M_b = S_b + lambda_b * V_b
// Compute vector M_a -> M_B for which distance between LOS is minimum
final FieldVector3D<T> vDistanceMin = mB.subtract(mA); // M_b - M_a
// Compute vector from mid point of vector M_a -> M_B to the ground (corresponds to minimum elevation)
final FieldVector3D<T> midPoint = (mB.add(mA)).scalarMultiply(0.5);
// Get the euclidean norms to compute the minimum distances:
// between LOS
final T dMin = vDistanceMin.getNorm();
// to the ground (distance from body center to the mid-point)
final T dCentralBody = midPoint.getNorm();
final T[] ret = MathArrays.buildArray(dMin.getField(), 2);
ret[0] = dMin;
ret[1] = dCentralBody;
return ret;
}
/** Get the mean plane crossing finder for a sensor.
 * <p>
 * Finders are cached per sensor name; a cached instance is reused only when it
 * was built for the same line range, otherwise a fresh finder is created and
 * stored (its constructor performs costly computations worth saving).
 * </p>
 * @param sensorName name of the line sensor
 * @param minLine minimum line number
 * @param maxLine maximum line number
 * @return mean plane crossing finder
 */
private SensorMeanPlaneCrossing getPlaneCrossing(final String sensorName,
                                                 final int minLine, final int maxLine) {
    final LineSensor sensor = getLineSensor(sensorName);
    final SensorMeanPlaneCrossing cached = finders.get(sensorName);
    final boolean reusable = cached != null &&
                             cached.getMinLine() == minLine &&
                             cached.getMaxLine() == maxLine;
    if (reusable) {
        return cached;
    }
    // build a new finder for the specified sensor and line range, then cache it
    final SensorMeanPlaneCrossing fresh =
            new SensorMeanPlaneCrossing(sensor, scToBody, minLine, maxLine,
                                        lightTimeCorrection, aberrationOfLightCorrection,
                                        MAX_EVAL, COARSE_INVERSE_LOCATION_ACCURACY);
    setPlaneCrossing(fresh);
    return fresh;
}
/** Store a mean plane crossing finder in the cache, keyed by its sensor name.
 * @param planeCrossing plane crossing finder to cache
 */
private void setPlaneCrossing(final SensorMeanPlaneCrossing planeCrossing) {
    final String sensorName = planeCrossing.getSensor().getName();
    finders.put(sensorName, planeCrossing);
}
/** Inverse location of a point with derivatives.
 * <p>
 * Proceeds in three steps: (1) find the approximate line at which the ground
 * point crosses the sensor mean plane, (2) find the approximate pixel along
 * that line, (3) refine both line and pixel by taking into account the exact
 * line-of-sight of the closest pixels, carrying derivatives throughout.
 * </p>
 * @param <T> derivative type
 * @param sensorName name of the line sensor
 * @param point point to localize
 * @param minLine minimum line number
 * @param maxLine maximum line number
 * @param generator generator to use for building {@link Derivative} instances
 * @return sensor pixel seeing point with derivatives (index 0 is the line,
 * index 1 is the pixel), or null if point cannot be seen between the
 * prescribed line numbers
 * @see #inverseLocation(String, GeodeticPoint, int, int)
 * @since 2.0
 */
public <T extends Derivative<T>> T[] inverseLocationDerivatives(final String sensorName,
final GeodeticPoint point,
final int minLine,
final int maxLine,
final DerivativeGenerator<T> generator) {
final LineSensor sensor = getLineSensor(sensorName);
final SensorMeanPlaneCrossing planeCrossing = getPlaneCrossing(sensorName, minLine, maxLine);
// find approximately the sensor line at which ground point crosses sensor mean plane
final Vector3D target = ellipsoid.transform(point);
final SensorMeanPlaneCrossing.CrossingResult crossingResult = planeCrossing.find(target);
if (crossingResult == null) {
// target is out of search interval
return null;
}
// find approximately the pixel along this sensor line
final SensorPixelCrossing pixelCrossing =
new SensorPixelCrossing(sensor, planeCrossing.getMeanPlaneNormal(),
crossingResult.getTargetDirection(),
MAX_EVAL, COARSE_INVERSE_LOCATION_ACCURACY);
final double coarsePixel = pixelCrossing.locatePixel(crossingResult.getDate());
if (Double.isNaN(coarsePixel)) {
// target is out of search interval
return null;
}
// fix line by considering the closest pixel exact position and line-of-sight
// (this pixel might point towards a direction slightly above or below the mean sensor plane)
// lowIndex is clamped so that both lowIndex and lowIndex + 1 are valid pixels
final int lowIndex = FastMath.max(0, FastMath.min(sensor.getNbPixels() - 2, (int) FastMath.floor(coarsePixel)));
final FieldVector3D<T> lowLOS =
sensor.getLOSDerivatives(crossingResult.getDate(), lowIndex, generator);
final FieldVector3D<T> highLOS = sensor.getLOSDerivatives(crossingResult.getDate(), lowIndex + 1, generator);
// localZ: normal to the plane spanned by the two neighbouring pixel LOS
final FieldVector3D<T> localZ = FieldVector3D.crossProduct(lowLOS, highLOS).normalize();
// beta: angle between target direction and localZ (crossing occurs at beta = pi/2)
final T beta = FieldVector3D.dotProduct(crossingResult.getTargetDirection(), localZ).acos();
final T s = FieldVector3D.dotProduct(crossingResult.getTargetDirectionDerivative(), localZ);
// derivative of -beta with respect to line (chain rule on acos)
final T minusBetaDer = s.divide(s.multiply(s).subtract(1).negate().sqrt());
// first-order (Newton-like) line correction bringing beta to pi/2
final T deltaL = beta.subtract(0.5 * FastMath.PI) .divide(minusBetaDer);
final T fixedLine = deltaL.add(crossingResult.getLine());
// target direction updated to first order with the line correction
final FieldVector3D<T> fixedDirection =
new FieldVector3D<>(deltaL.getField().getOne(), crossingResult.getTargetDirection(),
deltaL, crossingResult.getTargetDirectionDerivative()).normalize();
// fix neighbouring pixels: rebuild a local basis (fixedX, fixedY, fixedZ) at the fixed date
final AbsoluteDate fixedDate = sensor.getDate(fixedLine.getValue());
final FieldVector3D<T> fixedX = sensor.getLOSDerivatives(fixedDate, lowIndex, generator);
final FieldVector3D<T> fixedZ = FieldVector3D.crossProduct(fixedX, sensor.getLOSDerivatives(fixedDate, lowIndex + 1, generator));
final FieldVector3D<T> fixedY = FieldVector3D.crossProduct(fixedZ, fixedX);
// fix pixel: angular interpolation between lowIndex and lowIndex + 1
final T hY = FieldVector3D.dotProduct(highLOS, fixedY);
final T hX = FieldVector3D.dotProduct(highLOS, fixedX);
final T pixelWidth = hY.atan2(hX);
final T fY = FieldVector3D.dotProduct(fixedDirection, fixedY);
final T fX = FieldVector3D.dotProduct(fixedDirection, fixedX);
final T alpha = fY.atan2(fX);
final T fixedPixel = alpha.divide(pixelWidth).add(lowIndex);
final T[] ret = MathArrays.buildArray(fixedPixel.getField(), 2);
ret[0] = fixedLine;
ret[1] = fixedPixel;
return ret;
}
/** Get the transform from spacecraft frame to inertial frame at a given date.
 * <p>Delegates to the underlying spacecraft-to-body converter.</p>
 * @param date date of the transform
 * @return transform from spacecraft to inertial frame
 */
public Transform getScToInertial(final AbsoluteDate date) {
    return this.scToBody.getScToInertial(date);
}
/** Get the transform from inertial frame to observed body frame at a given date.
 * <p>Delegates to the underlying spacecraft-to-body converter.</p>
 * @param date date of the transform
 * @return transform from inertial frame to observed body frame
 */
public Transform getInertialToBody(final AbsoluteDate date) {
    return this.scToBody.getInertialToBody(date);
}
/** Get the transform from observed body frame to inertial frame at a given date.
 * <p>Delegates to the underlying spacecraft-to-body converter.</p>
 * @param date date of the transform
 * @return transform from observed body frame to inertial frame
 */
public Transform getBodyToInertial(final AbsoluteDate date) {
    return this.scToBody.getBodyToInertial(date);
}
/** Get a sensor by name.
 * @param sensorName sensor name
 * @return selected sensor
 * @throws RuggedException if no sensor with this name has been registered
 */
public LineSensor getLineSensor(final String sensorName) {
    final LineSensor found = sensors.get(sensorName);
    if (found != null) {
        return found;
    }
    throw new RuggedException(RuggedMessages.UNKNOWN_SENSOR, sensorName);
}
/** Get the converter between spacecraft and observed body frames.
 * @return the spacecraft-to-body converter used by this instance
 * @since 2.0
 */
public SpacecraftToObservedBody getScToBody() {
    return this.scToBody;
}
}
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.api;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.hipparchus.exception.LocalizedCoreFormats;
import org.hipparchus.geometry.euclidean.threed.Rotation;
import org.hipparchus.geometry.euclidean.threed.Vector3D;
import org.orekit.bodies.OneAxisEllipsoid;
import org.orekit.frames.Frame;
import org.orekit.frames.FramesFactory;
import org.orekit.propagation.Propagator;
import org.orekit.rugged.errors.RuggedException;
import org.orekit.rugged.errors.RuggedInternalError;
import org.orekit.rugged.errors.RuggedMessages;
import org.orekit.rugged.intersection.BasicScanAlgorithm;
import org.orekit.rugged.intersection.ConstantElevationAlgorithm;
import org.orekit.rugged.intersection.IgnoreDEMAlgorithm;
import org.orekit.rugged.intersection.IntersectionAlgorithm;
import org.orekit.rugged.intersection.duvenhage.DuvenhageAlgorithm;
import org.orekit.rugged.linesensor.LineSensor;
import org.orekit.rugged.raster.TileUpdater;
import org.orekit.rugged.refraction.AtmosphericRefraction;
import org.orekit.rugged.utils.ExtendedEllipsoid;
import org.orekit.rugged.utils.SpacecraftToObservedBody;
import org.orekit.time.AbsoluteDate;
import org.orekit.utils.AngularDerivativesFilter;
import org.orekit.utils.CartesianDerivativesFilter;
import org.orekit.utils.Constants;
import org.orekit.utils.IERSConventions;
import org.orekit.utils.PVCoordinates;
import org.orekit.utils.TimeStampedAngularCoordinates;
import org.orekit.utils.TimeStampedPVCoordinates;
/** Builder for {@link Rugged} instances.
* <p>
* This class implements the <em>builder pattern</em> to create {@link Rugged} instances.
* It does so by using a <em>fluent API</em> in order to clarify reading and allow
* later extensions with new configuration parameters.
* </p>
* <p>
* A typical use would be:
* </p>
* <pre>
* Rugged rugged = new RuggedBuilder().
* setEllipsoid(EllipsoidId.WGS84, BodyRotatingFrameId.ITRF).
* setAlgorithmID(AlgorithmId.Duvenhage).
* setDigitalElevationModel(tileUpdater, maxCachedTiles).
* setTimeSpan(minDate, maxDate, tStep, overshootTolerance).
* setTrajectory(positionsVelocities, pvInterpolationNumber, pvFilter,
* quaternions, aInterpolationNumber, aFilter).
* addLineSensor(sensor1).
* addLineSensor(sensor2).
* addLineSensor(sensor3).
* build();
* </pre>
* <p>
 * If a configuration parameter has not been set prior to the call to {@link #build()}, then
* an exception will be triggered with an explicit error message.
* </p>
* @see <a href="https://en.wikipedia.org/wiki/Builder_pattern">Builder pattern (wikipedia)</a>
* @see <a href="https://en.wikipedia.org/wiki/Fluent_interface">Fluent interface (wikipedia)</a>
* @author Luc Maisonobe
* @author Guylaine Prat
*/
public class RuggedBuilder {
/** Reference ellipsoid. */
private ExtendedEllipsoid ellipsoid;
/** DEM intersection algorithm. */
private AlgorithmId algorithmID;
/** Updater used to load Digital Elevation Model tiles. */
private TileUpdater tileUpdater;
/** Constant elevation over ellipsoid (m).
 * Used only with {@link AlgorithmId#CONSTANT_ELEVATION_OVER_ELLIPSOID}. */
private double constantElevation;
/** Maximum number of tiles stored in the cache. */
private int maxCachedTiles;
/** Start of search time span. */
private AbsoluteDate minDate;
/** End of search time span. */
private AbsoluteDate maxDate;
/** Step to use for inertial frame to body frame transforms cache computations (s). */
private double tStep;
/** OvershootTolerance tolerance in seconds allowed for {@code minDate} and {@code maxDate} overshooting (s). */
private double overshootTolerance;
/** Inertial frame for position/velocity/attitude. */
private Frame inertial;
/** Satellite position and velocity (m and m/s in inertial frame). */
private List<TimeStampedPVCoordinates> pvSample;
/** Number of points to use for position/velocity interpolation. */
private int pvNeighborsSize;
/** Filter for derivatives from the sample to use in position/velocity interpolation. */
private CartesianDerivativesFilter pvDerivatives;
/** Satellite quaternions with respect to inertial frame. */
private List<TimeStampedAngularCoordinates> aSample;
/** Number of points to use for attitude interpolation. */
private int aNeighborsSize;
/** Filter for derivatives from the sample to use in attitude interpolation. */
private AngularDerivativesFilter aDerivatives;
/** Propagator for position/velocity/attitude. */
private Propagator pvaPropagator;
/** Step to use for inertial/Earth/spacecraft transforms interpolations (s). */
private double iStep;
/** Number of points to use for inertial/Earth/spacecraft transforms interpolations. */
private int iN;
/** Converter between spacecraft and body. */
private SpacecraftToObservedBody scToBody;
/** Flag for light time correction. */
private boolean lightTimeCorrection;
/** Flag for aberration of light correction. */
private boolean aberrationOfLightCorrection;
/** Atmospheric refraction to use for line of sight correction. */
private AtmosphericRefraction atmosphericRefraction;
/** Sensors. */
private final List<LineSensor> sensors;
/** Rugged name. */
private String name;
/** Create a non-configured builder.
 * <p>
 * The builder <em>must</em> be configured before calling the
 * {@link #build} method, otherwise an exception will be triggered
 * at build time. Both light time correction and aberration of light
 * correction are enabled by default.
 * </p>
 */
public RuggedBuilder() {
    this.name                        = "Rugged";
    this.sensors                     = new ArrayList<>();
    this.constantElevation           = Double.NaN;
    this.lightTimeCorrection         = true;
    this.aberrationOfLightCorrection = true;
}
/** Set the reference ellipsoid.
 * @param ellipsoidID reference ellipsoid identifier
 * @param bodyRotatingFrameID body rotating frame identifier
 * @return the builder instance
 * @see #setEllipsoid(OneAxisEllipsoid)
 * @see #getEllipsoid()
 */
public RuggedBuilder setEllipsoid(final EllipsoidId ellipsoidID, final BodyRotatingFrameId bodyRotatingFrameID) {
return setEllipsoid(selectEllipsoid(ellipsoidID, selectBodyRotatingFrame(bodyRotatingFrameID)));
}
/** Set the reference ellipsoid from an Orekit ellipsoid model.
 * <p>
 * The ellipsoid is wrapped into an {@link ExtendedEllipsoid} and the frames
 * consistency is checked against any previously loaded trajectory data.
 * </p>
 * @param newEllipsoid reference ellipsoid
 * @return the builder instance
 * @see #setEllipsoid(EllipsoidId, BodyRotatingFrameId)
 * @see #getEllipsoid()
 */
public RuggedBuilder setEllipsoid(final OneAxisEllipsoid newEllipsoid) {
    final double ae        = newEllipsoid.getEquatorialRadius();
    final double f         = newEllipsoid.getFlattening();
    final Frame  bodyFrame = newEllipsoid.getBodyFrame();
    this.ellipsoid = new ExtendedEllipsoid(ae, f, bodyFrame);
    checkFramesConsistency();
    return this;
}
/** Get the reference ellipsoid.
 * @return the ellipsoid (null if not set yet)
 * @see #setEllipsoid(EllipsoidId, BodyRotatingFrameId)
 * @see #setEllipsoid(OneAxisEllipsoid)
 */
public ExtendedEllipsoid getEllipsoid() {
    return this.ellipsoid;
}
/** Get the name that will be assigned to the built Rugged instance.
 * @return the Rugged name
 * @since 2.0
 */
public String getName() {
    return this.name;
}
/** Set the name that will be assigned to the built Rugged instance.
 * @param name the Rugged name
 * @since 2.0
 */
public void setName(final String name) {
    this.name = name;
}
/** Set the algorithm to use for Digital Elevation Model intersection.
 * <p>
 * Note that some algorithms require specific other methods to be called too:
 * </p>
 * <ul>
 * <li>{@link AlgorithmId#DUVENHAGE DUVENHAGE},
 * {@link AlgorithmId#DUVENHAGE_FLAT_BODY DUVENHAGE_FLAT_BODY}
 * and {@link AlgorithmId#BASIC_SLOW_EXHAUSTIVE_SCAN_FOR_TESTS_ONLY
 * BASIC_SLOW_EXHAUSTIVE_SCAN_FOR_TESTS_ONLY} all
 * require {@link #setDigitalElevationModel(TileUpdater, int) setDigitalElevationModel}
 * to be called,</li>
 * <li>{@link AlgorithmId#CONSTANT_ELEVATION_OVER_ELLIPSOID
 * CONSTANT_ELEVATION_OVER_ELLIPSOID} requires
 * {@link #setConstantElevation(double) setConstantElevation} to be called,</li>
 * <li>{@link AlgorithmId#IGNORE_DEM_USE_ELLIPSOID
 * IGNORE_DEM_USE_ELLIPSOID} does not require
 * any method to be called.</li>
 * </ul>
 *
 * @param newAlgorithmId identifier of algorithm to use for Digital Elevation Model intersection
 * @return the builder instance
 * @see #setDigitalElevationModel(TileUpdater, int)
 * @see #getAlgorithm()
 */
public RuggedBuilder setAlgorithm(final AlgorithmId newAlgorithmId) {
this.algorithmID = newAlgorithmId;
return this;
}
/** Get the algorithm selected for Digital Elevation Model intersection.
 * @return algorithm to use for Digital Elevation Model intersection
 * @see #setAlgorithm(AlgorithmId)
 */
public AlgorithmId getAlgorithm() {
    return this.algorithmID;
}
/** Set the user-provided {@link TileUpdater tile updater}.
 * <p>
 * Note that when the algorithm specified in {@link #setAlgorithm(AlgorithmId)}
 * is either {@link AlgorithmId#CONSTANT_ELEVATION_OVER_ELLIPSOID
 * CONSTANT_ELEVATION_OVER_ELLIPSOID} or {@link
 * AlgorithmId#IGNORE_DEM_USE_ELLIPSOID IGNORE_DEM_USE_ELLIPSOID},
 * then this method becomes irrelevant and can either be not called at all,
 * or it can be called with an updater set to {@code null}. For all other
 * algorithms, the updater must be properly configured.
 * </p>
 * @param newTileUpdater updater used to load Digital Elevation Model tiles
 * @param newMaxCachedTiles maximum number of tiles stored in the cache
 * @return the builder instance
 * @see #setAlgorithm(AlgorithmId)
 * @see #getTileUpdater()
 * @see #getMaxCachedTiles()
 */
public RuggedBuilder setDigitalElevationModel(final TileUpdater newTileUpdater, final int newMaxCachedTiles) {
    // both settings are tied together: the cache size only makes sense with an updater
    this.tileUpdater    = newTileUpdater;
    this.maxCachedTiles = newMaxCachedTiles;
    return this;
}
/** Get the updater used to load Digital Elevation Model tiles.
 * @return updater used to load Digital Elevation Model tiles (may be null)
 * @see #setDigitalElevationModel(TileUpdater, int)
 * @see #getMaxCachedTiles()
 */
public TileUpdater getTileUpdater() {
    return this.tileUpdater;
}
/** Set the user-provided constant elevation model.
 * <p>
 * Note that this method is relevant <em>only</em> if the algorithm specified
 * in {@link #setAlgorithm(AlgorithmId)} is {@link
 * AlgorithmId#CONSTANT_ELEVATION_OVER_ELLIPSOID CONSTANT_ELEVATION_OVER_ELLIPSOID}.
 * If it is called for another algorithm, the elevation set here will be ignored.
 * </p>
 * @param newConstantElevation constant elevation to use (m)
 * @return the builder instance
 * @see #setAlgorithm(AlgorithmId)
 * @see #getConstantElevation()
 */
public RuggedBuilder setConstantElevation(final double newConstantElevation) {
    this.constantElevation = newConstantElevation;
    return this;
}
/** Get the constant elevation over ellipsoid to use with {@link AlgorithmId#CONSTANT_ELEVATION_OVER_ELLIPSOID}.
 * @return constant elevation over ellipsoid (m), NaN if not set
 * @see #setConstantElevation(double)
 */
public double getConstantElevation() {
return constantElevation;
}
/** Get the maximum number of tiles stored in the cache.
 * @return maximum number of tiles stored in the cache
 * @see #setDigitalElevationModel(TileUpdater, int)
 * @see #getTileUpdater()
 */
public int getMaxCachedTiles() {
    return this.maxCachedTiles;
}
/** Set the time span to be covered for direct and inverse location calls.
 * <p>
 * This method set only the time span and not the trajectory, therefore it
 * <em>must</em> be used together with either
 * {@link #setTrajectory(InertialFrameId, List, int, CartesianDerivativesFilter, List, int, AngularDerivativesFilter)},
 * {@link #setTrajectory(Frame, List, int, CartesianDerivativesFilter, List, int, AngularDerivativesFilter)},
 * or {@link #setTrajectory(double, int, CartesianDerivativesFilter, AngularDerivativesFilter, Propagator)}
 * but should <em>not</em> be mixed with {@link #setTrajectoryAndTimeSpan(InputStream)}.
 * </p>
 * @param newMinDate start of search time span
 * @param newMaxDate end of search time span
 * @param newTstep step to use for inertial frame to body frame transforms cache computations (s)
 * @param newOvershootTolerance tolerance in seconds allowed for {@code minDate} and {@code maxDate} overshooting (s)
 * @return the builder instance
 * @see #setTrajectoryAndTimeSpan(InputStream)
 * @see #getMinDate()
 * @see #getMaxDate()
 * @see #getTStep()
 * @see #getOvershootTolerance()
 */
public RuggedBuilder setTimeSpan(final AbsoluteDate newMinDate, final AbsoluteDate newMaxDate,
                                 final double newTstep, final double newOvershootTolerance) {
    this.minDate            = newMinDate;
    this.maxDate            = newMaxDate;
    this.tStep              = newTstep;
    this.overshootTolerance = newOvershootTolerance;
    // any previously deserialized converter is now stale: drop it so it gets rebuilt
    this.scToBody           = null;
    return this;
}
/** Get the start of the search time span.
 * @return start of search time span
 * @see #setTimeSpan(AbsoluteDate, AbsoluteDate, double, double)
 */
public AbsoluteDate getMinDate() {
    return this.minDate;
}
/** Get the end of the search time span.
 * @return end of search time span
 * @see #setTimeSpan(AbsoluteDate, AbsoluteDate, double, double)
 */
public AbsoluteDate getMaxDate() {
    return this.maxDate;
}
/** Get the step used for inertial frame to body frame transforms cache computations.
 * @return step to use for inertial frame to body frame transforms cache computations (s)
 * @see #setTimeSpan(AbsoluteDate, AbsoluteDate, double, double)
 */
public double getTStep() {
    return this.tStep;
}
/** Get the tolerance in seconds allowed for {@link #getMinDate()} and {@link #getMaxDate()} overshooting.
 * @return tolerance in seconds allowed for {@link #getMinDate()} and {@link #getMaxDate()} overshooting
 * @see #setTimeSpan(AbsoluteDate, AbsoluteDate, double, double)
 */
public double getOvershootTolerance() {
    return this.overshootTolerance;
}
/** Set the spacecraft trajectory.
 * <p>
 * This method set only the trajectory and not the time span, therefore it
 * <em>must</em> be used together with the {@link #setTimeSpan(AbsoluteDate, AbsoluteDate, double, double)}
 * but should <em>not</em> be mixed with {@link #setTrajectoryAndTimeSpan(InputStream)}.
 * </p>
 * @param inertialFrameId inertial frame Id used for spacecraft positions/velocities/quaternions
 * @param positionsVelocities satellite position and velocity (m and m/s in inertial frame)
 * @param pvInterpolationNumber number of points to use for position/velocity interpolation
 * @param pvFilter filter for derivatives from the sample to use in position/velocity interpolation
 * @param quaternions satellite quaternions with respect to inertial frame
 * @param aInterpolationNumber number of points to use for attitude interpolation
 * @param aFilter filter for derivatives from the sample to use in attitude interpolation
 * @return the builder instance
 * @see #setTrajectory(Frame, List, int, CartesianDerivativesFilter, List, int, AngularDerivativesFilter)
 * @see #setTrajectory(double, int, CartesianDerivativesFilter, AngularDerivativesFilter, Propagator)
 * @see #setTrajectoryAndTimeSpan(InputStream)
 * @see #getInertialFrame()
 * @see #getPositionsVelocities()
 * @see #getPVInterpolationNumber()
 * @see #getPVFilter()
 * @see #getQuaternions()
 * @see #getAInterpolationNumber()
 * @see #getAFilter()
 */
public RuggedBuilder setTrajectory(final InertialFrameId inertialFrameId,
final List<TimeStampedPVCoordinates> positionsVelocities, final int pvInterpolationNumber,
final CartesianDerivativesFilter pvFilter,
final List<TimeStampedAngularCoordinates> quaternions, final int aInterpolationNumber,
final AngularDerivativesFilter aFilter) {
return setTrajectory(selectInertialFrame(inertialFrameId),
positionsVelocities, pvInterpolationNumber, pvFilter,
quaternions, aInterpolationNumber, aFilter);
}
/** Set the spacecraft trajectory.
 * <p>
 * This method set only the trajectory and not the time span, therefore it
 * <em>must</em> be used together with the {@link #setTimeSpan(AbsoluteDate, AbsoluteDate, double, double)}
 * but should <em>not</em> be mixed with {@link #setTrajectoryAndTimeSpan(InputStream)}.
 * </p>
 * @param inertialFrame inertial frame used for spacecraft positions/velocities/quaternions
 * @param positionsVelocities satellite position and velocity (m and m/s in inertial frame)
 * @param pvInterpolationNumber number of points to use for position/velocity interpolation
 * @param pvFilter filter for derivatives from the sample to use in position/velocity interpolation
 * @param quaternions satellite quaternions with respect to inertial frame
 * @param aInterpolationNumber number of points to use for attitude interpolation
 * @param aFilter filter for derivatives from the sample to use in attitude interpolation
 * @return the builder instance
 * @see #setTrajectory(InertialFrameId, List, int, CartesianDerivativesFilter, List, int, AngularDerivativesFilter)
 * @see #setTrajectory(double, int, CartesianDerivativesFilter, AngularDerivativesFilter, Propagator)
 * @see #setTrajectoryAndTimeSpan(InputStream)
 * @see #getPositionsVelocities()
 * @see #getPVInterpolationNumber()
 * @see #getPVFilter()
 * @see #getQuaternions()
 * @see #getAInterpolationNumber()
 * @see #getAFilter()
 */
public RuggedBuilder setTrajectory(final Frame inertialFrame,
final List<TimeStampedPVCoordinates> positionsVelocities, final int pvInterpolationNumber,
final CartesianDerivativesFilter pvFilter,
final List<TimeStampedAngularCoordinates> quaternions, final int aInterpolationNumber,
final AngularDerivativesFilter aFilter) {
this.inertial = inertialFrame;
this.pvSample = positionsVelocities;
this.pvNeighborsSize = pvInterpolationNumber;
this.pvDerivatives = pvFilter;
this.aSample = quaternions;
this.aNeighborsSize = aInterpolationNumber;
this.aDerivatives = aFilter;
// clear the propagator-based settings: sample-based and propagator-based
// configurations are mutually exclusive
this.pvaPropagator = null;
this.iStep = Double.NaN;
this.iN = -1;
// drop any previously deserialized converter so it gets rebuilt
this.scToBody = null;
return this;
}
/** Set the spacecraft trajectory from a propagator.
 * <p>
 * This method set only the trajectory and not the time span, therefore it
 * <em>must</em> be used together with the {@link #setTimeSpan(AbsoluteDate, AbsoluteDate, double, double)}
 * but should <em>not</em> be mixed with {@link #setTrajectoryAndTimeSpan(InputStream)}.
 * </p>
 * @param interpolationStep step to use for inertial/Earth/spacecraft transforms interpolations (s)
 * @param interpolationNumber number of points to use for inertial/Earth/spacecraft transforms interpolations
 * @param pvFilter filter for derivatives from the sample to use in position/velocity interpolation
 * @param aFilter filter for derivatives from the sample to use in attitude interpolation
 * @param propagator global propagator
 * @return the builder instance
 * @see #setTrajectory(InertialFrameId, List, int, CartesianDerivativesFilter, List, int, AngularDerivativesFilter)
 * @see #setTrajectory(Frame, List, int, CartesianDerivativesFilter, List, int, AngularDerivativesFilter)
 * @see #setTrajectoryAndTimeSpan(InputStream)
 */
public RuggedBuilder setTrajectory(final double interpolationStep, final int interpolationNumber,
                                   final CartesianDerivativesFilter pvFilter, final AngularDerivativesFilter aFilter,
                                   final Propagator propagator) {
    // propagator-based configuration
    this.inertial      = propagator.getFrame();
    this.pvaPropagator = propagator;
    this.iStep         = interpolationStep;
    this.iN            = interpolationNumber;
    this.pvDerivatives = pvFilter;
    this.aDerivatives  = aFilter;
    // clear sample-based settings: the two configuration modes are mutually exclusive
    this.pvSample        = null;
    this.pvNeighborsSize = -1;
    this.aSample         = null;
    this.aNeighborsSize  = -1;
    // drop any previously deserialized converter so it gets rebuilt
    this.scToBody = null;
    return this;
}
/** Get the inertial frame used for positions/velocities/quaternions.
 * @return inertial frame
 */
public Frame getInertialFrame() {
    return this.inertial;
}
/** Get the satellite position and velocity sample (m and m/s in inertial frame).
 * @return satellite position and velocity (m and m/s in inertial frame)
 * @see #setTrajectory(double, int, CartesianDerivativesFilter, AngularDerivativesFilter, Propagator)
 */
public List<TimeStampedPVCoordinates> getPositionsVelocities() {
    return this.pvSample;
}
/** Get the number of points used for position/velocity interpolation.
 * @return number of points to use for position/velocity interpolation
 * @see #setTrajectory(double, int, CartesianDerivativesFilter, AngularDerivativesFilter, Propagator)
 */
public int getPVInterpolationNumber() {
    return this.pvNeighborsSize;
}
/** Get the filter for derivatives used in position/velocity interpolation.
 * @return filter for derivatives from the sample to use in position/velocity interpolation
 * @see #setTrajectory(double, int, CartesianDerivativesFilter, AngularDerivativesFilter, Propagator)
 */
public CartesianDerivativesFilter getPVFilter() {
    return this.pvDerivatives;
}
/** Get the satellite quaternion sample, with respect to the inertial frame.
 * @return satellite quaternions with respect to inertial frame
 * @see #setTrajectory(double, int, CartesianDerivativesFilter, AngularDerivativesFilter, Propagator)
 */
public List<TimeStampedAngularCoordinates> getQuaternions() {
    return this.aSample;
}
/** Get the number of points used for attitude interpolation.
 * @return number of points to use for attitude interpolation
 * @see #setTrajectory(double, int, CartesianDerivativesFilter, AngularDerivativesFilter, Propagator)
 */
public int getAInterpolationNumber() {
    return this.aNeighborsSize;
}
/** Get the filter for derivatives used in attitude interpolation.
 * @return filter for derivatives from the sample to use in attitude interpolation
 * @see #setTrajectory(double, int, CartesianDerivativesFilter, AngularDerivativesFilter, Propagator)
 */
public AngularDerivativesFilter getAFilter() {
    return this.aDerivatives;
}
/** Set both the spacecraft trajectory and the time span from a stored interpolator.
 * <p>
 * This method sets both the trajectory and the time span in a tightly coupled
 * way, therefore it should <em>not</em> be mixed with the individual methods
 * {@link #setTrajectory(double, int, CartesianDerivativesFilter, AngularDerivativesFilter, Propagator)},
 * {@link #setTrajectory(Frame, List, int, CartesianDerivativesFilter, List, int, AngularDerivativesFilter)},
 * {@link #setTrajectory(InertialFrameId, List, int, CartesianDerivativesFilter, List, int, AngularDerivativesFilter)},
 * or {@link #setTimeSpan(AbsoluteDate, AbsoluteDate, double, double)}.
 * </p>
 * @param storageStream stream from where to read previous instance {@link #storeInterpolator(OutputStream)
 * stored interpolator} (caller opened it and remains responsible for closing it)
 * @return the builder instance
 * @see #setTrajectory(InertialFrameId, List, int, CartesianDerivativesFilter, List, int, AngularDerivativesFilter)
 * @see #setTrajectory(Frame, List, int, CartesianDerivativesFilter, List, int, AngularDerivativesFilter)
 * @see #setTrajectory(double, int, CartesianDerivativesFilter, AngularDerivativesFilter, Propagator)
 * @see #storeInterpolator(OutputStream)
 */
public RuggedBuilder setTrajectoryAndTimeSpan(final InputStream storageStream) {
    try {
        // invalidate every piece of trajectory configuration that may have been
        // entered through the individual setters: the deserialized interpolator
        // below fully replaces them
        this.inertial           = null;
        this.pvSample           = null;
        this.pvNeighborsSize    = -1;
        this.pvDerivatives      = null;
        this.aSample            = null;
        this.aNeighborsSize     = -1;
        this.aDerivatives       = null;
        this.pvaPropagator      = null;
        this.iStep              = Double.NaN;
        this.iN                 = -1;
        // the stream is expected to contain a serialized SpacecraftToObservedBody instance
        this.scToBody           = (SpacecraftToObservedBody) new ObjectInputStream(storageStream).readObject();
        // the time span is carried by the interpolator itself
        this.minDate            = scToBody.getMinDate();
        this.maxDate            = scToBody.getMaxDate();
        this.tStep              = scToBody.getTStep();
        this.overshootTolerance = scToBody.getOvershootTolerance();
        checkFramesConsistency();
        return this;
    } catch (ClassNotFoundException | ClassCastException | IOException e) {
        // the stream did not contain a valid interpolator dump
        throw new RuggedException(e, RuggedMessages.NOT_INTERPOLATOR_DUMP_DATA);
    }
}
/** Store frames transform interpolator.
 * <p>
 * This method allows to reuse the interpolator built in one instance, to build
 * another instance by calling {@link #setTrajectoryAndTimeSpan(InputStream)}.
 * This reduces the builder initialization time as setting up the interpolator can be long, it is
 * mainly intended to be used when several runs are done (for example in an image processing chain)
 * with the same configuration.
 * </p>
 * <p>
 * This method must be called <em>after</em> both the ellipsoid and trajectory have been set.
 * </p>
 * @param storageStream stream where to store the interpolator
 * (caller opened it and remains responsible for closing it)
 * @see #setEllipsoid(EllipsoidId, BodyRotatingFrameId)
 * @see #setEllipsoid(OneAxisEllipsoid)
 * @see #setTrajectory(InertialFrameId, List, int, CartesianDerivativesFilter, List, int, AngularDerivativesFilter)
 * @see #setTrajectory(Frame, List, int, CartesianDerivativesFilter, List, int, AngularDerivativesFilter)
 * @see #setTrajectory(double, int, CartesianDerivativesFilter, AngularDerivativesFilter, Propagator)
 * @see #setTrajectoryAndTimeSpan(InputStream)
 */
public void storeInterpolator(final OutputStream storageStream) {
    try {
        createInterpolatorIfNeeded();
        final ObjectOutputStream oos = new ObjectOutputStream(storageStream);
        oos.writeObject(scToBody);
        // flush the object stream buffers into the underlying caller stream;
        // without this the tail of the serialized data may never reach the
        // destination, since only the caller closes its own stream
        oos.flush();
    } catch (IOException ioe) {
        throw new RuggedException(ioe, LocalizedCoreFormats.SIMPLE_MESSAGE, ioe.getMessage());
    }
}
/** Check consistency between the ellipsoid body frame and the interpolator body frame.
 * <p>
 * Both frames may have been set independently (ellipsoid by direct calls,
 * interpolator by deserializing a dump), so a mismatch is possible and must
 * be detected before use.
 * </p>
 */
private void checkFramesConsistency() {
    if (ellipsoid != null && scToBody != null &&
        !ellipsoid.getBodyFrame().getName().equals(scToBody.getBodyFrame().getName())) {
        // if frames have been set both by direct calls and by deserializing an interpolator dump and a mismatch occurs
        throw new RuggedException(RuggedMessages.FRAMES_MISMATCH_WITH_INTERPOLATOR_DUMP,
                                  ellipsoid.getBodyFrame().getName(), scToBody.getBodyFrame().getName());
    }
}
/** Create a transform interpolator if it has not been created or restored yet.
 * <p>
 * The interpolator is built either from the position/attitude samples or from
 * the propagator, depending on which trajectory setter was used. Calling this
 * method requires the ellipsoid and one trajectory source to have been set.
 * </p>
 */
private void createInterpolatorIfNeeded() {
    // the ellipsoid provides the body frame, it is mandatory
    if (ellipsoid == null) {
        throw new RuggedException(RuggedMessages.UNINITIALIZED_CONTEXT, "RuggedBuilder.setEllipsoid()");
    }
    if (scToBody == null) {
        if (pvSample != null) {
            // trajectory was given as explicit position/velocity and attitude samples
            scToBody = createInterpolator(inertial, ellipsoid.getBodyFrame(),
                                          minDate, maxDate, tStep, overshootTolerance,
                                          pvSample, pvNeighborsSize, pvDerivatives,
                                          aSample, aNeighborsSize, aDerivatives);
        } else if (pvaPropagator != null) {
            // trajectory was given as a propagator, samples will be extracted from it
            scToBody = createInterpolator(inertial, ellipsoid.getBodyFrame(),
                                          minDate, maxDate, tStep, overshootTolerance,
                                          iStep, iN, pvDerivatives, aDerivatives, pvaPropagator);
        } else {
            throw new RuggedException(RuggedMessages.UNINITIALIZED_CONTEXT, "RuggedBuilder.setTrajectory()");
        }
    }
}
/** Create a transform interpolator from positions and quaternions lists.
 * <p>
 * This is a thin factory delegating to the {@link SpacecraftToObservedBody} constructor.
 * </p>
 * @param inertialFrame inertial frame
 * @param bodyFrame observed body frame
 * @param minDate start of search time span
 * @param maxDate end of search time span
 * @param tStep step to use for inertial frame to body frame transforms cache computations
 * @param overshootTolerance tolerance in seconds allowed for {@code minDate} and {@code maxDate} overshooting
 * @param positionsVelocities satellite position and velocity
 * @param pvInterpolationNumber number of points to use for position/velocity interpolation
 * @param pvFilter filter for derivatives from the sample to use in position/velocity interpolation
 * @param quaternions satellite quaternions
 * @param aInterpolationNumber number of points to use for attitude interpolation
 * @param aFilter filter for derivatives from the sample to use in attitude interpolation
 * @return transforms interpolator
 */
private static SpacecraftToObservedBody createInterpolator(final Frame inertialFrame, final Frame bodyFrame,
                                                           final AbsoluteDate minDate, final AbsoluteDate maxDate,
                                                           final double tStep, final double overshootTolerance,
                                                           final List<TimeStampedPVCoordinates> positionsVelocities,
                                                           final int pvInterpolationNumber,
                                                           final CartesianDerivativesFilter pvFilter,
                                                           final List<TimeStampedAngularCoordinates> quaternions,
                                                           final int aInterpolationNumber,
                                                           final AngularDerivativesFilter aFilter) {
    return new SpacecraftToObservedBody(inertialFrame, bodyFrame,
                                        minDate, maxDate, tStep, overshootTolerance,
                                        positionsVelocities, pvInterpolationNumber,
                                        pvFilter, quaternions, aInterpolationNumber,
                                        aFilter);
}
/** Create a transform interpolator from a propagator.
 * <p>
 * The propagator is sampled at a fixed step over a span slightly wider than
 * [{@code minDate}, {@code maxDate}], then the sample-based factory is used.
 * </p>
 * @param inertialFrame inertial frame
 * @param bodyFrame observed body frame
 * @param minDate start of search time span
 * @param maxDate end of search time span
 * @param tStep step to use for inertial frame to body frame transforms cache computations
 * @param overshootTolerance tolerance in seconds allowed for {@code minDate} and {@code maxDate} overshooting
 * @param interpolationStep step to use for inertial/Earth/spacecraft transforms interpolations
 * @param interpolationNumber number of points of to use for inertial/Earth/spacecraft transforms interpolations
 * @param pvFilter filter for derivatives from the sample to use in position/velocity interpolation
 * @param aFilter filter for derivatives from the sample to use in attitude interpolation
 * @param propagator global propagator
 * @return transforms interpolator
 */
private static SpacecraftToObservedBody createInterpolator(final Frame inertialFrame, final Frame bodyFrame,
                                                           final AbsoluteDate minDate, final AbsoluteDate maxDate,
                                                           final double tStep, final double overshootTolerance,
                                                           final double interpolationStep, final int interpolationNumber,
                                                           final CartesianDerivativesFilter pvFilter,
                                                           final AngularDerivativesFilter aFilter,
                                                           final Propagator propagator) {
    // extract position/attitude samples from propagator
    final List<TimeStampedPVCoordinates> positionsVelocities =
            new ArrayList<>();
    final List<TimeStampedAngularCoordinates> quaternions =
            new ArrayList<>();
    // register a fixed-step handler that records position/velocity and attitude at each step
    // (acceleration and rotation rates are not sampled, hence the Vector3D.ZERO placeholders)
    propagator.getMultiplexer().add(interpolationStep,
        currentState -> {
            final AbsoluteDate date = currentState.getDate();
            final PVCoordinates pv = currentState.getPVCoordinates(inertialFrame);
            final Rotation q = currentState.getAttitude().getRotation();
            positionsVelocities.add(new TimeStampedPVCoordinates(date, pv.getPosition(), pv.getVelocity(), Vector3D.ZERO));
            quaternions.add(new TimeStampedAngularCoordinates(date, q, Vector3D.ZERO, Vector3D.ZERO));
        });
    // propagate one step beyond each end of the span so interpolation near the
    // boundaries has neighbors on both sides
    propagator.propagate(minDate.shiftedBy(-interpolationStep), maxDate.shiftedBy(interpolationStep));
    // orbit/attitude to body converter
    return createInterpolator(inertialFrame, bodyFrame,
                              minDate, maxDate, tStep, overshootTolerance,
                              positionsVelocities, interpolationNumber,
                              pvFilter, quaternions, interpolationNumber,
                              aFilter);
}
/** Set flag for light time correction.
 * <p>
 * This method sets the flag for compensating or not light travel time between
 * ground and spacecraft. Compensating this delay improves location
 * accuracy and is <em>enabled</em> by default (i.e. not calling this
 * method before building is therefore equivalent to calling it with
 * a parameter set to {@code true}). Not compensating it is mainly useful
 * for validation purposes against systems that do not compensate it.
 * </p>
 * @param newLightTimeCorrection if true, the light travel time between ground
 * and spacecraft is compensated for more accurate location
 * @return the builder instance
 * @see #setAberrationOfLightCorrection(boolean)
 * @see #getLightTimeCorrection()
 */
public RuggedBuilder setLightTimeCorrection(final boolean newLightTimeCorrection) {
    this.lightTimeCorrection = newLightTimeCorrection;
    return this;
}
/** Get the light time correction flag.
 * @return true if light travel time between ground and spacecraft is compensated
 * @see #setLightTimeCorrection(boolean)
 */
public boolean getLightTimeCorrection() {
    return lightTimeCorrection;
}
/** Set flag for aberration of light correction.
 * <p>
 * This method sets the flag for compensating or not aberration of light,
 * which is velocity composition between light and spacecraft when the
 * light from ground points reaches the sensor.
 * Compensating this velocity composition improves location
 * accuracy and is <em>enabled</em> by default (i.e. not calling this
 * method before building is therefore equivalent to calling it with
 * a parameter set to {@code true}). Not compensating it is useful
 * in two cases: for validation purposes against systems that do not
 * compensate it, or when the pixels line of sight already include the
 * correction.
 * </p>
 * @param newAberrationOfLightCorrection if true, the aberration of light
 * is corrected for more accurate location
 * @return the builder instance
 * @see #setLightTimeCorrection(boolean)
 * @see #getAberrationOfLightCorrection()
 */
public RuggedBuilder setAberrationOfLightCorrection(final boolean newAberrationOfLightCorrection) {
    this.aberrationOfLightCorrection = newAberrationOfLightCorrection;
    return this;
}
/** Get the aberration of light correction flag.
 * @return true if aberration of light is corrected
 * @see #setAberrationOfLightCorrection(boolean)
 */
public boolean getAberrationOfLightCorrection() {
    return aberrationOfLightCorrection;
}
/** Set atmospheric refraction model for line of sight correction.
 * <p>
 * This method sets an atmospheric refraction model to be used between
 * spacecraft and ground for the correction of intersected points on ground.
 * Compensating for the effect of atmospheric refraction improves location
 * accuracy. No refraction correction is applied when this method is not called.
 * </p>
 * @param newAtmosphericRefraction the atmospheric refraction model to be used for more accurate location
 * @return the builder instance
 * @see #getRefractionCorrection()
 */
public RuggedBuilder setRefractionCorrection(final AtmosphericRefraction newAtmosphericRefraction) {
    this.atmosphericRefraction = newAtmosphericRefraction;
    return this;
}
/** Get the atmospheric refraction model.
 * @return atmospheric refraction model (null if none was set)
 * @see #setRefractionCorrection(AtmosphericRefraction)
 */
public AtmosphericRefraction getRefractionCorrection() {
    return atmosphericRefraction;
}
/** Add a line sensor model to the builder.
 * @param lineSensor line sensor model
 * @return the builder instance
 * @see #clearLineSensors()
 */
public RuggedBuilder addLineSensor(final LineSensor lineSensor) {
    sensors.add(lineSensor);
    return this;
}
/** Remove all line sensors previously added to the builder.
 * @return the builder instance
 * @see #addLineSensor(LineSensor)
 */
public RuggedBuilder clearLineSensors() {
    sensors.clear();
    return this;
}
/** Get all line sensors added so far.
 * @return all line sensors, as an unmodifiable view of the internal list
 */
public List<LineSensor> getLineSensors() {
    return Collections.unmodifiableList(sensors);
}
/** Select inertial frame from its identifier.
 * @param inertialFrameId inertial frame identifier
 * @return inertial frame instance obtained from {@link FramesFactory}
 */
private static Frame selectInertialFrame(final InertialFrameId inertialFrameId) {
    // set up the inertial frame
    switch (inertialFrameId) {
        case GCRF :
            return FramesFactory.getGCRF();
        case EME2000 :
            return FramesFactory.getEME2000();
        case MOD :
            return FramesFactory.getMOD(IERSConventions.IERS_1996);
        case TOD :
            return FramesFactory.getTOD(IERSConventions.IERS_1996, true);
        case VEIS1950 :
            return FramesFactory.getVeis1950();
        default :
            // this should never happen: all enum constants are handled above
            throw new RuggedInternalError(null);
    }
}
/** Select body rotating frame from its identifier.
 * @param bodyRotatingFrame body rotating frame identifier
 * @return body rotating frame instance obtained from {@link FramesFactory}
 */
private static Frame selectBodyRotatingFrame(final BodyRotatingFrameId bodyRotatingFrame) {
    // set up the rotating frame
    switch (bodyRotatingFrame) {
        case ITRF :
            return FramesFactory.getITRF(IERSConventions.IERS_2010, true);
        case ITRF_EQUINOX :
            return FramesFactory.getITRFEquinox(IERSConventions.IERS_1996, true);
        case GTOD :
            return FramesFactory.getGTOD(IERSConventions.IERS_1996, true);
        default :
            // this should never happen: all enum constants are handled above
            throw new RuggedInternalError(null);
    }
}
/** Select reference ellipsoid from its identifier.
 * @param ellipsoidID reference ellipsoid identifier
 * @param bodyFrame body rotating frame to attach the ellipsoid to
 * @return selected ellipsoid, built from the named Orekit {@link Constants}
 */
private static OneAxisEllipsoid selectEllipsoid(final EllipsoidId ellipsoidID, final Frame bodyFrame) {
    // set up the ellipsoid
    switch (ellipsoidID) {
        case GRS80 :
            return new OneAxisEllipsoid(Constants.GRS80_EARTH_EQUATORIAL_RADIUS,
                                        Constants.GRS80_EARTH_FLATTENING,
                                        bodyFrame);
        case WGS84 :
            return new OneAxisEllipsoid(Constants.WGS84_EARTH_EQUATORIAL_RADIUS,
                                        Constants.WGS84_EARTH_FLATTENING,
                                        bodyFrame);
        case IERS96 :
            return new OneAxisEllipsoid(Constants.IERS96_EARTH_EQUATORIAL_RADIUS,
                                        Constants.IERS96_EARTH_FLATTENING,
                                        bodyFrame);
        case IERS2003 :
            return new OneAxisEllipsoid(Constants.IERS2003_EARTH_EQUATORIAL_RADIUS,
                                        Constants.IERS2003_EARTH_FLATTENING,
                                        bodyFrame);
        default :
            // this should never happen: all enum constants are handled above
            throw new RuggedInternalError(null);
    }
}
/** Create DEM intersection algorithm from its identifier.
 * @param algorithmID intersection algorithm identifier
 * @param updater updater used to load Digital Elevation Model tiles
 * (ignored by the constant-elevation and ellipsoid-only algorithms)
 * @param maxCachedTiles maximum number of tiles stored in the cache
 * @param constantElevation constant elevation over ellipsoid
 * (only used by {@code CONSTANT_ELEVATION_OVER_ELLIPSOID})
 * @return selected algorithm
 */
private static IntersectionAlgorithm createAlgorithm(final AlgorithmId algorithmID,
                                                     final TileUpdater updater, final int maxCachedTiles,
                                                     final double constantElevation) {
    // set up the algorithm
    switch (algorithmID) {
        case DUVENHAGE :
            return new DuvenhageAlgorithm(updater, maxCachedTiles, false);
        case DUVENHAGE_FLAT_BODY :
            // flat-body variant of the same algorithm
            return new DuvenhageAlgorithm(updater, maxCachedTiles, true);
        case BASIC_SLOW_EXHAUSTIVE_SCAN_FOR_TESTS_ONLY :
            return new BasicScanAlgorithm(updater, maxCachedTiles);
        case CONSTANT_ELEVATION_OVER_ELLIPSOID :
            return new ConstantElevationAlgorithm(constantElevation);
        case IGNORE_DEM_USE_ELLIPSOID :
            return new IgnoreDEMAlgorithm();
        default :
            // this should never happen: all enum constants are handled above
            throw new RuggedInternalError(null);
    }
}
/** Build a {@link Rugged} instance from the current builder configuration.
 * <p>
 * The algorithm, its required inputs (constant elevation or tile updater,
 * depending on the algorithm), the ellipsoid and a trajectory must have
 * been configured beforehand.
 * </p>
 * @return built instance
 */
public Rugged build() {
    // an intersection algorithm is mandatory
    if (algorithmID == null) {
        throw new RuggedException(RuggedMessages.UNINITIALIZED_CONTEXT, "RuggedBuilder.setAlgorithmID()");
    }
    if (algorithmID == AlgorithmId.CONSTANT_ELEVATION_OVER_ELLIPSOID) {
        // this algorithm needs no DEM, but does need the constant elevation value
        if (Double.isNaN(constantElevation)) {
            throw new RuggedException(RuggedMessages.UNINITIALIZED_CONTEXT, "RuggedBuilder.setConstantElevation()");
        }
    } else if (algorithmID != AlgorithmId.IGNORE_DEM_USE_ELLIPSOID) {
        // all remaining algorithms rely on DEM tiles, so a tile updater is required
        if (tileUpdater == null) {
            throw new RuggedException(RuggedMessages.UNINITIALIZED_CONTEXT, "RuggedBuilder.setDigitalElevationModel()");
        }
    }
    createInterpolatorIfNeeded();
    return new Rugged(createAlgorithm(algorithmID, tileUpdater, maxCachedTiles, constantElevation), ellipsoid,
                      lightTimeCorrection, aberrationOfLightCorrection, atmosphericRefraction, scToBody, sensors, name);
}
}
/* Copyright 2013-2014 CS Systèmes d'Information
* Licensed to CS Systèmes d'Information (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.api;
import java.io.Serializable;
/** Container for satellite position and velocity.
* <p>
* Instances of this class are guaranteed to be immutable.
* </p>
* @author Luc Maisonobe
*/
/** Immutable container for a satellite position/velocity state at a given date.
 * <p>
 * Dates are expressed as offsets in seconds from a reference epoch defined by
 * the caller; positions are in meters and velocities in meters per second.
 * Instances of this class are guaranteed to be immutable.
 * </p>
 * @author Luc Maisonobe
 */
public class SatellitePV implements Serializable {

    /** Serializable UID. */
    private static final long serialVersionUID = 20140309L;

    /** Date offset from reference epoch (s). */
    private final double date;

    /** Position x coordinate (m). */
    private final double px;

    /** Position y coordinate (m). */
    private final double py;

    /** Position z coordinate (m). */
    private final double pz;

    /** Velocity x coordinate (m/s). */
    private final double vx;

    /** Velocity y coordinate (m/s). */
    private final double vy;

    /** Velocity z coordinate (m/s). */
    private final double vz;

    /**
     * Build a new instance from its seven components.
     *
     * @param date date offset from reference (s)
     * @param px position along x axis (m)
     * @param py position along y axis (m)
     * @param pz position along z axis (m)
     * @param vx velocity along x axis (m/s)
     * @param vy velocity along y axis (m/s)
     * @param vz velocity along z axis (m/s)
     */
    public SatellitePV(final double date,
                       final double px, final double py, final double pz,
                       final double vx, final double vy, final double vz) {
        this.date = date;
        this.px   = px;
        this.py   = py;
        this.pz   = pz;
        this.vx   = vx;
        this.vy   = vy;
        this.vz   = vz;
    }

    /** Get the date offset from reference.
     * @return date offset from reference (s)
     */
    public double getDate() {
        return date;
    }

    /** Get the position along x axis.
     * @return position along x axis (m)
     */
    public double getPx() {
        return px;
    }

    /** Get the position along y axis.
     * @return position along y axis (m)
     */
    public double getPy() {
        return py;
    }

    /** Get the position along z axis.
     * @return position along z axis (m)
     */
    public double getPz() {
        return pz;
    }

    /** Get the velocity along x axis.
     * @return velocity along x axis (m/s)
     */
    public double getVx() {
        return vx;
    }

    /** Get the velocity along y axis.
     * @return velocity along y axis (m/s)
     */
    public double getVy() {
        return vy;
    }

    /** Get the velocity along z axis.
     * @return velocity along z axis (m/s)
     */
    public double getVz() {
        return vz;
    }

}
/* Copyright 2013-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* This package provides the principal class of Rugged library API, as well as
* the builder for Rugged instances.
*
* @author Luc Maisonobe
* @author Guylaine Prat
*
*/
package org.orekit.rugged.api;
/* Copyright 2013-2014 CS Systèmes d'Information
* Licensed to CS Systèmes d'Information (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.core;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.math3.geometry.euclidean.threed.Rotation;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.orekit.attitudes.Attitude;
import org.orekit.attitudes.AttitudeProvider;
import org.orekit.attitudes.TabulatedProvider;
import org.orekit.bodies.GeodeticPoint;
import org.orekit.data.DataProvidersManager;
import org.orekit.data.DirectoryCrawler;
import org.orekit.errors.OrekitException;
import org.orekit.frames.Frame;
import org.orekit.frames.FramesFactory;
import org.orekit.frames.Transform;
import org.orekit.orbits.CartesianOrbit;
import org.orekit.orbits.Orbit;
import org.orekit.propagation.Propagator;
import org.orekit.rugged.api.GroundPoint;
import org.orekit.rugged.api.LineDatation;
import org.orekit.rugged.api.PixelLOS;
import org.orekit.rugged.api.Rugged;
import org.orekit.rugged.api.RuggedException;
import org.orekit.rugged.api.RuggedMessages;
import org.orekit.rugged.api.SatellitePV;
import org.orekit.rugged.api.SatelliteQ;
import org.orekit.rugged.api.SensorPixel;
import org.orekit.rugged.api.TileUpdater;
import org.orekit.rugged.core.duvenhage.DuvenhageAlgorithm;
import org.orekit.rugged.core.raster.IntersectionAlgorithm;
import org.orekit.time.AbsoluteDate;
import org.orekit.time.TimeScale;
import org.orekit.time.TimeScalesFactory;
import org.orekit.utils.Constants;
import org.orekit.utils.IERSConventions;
import org.orekit.utils.ImmutableTimeStampedCache;
import org.orekit.utils.PVCoordinates;
import org.orekit.utils.PVCoordinatesProvider;
/** Top level Rugged class.
* @author Luc Maisonobe
*/
public class RuggedImpl implements Rugged {
/** UTC time scale, used to convert the textual reference date. */
private TimeScale utc;

/** Reference date from which all sensor line dates are offset. */
private AbsoluteDate referenceDate;

/** Inertial frame in which the trajectory is expressed. */
private Frame frame;

/** Reference ellipsoid for ground intersections. */
private ExtendedEllipsoid ellipsoid;

/** Converter between spacecraft and observed body frames. */
private SpacecraftToObservedBody scToBody;

/** Sensors, indexed by name. */
private final Map<String, Sensor> sensors;

/** DEM intersection algorithm. */
private IntersectionAlgorithm algorithm;

/** Simple constructor.
 * <p>
 * Only initializes the sensors map; the rest of the context is set up
 * later through one of the {@code setGeneralContext} methods.
 * </p>
 */
protected RuggedImpl() {
    sensors = new HashMap<String, Sensor>();
}
/** {@inheritDoc}
 * <p>
 * Sets up time scale, frames, ellipsoid, the spacecraft-to-body converter
 * built from the tabulated positions/velocities and quaternions, and the
 * intersection algorithm.
 * </p>
 */
@Override
public void setGeneralContext(final File orekitDataDir, final String newReferenceDate,
                              final Algorithm algorithmID, final Ellipsoid ellipsoidID,
                              final InertialFrame inertialFrameID,
                              final BodyRotatingFrame bodyRotatingFrameID,
                              final List<SatellitePV> positionsVelocities, final int pvInterpolationOrder,
                              final List<SatelliteQ> quaternions, final int aInterpolationOrder)
    throws RuggedException {
    try {
        // time reference
        utc = selectTimeScale(orekitDataDir);
        this.referenceDate = new AbsoluteDate(newReferenceDate, utc);
        // space reference
        frame = selectInertialFrame(inertialFrameID);
        ellipsoid = selectEllipsoid(ellipsoidID, selectBodyRotatingFrame(bodyRotatingFrameID));
        // orbit/attitude to body converter
        final PVCoordinatesProvider pvProvider = selectPVCoordinatesProvider(positionsVelocities, pvInterpolationOrder);
        final AttitudeProvider aProvider = selectAttitudeProvider(quaternions, aInterpolationOrder);
        scToBody = new SpacecraftToObservedBody(frame, ellipsoid.getBodyFrame(), pvProvider, aProvider);
        // intersection algorithm
        algorithm = selectAlgorithm(algorithmID);
    } catch (OrekitException oe) {
        // wrap Orekit errors, preserving the localizable message parts
        throw new RuggedException(oe, oe.getSpecifier(), oe.getParts().clone());
    }
}
/** Set up general context from a propagator.
 * <p>
 * This method is the first one that must be called, otherwise the
 * other methods will fail due to uninitialized context.
 * </p>
 * @param orekitDataDir top directory for Orekit data (null if Orekit is already configured)
 * @param newReferenceDate reference date from which all other dates are computed
 * @param algorithmID identifier of algorithm to use for Digital Elevation Model intersection
 * @param ellipsoidID identifier of reference ellipsoid
 * @param inertialFrameID identifier of inertial frame
 * @param bodyRotatingFrameID identifier of body rotating frame
 * @param propagator global propagator, also providing the attitude
 * @exception RuggedException if data needed for some frame cannot be loaded
 */
public void setGeneralContext(final File orekitDataDir, final AbsoluteDate newReferenceDate,
                              final Algorithm algorithmID, final Ellipsoid ellipsoidID,
                              final InertialFrame inertialFrameID,
                              final BodyRotatingFrame bodyRotatingFrameID,
                              final Propagator propagator)
    throws RuggedException {
    try {
        // time reference
        utc = selectTimeScale(orekitDataDir);
        this.referenceDate = newReferenceDate;
        // space reference
        frame = selectInertialFrame(inertialFrameID);
        ellipsoid = selectEllipsoid(ellipsoidID, selectBodyRotatingFrame(bodyRotatingFrameID));
        // orbit/attitude to body converter: the propagator provides both
        // position/velocity and attitude
        scToBody = new SpacecraftToObservedBody(frame, ellipsoid.getBodyFrame(),
                                                propagator, propagator.getAttitudeProvider());
        // intersection algorithm
        algorithm = selectAlgorithm(algorithmID);
    } catch (OrekitException oe) {
        // wrap Orekit errors, preserving the localizable message parts
        throw new RuggedException(oe, oe.getSpecifier(), oe.getParts().clone());
    }
}
/** Get the reference date from which all sensor line dates are offset.
 * @return reference date
 */
public AbsoluteDate getReferenceDate() {
    return referenceDate;
}
/** {@inheritDoc}
 * <p>
 * Simply delegates tile management configuration to the intersection algorithm.
 * </p>
 */
@Override
public void setUpTilesManagement(final TileUpdater updater, final int maxCachedTiles) {
    algorithm.setUpTilesManagement(updater, maxCachedTiles);
}
/** {@inheritDoc}
 * <p>
 * Splits each pixel line-of-sight into its position and direction vectors
 * and registers the resulting sensor under its name.
 * </p>
 */
@Override
public void setLineSensor(final String sensorName, final List<PixelLOS> linesOfSight, final LineDatation datationModel) {
    final int nbPixels = linesOfSight.size();
    final List<Vector3D> positions = new ArrayList<Vector3D>(nbPixels);
    final List<Vector3D> los       = new ArrayList<Vector3D>(nbPixels);
    for (int i = 0; i < nbPixels; ++i) {
        final PixelLOS pixel = linesOfSight.get(i);
        positions.add(new Vector3D(pixel.getPx(), pixel.getPy(), pixel.getPz()));
        los.add(new Vector3D(pixel.getDx(), pixel.getDy(), pixel.getDz()));
    }
    final Sensor sensor = new Sensor(sensorName, referenceDate, datationModel, positions, los);
    sensors.put(sensor.getName(), sensor);
}
/** Select time scale from Orekit data.
 * @param orekitDataDir top directory for Orekit data (if null, Orekit is assumed
 * to have already been configured by the caller)
 * @return utc time scale
 * @exception OrekitException if data needed for some frame cannot be loaded
 */
private TimeScale selectTimeScale(final File orekitDataDir)
    throws OrekitException {
    if (orekitDataDir != null) {
        // set up Orekit data from the provided directory
        DataProvidersManager.getInstance().addProvider(new DirectoryCrawler(orekitDataDir));
    }
    return TimeScalesFactory.getUTC();
}
/** Select inertial frame from its identifier.
 * @param inertialFrame inertial frame identifier
 * @return inertial frame instance obtained from {@link FramesFactory}
 * @exception OrekitException if data needed for some frame cannot be loaded
 */
private Frame selectInertialFrame(final InertialFrame inertialFrame)
    throws OrekitException {
    // set up the inertial frame
    switch (inertialFrame) {
        case GCRF :
            return FramesFactory.getGCRF();
        case EME2000 :
            return FramesFactory.getEME2000();
        case MOD :
            return FramesFactory.getMOD(IERSConventions.IERS_1996);
        case TOD :
            return FramesFactory.getTOD(IERSConventions.IERS_1996, true);
        case VEIS1950 :
            return FramesFactory.getVeis1950();
        default :
            // this should never happen: all enum constants are handled above
            throw RuggedException.createInternalError(null);
    }
}
/** Select body rotating frame from its identifier.
 * @param bodyRotatingFrame body rotating frame identifier
 * @return body rotating frame instance obtained from {@link FramesFactory}
 * @exception OrekitException if data needed for some frame cannot be loaded
 */
private Frame selectBodyRotatingFrame(final BodyRotatingFrame bodyRotatingFrame)
    throws OrekitException {
    // set up the rotating frame
    switch (bodyRotatingFrame) {
        case ITRF :
            return FramesFactory.getITRF(IERSConventions.IERS_2010, true);
        case GTOD :
            return FramesFactory.getGTOD(IERSConventions.IERS_1996, true);
        default :
            // this should never happen: all enum constants are handled above
            throw RuggedException.createInternalError(null);
    }
}
/** Select ellipsoid from its identifier.
 * @param ellipsoidID reference ellipsoid identifier
 * @param bodyFrame body rotating frame to attach the ellipsoid to
 * @return selected ellipsoid
 * @exception OrekitException if data needed for some frame cannot be loaded
 */
private ExtendedEllipsoid selectEllipsoid(final Ellipsoid ellipsoidID, final Frame bodyFrame)
    throws OrekitException {
    // set up the ellipsoid, using the named Orekit constants rather than
    // magic numbers, for consistency with the other ellipsoid selectors
    switch (ellipsoidID) {
        case GRS80 :
            return new ExtendedEllipsoid(Constants.GRS80_EARTH_EQUATORIAL_RADIUS,
                                         Constants.GRS80_EARTH_FLATTENING,
                                         bodyFrame);
        case WGS84 :
            return new ExtendedEllipsoid(Constants.WGS84_EARTH_EQUATORIAL_RADIUS,
                                         Constants.WGS84_EARTH_FLATTENING,
                                         bodyFrame);
        case IERS96 :
            return new ExtendedEllipsoid(Constants.IERS96_EARTH_EQUATORIAL_RADIUS,
                                         Constants.IERS96_EARTH_FLATTENING,
                                         bodyFrame);
        case IERS2003 :
            return new ExtendedEllipsoid(Constants.IERS2003_EARTH_EQUATORIAL_RADIUS,
                                         Constants.IERS2003_EARTH_FLATTENING,
                                         bodyFrame);
        default :
            // this should never happen: all enum constants are handled above
            throw RuggedException.createInternalError(null);
    }
}
/** Build an attitude provider interpolating through tabulated quaternions.
 * @param quaternions satellite quaternions, dated relative to the reference date
 * @param interpolationOrder order to use for interpolation
 * @return selected attitude provider
 * @exception OrekitException if data needed for some frame cannot be loaded
 */
private AttitudeProvider selectAttitudeProvider(final List<SatelliteQ> quaternions,
                                                final int interpolationOrder)
    throws OrekitException {
    // set up the attitude provider from the raw quaternion components
    // (rotation rates are not tabulated, hence Vector3D.ZERO spin)
    final List<Attitude> attitudes = new ArrayList<Attitude>(quaternions.size());
    for (final SatelliteQ sq : quaternions) {
        final AbsoluteDate date = referenceDate.shiftedBy(sq.getDate());
        final Rotation rotation = new Rotation(sq.getQ0(), sq.getQ1(), sq.getQ2(), sq.getQ3(), true);
        attitudes.add(new Attitude(date, frame, rotation, Vector3D.ZERO));
    }
    return new TabulatedProvider(attitudes, interpolationOrder, false);
}
/** Build a position/velocity provider interpolating through tabulated states.
 * @param positionsVelocities satellite position and velocity samples,
 * dated relative to the reference date
 * @param interpolationOrder order to use for interpolation
 * @return selected position/velocity provider
 * @exception OrekitException if data needed for some frame cannot be loaded
 */
private PVCoordinatesProvider selectPVCoordinatesProvider(final List<SatellitePV> positionsVelocities,
                                                          final int interpolationOrder)
    throws OrekitException {
    // set up the ephemeris: each sample becomes a Cartesian orbit so that
    // orbit interpolation can be used between samples
    final List<Orbit> orbits = new ArrayList<Orbit>(positionsVelocities.size());
    for (final SatellitePV pv : positionsVelocities) {
        final AbsoluteDate date = referenceDate.shiftedBy(pv.getDate());
        final Vector3D position = new Vector3D(pv.getPx(), pv.getPy(), pv.getPz());
        final Vector3D velocity = new Vector3D(pv.getVx(), pv.getVy(), pv.getVz());
        final CartesianOrbit orbit = new CartesianOrbit(new PVCoordinates(position, velocity),
                                                        frame, date, Constants.EIGEN5C_EARTH_MU);
        orbits.add(orbit);
    }
    final ImmutableTimeStampedCache<Orbit> cache =
            new ImmutableTimeStampedCache<Orbit>(interpolationOrder, orbits);
    return new PVCoordinatesProvider() {
        /** {@inheritDoc} */
        @Override
        public PVCoordinates getPVCoordinates(final AbsoluteDate date, final Frame f)
            throws OrekitException {
            // interpolate among the nearest neighbors of the requested date
            final List<Orbit> sample = cache.getNeighbors(date);
            final Orbit interpolated = sample.get(0).interpolate(date, sample);
            return interpolated.getPVCoordinates(date, f);
        }
    };
}
/** Select DEM intersection algorithm.
* @param algorithmID intersection algorithm identifier
* @return selected algorithm
*/
private IntersectionAlgorithm selectAlgorithm(final Algorithm algorithmID) {
// set up the algorithm
switch (algorithmID) {
case DUVENHAGE :
return new DuvenhageAlgorithm();
case BASIC_SLOW_EXHAUSTIVE_SCAN_FOR_TESTS_ONLY :
return new BasicScanAlgorithm();
case IGNORE_DEM_USE_ELLIPSOID :
return new IgnoreDEMAlgorithm();
default :
// this should never happen
throw RuggedException.createInternalError(null);
}
}
/** {@inheritDoc} */
@Override
public GroundPoint[] directLocalization(final String sensorName, final double lineNumber)
throws RuggedException {
try {
checkContext();
// select the sensor
final Sensor sensor = getSensor(sensorName);
// compute once the transform between spacecraft and observed body
final AbsoluteDate date = sensor.getDate(lineNumber);
final Transform t = scToBody.getTransform(date);
// compute localization of each pixel
final GroundPoint[] gp = new GroundPoint[sensor.getNbPixels()];
for (int i = 0; i < gp.length; ++i) {
final GeodeticPoint geodetic =
algorithm.intersection(ellipsoid,
t.transformPosition(sensor.getPosition(i)),
t.transformVector(sensor.getLos(i)));
gp[i] = new GroundPoint(geodetic.getLatitude(),
geodetic.getLongitude(),
geodetic.getAltitude());
}
return gp;
} catch (OrekitException oe) {
throw new RuggedException(oe, oe.getSpecifier(), oe.getParts());
}
}
    /** {@inheritDoc} */
    @Override
    public SensorPixel inverseLocalization(final String sensorName, final GroundPoint groundPoint)
        throws RuggedException {
        checkContext();
        // the lookup validates the sensor name even though the result is not used yet
        final Sensor sensor = getSensor(sensorName);
        // TODO: implement inverse localization (not available yet; fails unconditionally)
        throw RuggedException.createInternalError(null);
    }
/** Check if context has been initialized.
* @exception RuggedException if context has not been initialized
*/
private void checkContext() throws RuggedException {
if (frame == null) {
throw new RuggedException(RuggedMessages.UNINITIALIZED_CONTEXT);
}
}
/** Get a sensor.
* @param sensorName sensor name
* @return selected sensor
* @exception RuggedException if sensor is not known
*/
private Sensor getSensor(final String sensorName) throws RuggedException {
final Sensor sensor = sensors.get(sensorName);
if (sensor == null) {
throw new RuggedException(RuggedMessages.UNKNOWN_SENSOR, sensorName);
}
return sensor;
}
}