# Copyright 2021 United States Government as represented by the Administrator of the National Aeronautics and Space
# Administration. No copyright is claimed in the United States under Title 17, U.S. Code. All Other Rights Reserved.
"""
This module provides a subclass of the :class:`.OpNav` class for performing stellar OpNav.
Interface Description
---------------------
In GIANT, Stellar OpNav refers to the process of identifying stars in an image and then extracting the attitude
information from those stars. This module combines these two processes into the single process of Stellar OpNav.
The :class:`StellarOpNav` class is the main interface for performing stellar OpNav in GIANT, and in general is all
the user will need to interact with in order to process the images. It provides direct access to the
:class:`.ImageProcessing`, :class:`.StarID`, and :mod:`.stellar_opnav.estimators` objects and automatically performs the
required data transfer between the objects for you. To begin you simply provide the :class:`StellarOpNav` constructor
a :class:`.Camera` instance, a :class:`.ImageProcessing` instance or the keyword arguments to create one,
a :class:`.StarID` instance or the keyword arguments to create one, and the attitude estimation object you wish to use
to perform the attitude estimation. You can then use the :class:`StellarOpNav` instance to perform all of the aspects of
stellar OpNav without ever having to interact with the internal objects again.
For example, we could do something like the following (from the directory containing ``sample_data`` as generated by a
call to :mod:`.generate_sample_data`):
>>> import pickle
>>> from giant.stellar_opnav import StellarOpNav
>>> with open('sample_data/camera.pickle', 'rb') as in_file:
... camera = pickle.load(in_file)
>>> camera.only_long_on()
>>> my_sid = StellarOpNav(camera)
>>> my_sid.id_stars() # id the stars for each image
>>> my_sid.sid_summary() # print out a summary of the star identification success for each image
>>> my_sid.estimate_attitude() # estimate an updated attitude for each image
In order to identify stars and estimate the image attitude based off of those stars for all of our images.
For a more general description of the steps needed to perform stellar OpNav, refer to the :mod:`.stellar_opnav`
documentation. For a more in-depth examination of the :class:`.StellarOpNav` class continue through the following class
documentation.
"""
import warnings
import time
from typing import Union, Iterable, List, Optional
import numpy as np
import pandas as pd
from matplotlib.path import Path
from giant.stellar_opnav import estimators as est
from giant.stellar_opnav.star_identification import StarID
from giant.utilities.outlier_identifier import get_outliers
from giant.opnav_class import OpNav
from giant.camera import Camera
from giant.image_processing import ImageProcessing
from giant.ray_tracer.scene import Scene
from giant._typing import ARRAY_LIKE, ARRAY_LIKE_2D, PATH, Real, NONEARRAY
_ANGLES: np.ndarray = np.linspace(0, 2 * np.pi, 360)
"""
An internal array used for checking for interior points in extended bodies.
"""
_SCAN_VECTORS: np.ndarray = np.vstack([np.cos(_ANGLES), np.sin(_ANGLES), np.zeros(_ANGLES.size)])
"""
An internal array used for checking for interior points in extended bodies.
"""
class StellarOpNav(OpNav):
"""
This class serves as the main user interface for performing Stellar Optical Navigation.
The class acts as a container for the :class:`.Camera`, :class:`.ImageProcessing`, and
:mod:`.stellar_opnav.estimators` objects and also passes the correct and up-to-date data from one
object to the other. In general, this class will be the exclusive interface to the mentioned objects and models
for the user.
This class provides a number of features that make doing stellar OpNav easy. The first is it provides aliases to
the image processing, star id, and attitude estimation objects. These
aliases make it easy to quickly change/update the various tuning parameters that are necessary to make star
identification a success. In addition to providing convenient access to the underlying settings, some of these
aliases also update internal flags that specify whether individual images need to be reprocessed, saving computation
time when you're trying to find the best tuning.
This class also provides simple methods for performing star identification and attitude estimation after
you have set the tuning parameters. These methods (:meth:`id_stars`, :meth:`sid_summary`, and
:meth:`estimate_attitude`) combine all of the required steps into a few simple calls, and pass the resulting data
from one object to the next. They also store off the results of the star identification in the
:attr:`queried_catalogue_star_records`, :attr:`queried_catalogue_image_points`,
:attr:`queried_catalogue_unit_vectors`, :attr:`ip_extracted_image_points`, :attr:`ip_image_illums`,
:attr:`ip_psfs`, :attr:`ip_stats`, :attr:`ip_snrs`
:attr:`unmatched_catalogue_image_points`, :attr:`unmatched_image_illums`,
:attr:`unmatched_psfs`, :attr:`unmatched_stats`, :attr:`unmatched_snrs`
:attr:`unmatched_catalogue_star_records`,
:attr:`unmatched_catalogue_unit_vectors`,
:attr:`unmatched_extracted_image_points`,
:attr:`matched_catalogue_image_points`, :attr:`matched_image_illums`,
:attr:`matched_psfs`, :attr:`matched_stats`, :attr:`matched_snrs`
:attr:`matched_catalogue_star_records`,
:attr:`matched_catalogue_unit_vectors_inertial`,
:attr:`matched_catalogue_unit_vectors_camera`, and
:attr:`matched_extracted_image_points` attributes, enabling more advanced analysis to be performed external to the
class.
Finally, this class stores the updated attitude solutions in the image objects themselves, allowing you to directly
pass your images from stellar OpNav to the :mod:`.relative_opnav` routines with updated attitude solutions. It also
respects the :attr:`.image_mask` attribute of the :class:`.Camera` object, only considering images that are
currently turned on.
When initializing this class, most of the initial options can be set using the ``*_kwargs`` inputs with
dictionaries specifying the keyword arguments and values. Alternatively, you can provide already initialized
instances of the :class:`.ImageProcessing`, :class:`.AttitudeEstimator`, or :class:`.StarID` classes or subclasses
if you want a little more control. You should see the documentation for the
:class:`.ImageProcessing`, :class:`.AttitudeEstimator`, and :class:`.StarID` classes for more details on what you
can specify.
"""
def __init__(self, camera: Camera, use_weights: bool = False, scene: Optional[Scene] = None,
             image_processing: Optional[ImageProcessing] = None, image_processing_kwargs: Optional[dict] = None,
             star_id: Optional[StarID] = None, star_id_kwargs: Optional[dict] = None,
             attitude_estimator: Optional[est.AttitudeEstimator] = None,
             attitude_estimator_kwargs: Optional[dict] = None):
    """
    :param camera: The :class:`.Camera` object containing the camera model and images to be utilized
    :param use_weights: A flag specifying whether to use weighted estimation for attitude estimation
    :param scene: Optionally, a scene defining the targets that may be in the FOV of the camera used to reject
                  points interior to a body as stars.  If ``None`` then no attempt is made to reject points that
                  might be interior to a body.  If not ``None`` then we will attempt to reject these points using
                  a priori knowledge
    :param image_processing: An already initialized instance of :class:`.ImageProcessing` (or a subclass). If not
                             ``None`` then ``image_processing_kwargs`` are ignored.
    :param image_processing_kwargs: The keyword arguments to pass to the :class:`.ImageProcessing` class
                                    constructor.  These are ignored if argument ``image_processing`` is not
                                    ``None``
    :param star_id: An already initialized instance of :class:`.StarID` (or a subclass). If not ``None`` then
                    ``star_id_kwargs`` are ignored.
    :param star_id_kwargs: The keyword arguments to pass to the :class:`.StarID` class constructor as
                           a dictionary.  These are ignored if argument ``star_id`` is not ``None``.
    :param attitude_estimator: An already initialized instance of :class:`.AttitudeEstimator` (or a subclass). If
                               not ``None`` then ``attitude_estimator_kwargs`` are ignored.
    :param attitude_estimator_kwargs: The keyword arguments to pass to the :class:`.DavenportQMethod`
                                      constructor as a dictionary.  If argument ``attitude_estimator`` is not
                                      ``None`` then this is ignored.
    """

    # let the base class set up the camera and the image processing object
    super().__init__(camera, image_processing=image_processing, image_processing_kwargs=image_processing_kwargs)

    self.use_weights: bool = use_weights
    """
    A flag specifying whether to compute weights/use them in the attitude estimation routine
    """

    self.scene: Optional[Scene] = scene
    """
    Optionally, a scene defining the targets that may be in the FOV of the camera used to reject points interior
    to a body as stars.

    If ``None`` then no attempt is made to reject points that might be interior to a body.  If not ``None`` then
    we will attempt to reject these points using a priori knowledge.
    """

    # a user supplied star ID instance takes precedence over keyword arguments
    if star_id is not None:
        self._star_id = star_id
    else:
        self._star_id = StarID(self._camera.model, **(star_id_kwargs or {}))

    # a user supplied attitude estimator takes precedence over keyword arguments
    if attitude_estimator is not None:
        self._attitude_est = attitude_estimator
    else:
        self._attitude_est = est.DavenportQMethod(**(attitude_estimator_kwargs or {}))

    # record the initial configuration of the star ID / attitude estimation objects
    self._initial_star_id_kwargs = star_id_kwargs
    self._initial_attitude_est = self._attitude_est.__class__
    self._initial_attitude_est_kwargs = attitude_estimator_kwargs

    # initialize the various lists for storing the results and specifying whether stars need to be extracted from
    # the images or not.  Every per-image result list gets one empty slot per image.
    num_images = len(self._camera.images)

    def empty_slots() -> List[Optional[np.ndarray]]:
        # a fresh list with one empty (None) result slot per image in the camera
        return [None] * num_images

    self.process_stars: List[bool] = [True] * num_images
    """
    This list contains a boolean specifying whether the corresponding image needs to be processed using image
    processing again.

    This typically is automatically updated and you shouldn't have to worry about it.  It is included for speed.
    """

    # results from the image processing routines
    self._ip_extracted_image_points = empty_slots()
    self._ip_image_illums = empty_slots()
    self._ip_psfs = empty_slots()
    self._ip_stats = empty_slots()
    self._ip_snrs = empty_slots()

    # results from querying the star catalogue
    self._queried_catalogue_star_records = empty_slots()
    self._queried_catalogue_image_points = empty_slots()
    self._queried_catalogue_unit_vectors = empty_slots()
    self._queried_weights_inertial = empty_slots()
    self._queried_weights_picture = empty_slots()

    self._extracted_image_points = empty_slots()

    # catalogue/image information that was not paired during star identification
    self._unmatched_catalogue_image_points = empty_slots()
    self._unmatched_catalogue_star_records = empty_slots()
    self._unmatched_catalogue_unit_vectors = empty_slots()
    self._unmatched_weights_inertial = empty_slots()
    self._unmatched_weights_picture = empty_slots()
    self._unmatched_extracted_image_points = empty_slots()
    self._unmatched_image_illums = empty_slots()
    self._unmatched_psfs = empty_slots()
    self._unmatched_ip_stats = empty_slots()
    self._unmatched_ip_snrs = empty_slots()

    # catalogue/image information that was successfully paired during star identification
    self._matched_catalogue_image_points = empty_slots()
    self._matched_catalogue_star_records = empty_slots()
    self._matched_catalogue_unit_vectors_inertial = empty_slots()
    self._matched_catalogue_unit_vectors_camera = empty_slots()
    self._matched_weights_inertial = empty_slots()
    self._matched_weights_picture = empty_slots()
    self._matched_extracted_image_points = empty_slots()
    self._matched_image_illums = empty_slots()
    self._matched_psfs = empty_slots()
    self._matched_ip_stats = empty_slots()
    self._matched_ip_snrs = empty_slots()
# ____________________________________________Camera Properties____________________________________________
def add_images(self, data: Union[Iterable[Union[PATH, ARRAY_LIKE_2D]], PATH, ARRAY_LIKE_2D],
               parse_data: bool = True, preprocessor: bool = True):
    """
    This is essentially an alias to the :meth:`.Camera.add_images` method, but it also expands various lists to
    account for the new number of images.

    When you have already initialized a :class:`StellarOpNav` class you should *always* use this method to add
    images for consideration.

    The lists that are extended by this method are all of the per-image result lists -- that is, the ``ip_*``,
    ``queried_*``, ``unmatched_*``, and ``matched_*`` attributes -- as well as :attr:`process_stars`.

    See the :meth:`.Camera.add_images` for a description of the valid input for `data`

    :param data: The image data to be stored in the :attr:`.images` list
    :param parse_data: A flag to specify whether to attempt to parse the metadata automatically for the images
    :param preprocessor: A flag to specify whether to run the preprocessor after loading an image.
    """

    super().add_images(data, parse_data=parse_data, preprocessor=preprocessor)

    # a list or tuple adds one image per element; anything else is treated as a single image
    number_added = len(data) if isinstance(data, (list, tuple)) else 1

    for _ in range(number_added):
        self._append_result_slots()

def _append_result_slots(self):
    """
    Append a single empty slot to every per-image result list and flag the new image for star processing.

    This keeps all of the per-image bookkeeping lists the same length as the :attr:`.Camera.images` list.
    """

    self.process_stars.append(True)

    # NOTE(review): ``_extracted_image_points`` is intentionally not extended here, matching the original
    # behavior of this method -- confirm whether it should also receive a slot.
    for result_list in (self._ip_extracted_image_points,
                        self._ip_image_illums,
                        self._ip_psfs,
                        self._ip_stats,
                        self._ip_snrs,
                        self._queried_catalogue_star_records,
                        self._queried_catalogue_image_points,
                        self._queried_catalogue_unit_vectors,
                        self._queried_weights_inertial,
                        self._queried_weights_picture,
                        self._unmatched_catalogue_star_records,
                        self._unmatched_catalogue_image_points,
                        self._unmatched_catalogue_unit_vectors,
                        self._unmatched_extracted_image_points,
                        self._unmatched_psfs,
                        self._unmatched_image_illums,
                        self._unmatched_weights_inertial,
                        self._unmatched_weights_picture,
                        self._unmatched_ip_stats,
                        self._unmatched_ip_snrs,
                        self._matched_catalogue_star_records,
                        self._matched_catalogue_image_points,
                        self._matched_catalogue_unit_vectors_inertial,
                        self._matched_catalogue_unit_vectors_camera,
                        self._matched_extracted_image_points,
                        self._matched_weights_inertial,
                        self._matched_weights_picture,
                        self._matched_psfs,
                        self._matched_image_illums,
                        self._matched_ip_stats,
                        self._matched_ip_snrs):
        result_list.append(None)
@OpNav.model.setter
def model(self, val):
    # Keep the camera model and the star ID object's model in sync: star identification
    # projects catalogue stars through the same camera model used everywhere else.
    # don't know why we can't dispatch here...
    self._camera.model = val
    self._star_id.model = val
# ____________________________________________ Aliases ____________________________________________
@property
def star_id(self):
    """
    The StarID instance to use when doing star identification

    This should be an instance of the :class:`.StarID` class.

    See the :class:`.StarID` class documentation for more details
    """
    return self._star_id

@star_id.setter
def star_id(self, val):
    # warn (but do not reject) when the supplied object is not a StarID subclass
    if not isinstance(val, StarID):
        warnings.warn("The star_id object should probably subclass the StarID class\n"
                      "We'll assume you know what you're doing for now, but "
                      "see the StarID documentation for details")
    self._star_id = val
@property
def attitude_estimator(self):
    """
    The attitude estimator to use in the RANSAC algorithm

    This should typically be an instance of the :class:`.DavenportQMethod` class.

    See the :class:`.DavenportQMethod` class documentation for more details
    """
    return self._attitude_est

@attitude_estimator.setter
def attitude_estimator(self, val):
    if not isinstance(val, est.DavenportQMethod):
        # fixed copy/paste error: the message previously referred to "the star_id object"
        warnings.warn("The attitude_estimator object should probably subclass the DavenportQMethod class\n"
                      "We'll assume you know what you're doing for now, but "
                      "see the DavenportQMethod documentation for details")
    self._attitude_est = val
@OpNav.image_processing.getter
def image_processing(self) -> ImageProcessing:
    """
    The ImageProcessing instance to use when doing image processing on the images

    This must be an instance of the :class:`.ImageProcessing` class.

    See the :class:`.ImageProcessing` class documentation for more details
    """
    # accessing the image processing object may change its tuning, so flag every image
    # as needing reprocessing.  Slice-assign to mutate the existing list in place.
    self.process_stars[:] = [True] * len(self.process_stars)
    return self._image_processing
# ___________________________________________________ PROPERTIES ______________________________________________
# queried information
@property
def queried_catalogue_star_records(self) -> List[Optional[pd.DataFrame]]:
    """
    This list contains all of the star records queried from the star catalogue for each image in :attr:`camera`
    for the most recent query to the star catalogue (this gets overwritten when the star catalogue is re-queried
    for the image).

    Each list element is a pandas DataFrame containing GIANT star records.  A GIANT star record has the
    following columns:

    ===================== ======== ================================================================================
    column                units    description
    ===================== ======== ================================================================================
    `'ra'`                deg      The right ascension of the star after correcting for proper motion
    `'dec'`               deg      The declination of the star after correcting for proper motion
    `'distance'`          km       The distance to the star from the Solar system barycenter (converted from
                                   parallax).  This column has a default value of 5.428047027e15 if no parallax
                                   information is provided by the catalogue.
    `'ra_proper_motion'`  deg/year The proper motion for the right ascension
    `'dec_proper_motion'` deg/year The proper motion for the declination
    `'mag'`               N/A      The apparent magnitude of the star according to the star catalogue
    `'ra_sigma'`          deg      The formal uncertainty in the right ascension according to the catalogue
    `'dec_sigma'`         deg      The formal uncertainty in the declination according to the catalogue
    `'distance_sigma'`    km       The formal uncertainty in the distance according to the catalogue
                                   (converted from parallax).  This has a default value of
                                   1.9949433041226756e+19 km for stars with no parallax information.
    `'ra_pm_sigma'`       deg/year The formal uncertainty in the right ascension proper motion according to
                                   the catalogue
    `'dec_pm_sigma'`      deg/year The formal uncertainty in the declination proper motion according to the
                                   catalogue
    ===================== ======== ================================================================================

    Each row of the data frame represents a star record.  Each row of the DataFrame matches to the corresponding
    column in the :attr:`queried_catalogue_unit_vectors` and :attr:`queried_catalogue_image_points` arrays.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._queried_catalogue_star_records
@property
def queried_catalogue_image_points(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the projections of all of the star records queried from the star catalogue for each image
    in :attr:`camera` from the most recent query to the star catalogue (this gets overwritten when the star
    catalogue is re-queried for the image, and also when the attitude is updated for an image).

    Each list element is a 2xn array of image points with the x locations (columns) down the first row and the y
    locations (rows) down the second row.  Each column of the array matches to the corresponding column in the
    :attr:`queried_catalogue_unit_vectors` array and the corresponding row in the
    :attr:`queried_catalogue_star_records` DataFrame.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._queried_catalogue_image_points
@property
def queried_catalogue_unit_vectors(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the inertial unit vectors of all of the star records queried from the star catalogue for
    each image in :attr:`camera` from the most recent query to the star catalogue (this gets overwritten when the
    star catalogue is re-queried for the image).  The unit vectors are the directions to the stars after
    correcting for proper motion, stellar aberration, and parallax.

    Each list element is a 3xn array of unit vectors with each unit vector stored as a column.
    Each column of the array matches to the corresponding column in the :attr:`queried_catalogue_image_points`
    array and the corresponding row in the :attr:`queried_catalogue_star_records` DataFrame.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._queried_catalogue_unit_vectors
@property
def queried_weights_inertial(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the uncertainty of all of the star records queried from the star catalogue for each
    image in :attr:`camera` from the most recent query to the star catalogue (this gets overwritten when the star
    catalogue is re-queried for the image).  The uncertainties are based on the formal uncertainty of the star
    location according to the star catalogue and are for the unit vector representation.

    A single value is provided for each unit vector representing the sum of the squares of the uncertainty (the
    trace of the covariance matrix).  Each element of the array matches to the corresponding column in the
    :attr:`queried_catalogue_image_points` array and the corresponding row in the
    :attr:`queried_catalogue_star_records` DataFrame.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._queried_weights_inertial
@property
def queried_weights_picture(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the uncertainty of all of the star records queried from the star catalogue for each
    image in :attr:`camera` from the most recent query to the star catalogue (this gets overwritten when the star
    catalogue is re-queried for the image).  The uncertainties are based on the formal uncertainty of the star
    location according to the star catalogue and are for the pixel representation of the star location.

    A single value is provided for each unit vector representing the sum of the squares of the uncertainty (the
    trace of the covariance matrix).  Each element of the array matches to the corresponding column in the
    :attr:`queried_catalogue_image_points` array and the corresponding row in the
    :attr:`queried_catalogue_star_records` DataFrame.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._queried_weights_picture
@property
def ip_extracted_image_points(self) -> List[Optional[np.ndarray]]:
    """
    This list contains all of the image points of interest that were identified by the image processing routines
    for all of the images in :attr:`camera` from the most recent processing of the image (this gets overwritten
    when the image is reprocessed through the image processing routines).

    Each list element is a 2xn array of image points with the x locations (cols) down the first row and the y
    locations (rows) down the second row.  Each column of this array matches with the corresponding element in
    the :attr:`ip_image_illums` array.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._ip_extracted_image_points
@property
def ip_image_illums(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the DN values of the pixels containing the image points of interest
    for each image in :attr:`camera` that were identified by the image processing routines as potential stars
    (this gets overwritten when the image is reprocessed through the image processing routines).

    Each list element is a length n array of image DN values.  Each element of the array matches to the
    corresponding column in the :attr:`ip_extracted_image_points` array.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._ip_image_illums
@property
def ip_psfs(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the point spread function object for each object identified as a potential star
    for each image in :attr:`camera` that were identified by the image processing routines as potential stars
    (this gets overwritten when the image is reprocessed through the image processing routines).

    This list is only updated if :attr:`.ImageProcessing.save_psf` is set to ``True``.

    Each list element is a length n array of PSF objects.  Each element of the array matches to the corresponding
    column in the :attr:`ip_extracted_image_points` array.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._ip_psfs
@property
def ip_stats(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the connected component stats for each object identified as a potential star
    for each image in :attr:`camera` that were identified by the image processing routines as potential stars
    (this gets overwritten when the image is reprocessed through the image processing routines).

    Each list element is a length n list of stats.  Each element of the list matches to the corresponding
    column in the :attr:`ip_extracted_image_points` array.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._ip_stats
@property
def ip_snrs(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the peak SNR value for each object identified as a potential star
    for each image in :attr:`camera` that were identified by the image processing routines as potential stars
    (this gets overwritten when the image is reprocessed through the image processing routines).

    Each list element is a length n list of SNR values.  Each element of the list matches to the corresponding
    column in the :attr:`ip_extracted_image_points` array.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._ip_snrs
# unmatched information
@property
def unmatched_catalogue_star_records(self) -> List[Optional[pd.DataFrame]]:
    """
    This list contains the star records queried from the star catalogue that were not matched with an image
    point of interest for each image in :attr:`camera` for the most recent star identification attempt (this
    gets overwritten when a new attempt is made at identifying the stars in the image).

    Each list element is a pandas DataFrame containing GIANT star records.  See
    :attr:`queried_catalogue_star_records` for a full description of the columns of a GIANT star record.

    Each row of the data frame represents a star record.  Each row of the DataFrame matches to the corresponding
    column in the :attr:`unmatched_catalogue_unit_vectors` and :attr:`unmatched_catalogue_image_points` arrays.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._unmatched_catalogue_star_records
@property
def unmatched_catalogue_image_points(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the projections of the star records queried from the star catalogue that were not matched
    with an image point of interest for each image in :attr:`camera` from the most recent star identification
    attempt (this gets overwritten when a new attempt is made at identifying the stars in the image, and also
    when a new attitude is solved for the image).

    Each list element is a 2xn array of image points with the x locations (columns) down the first row and the y
    locations (rows) down the second row.  Each column of the array matches to the corresponding column in the
    :attr:`unmatched_catalogue_unit_vectors` array and the corresponding row in the
    :attr:`unmatched_catalogue_star_records` DataFrame.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._unmatched_catalogue_image_points
@property
def unmatched_catalogue_unit_vectors(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the inertial unit vectors of the star records queried from the star catalogue that were
    not matched with an image point of interest for each image in :attr:`camera` from the most recent star
    identification attempt (this gets overwritten when a new attempt is made at identifying the stars in the
    image).  The unit vectors are the directions to the stars after correcting for proper motion, stellar
    aberration, and parallax.

    Each list element is a 3xn array of unit vectors with each unit vector stored as a column.
    Each column of the array matches to the corresponding column in the :attr:`unmatched_catalogue_image_points`
    array and the corresponding row in the :attr:`unmatched_catalogue_star_records` DataFrame.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._unmatched_catalogue_unit_vectors
@property
def unmatched_weights_inertial(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the uncertainty of all of the star records queried from the star catalogue that were not
    matched with an image point of interest for each image in :attr:`camera` from the most recent query to the
    star catalogue (this gets overwritten when the star catalogue is re-queried for the image).  The
    uncertainties are based on the formal uncertainty of the star location according to the star catalogue and
    are for the unit vector representation.

    A single value is provided for each unit vector representing the sum of the squares of the uncertainty (the
    trace of the covariance matrix).  Each element of the array matches to the corresponding column in the
    :attr:`unmatched_catalogue_image_points` array and the corresponding row in the
    :attr:`unmatched_catalogue_star_records` DataFrame.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._unmatched_weights_inertial
@property
def unmatched_weights_picture(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the uncertainty of all of the star records queried from the star catalogue that were not
    matched with an image point of interest for each image in :attr:`camera` from the most recent query to the
    star catalogue (this gets overwritten when the star catalogue is re-queried for the image).  The
    uncertainties are based on the formal uncertainty of the star location according to the star catalogue and
    are for the pixel representation of the star location.

    A single value is provided for each unit vector representing the sum of the squares of the uncertainty (the
    trace of the covariance matrix).  Each element of the array matches to the corresponding column in the
    :attr:`unmatched_catalogue_image_points` array and the corresponding row in the
    :attr:`unmatched_catalogue_star_records` DataFrame.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._unmatched_weights_picture
@property
def unmatched_extracted_image_points(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the image points of interest that were not matched with a catalogue star for each image
    in :attr:`camera` from the most recent star identification attempt (this gets overwritten when a new attempt
    is made at identifying the stars in the image).

    Each list element is a 2xn array of image points with the x locations (columns) down the first row and the y
    locations (rows) down the second row.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._unmatched_extracted_image_points
@property
def unmatched_image_illums(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the DN values of the pixel containing the image points of interest that are not matched
    with a catalogue star for each image in :attr:`camera` from the most recent star identification attempt
    (this gets overwritten when a new attempt is made at identifying the stars in the image).

    Each list element is a length n array of image DN values.  Each row of the array matches to the
    corresponding column in the :attr:`unmatched_extracted_image_points` array.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.

    If no stars have been successfully identified for an image then the corresponding index of the list will be
    set to ``None``.
    """
    return self._unmatched_image_illums
@property
def unmatched_stats(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the connected component stats of each object that is not matched with a catalogue star
    for each image in :attr:`camera` from the most recent star identification attempt (this gets overwritten
    when a new attempt is made at identifying the stars in the image).

    Each list element is a length n array of OpenCV connected components statistics.  Each row of the array
    matches to the corresponding column in the :attr:`unmatched_extracted_image_points` array.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.

    If no stars have been successfully identified for an image then the corresponding index of the list will be
    set to ``None``.
    """
    return self._unmatched_ip_stats
@property
def unmatched_snrs(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the peak snr value of each object that is not matched with a catalogue star for each
    image in :attr:`camera` from the most recent star identification attempt (this gets overwritten when a new
    attempt is made at identifying the stars in the image).

    Each list element is a length n array of peak signal-to-noise ratio values.  Each row of the array matches
    to the corresponding column in the :attr:`unmatched_extracted_image_points` array.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.

    If no stars have been successfully identified for an image then the corresponding index of the list will be
    set to ``None``.
    """
    return self._unmatched_ip_snrs
@property
def unmatched_psfs(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the fit PSF object for each unmatched image point from the most recent star
    identification attempt (this gets overwritten when a new attempt is made at identifying the stars in the
    image).

    This list is only updated if :attr:`.ImageProcessing.save_psf` is set to ``True``.

    Each list element is a shape n array of PSF objects.  Each element of the array matches to the corresponding
    column in the :attr:`unmatched_catalogue_unit_vectors`, :attr:`unmatched_catalogue_image_points`, and the
    :attr:`unmatched_extracted_image_points` arrays.  The PSF object in each element is the same type as
    :attr:`.ImageProcessing.centroiding`.  See the documentation for the :mod:`.point_spread_functions` package
    for details.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._unmatched_psfs
# matched information
@property
def matched_catalogue_star_records(self) -> List[Optional[pd.DataFrame]]:
    """
    This list contains the star records queried from the star catalogue that were matched with an image point of
    interest for each image in :attr:`camera` for the most recent star identification attempt (this gets
    overwritten when a new attempt is made at identifying the stars in the image).

    Each list element is a pandas DataFrame containing GIANT star records.  A GIANT star record has the
    following columns:

    ===================== ======== ==========================================================================
    column                units    description
    ===================== ======== ==========================================================================
    `'ra'`                deg      The right ascension of the star after correcting for proper motion
    `'dec'`               deg      The declination of the star after correcting for proper motion
    `'distance'`          km       The distance to the star from the Solar system barycenter (converted
                                   from parallax).  This column has a default value of 5.428047027e15 if
                                   no parallax information is provided by the catalogue.
    `'ra_proper_motion'`  deg/year The proper motion for the right ascension
    `'dec_proper_motion'` deg/year The proper motion for the declination
    `'mag'`               N/A      The apparent magnitude of the star according to the star catalogue
    `'ra_sigma'`          deg      The formal uncertainty in the right ascension according to the catalogue
    `'dec_sigma'`         deg      The formal uncertainty in the declination according to the catalogue
    `'distance_sigma'`    km       The formal uncertainty in the distance according to the catalogue
                                   (converted from parallax).  This has a default value of
                                   1.9949433041226756e+19 km for stars with no parallax information.
    `'ra_pm_sigma'`       deg/year The formal uncertainty in the right ascension proper motion according
                                   to the catalogue
    `'dec_pm_sigma'`      deg/year The formal uncertainty in the declination proper motion according to
                                   the catalogue
    ===================== ======== ==========================================================================

    Each row of the data frame represents a star record.  Each row of the DataFrame matches to the corresponding
    column in the :attr:`matched_catalogue_unit_vectors_inertial`, :attr:`matched_catalogue_unit_vectors_camera`,
    :attr:`matched_catalogue_image_points`, and :attr:`matched_extracted_image_points` arrays, and the
    corresponding row in the :attr:`matched_image_illums` array.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.

    If no stars have been successfully identified for an image then the corresponding index of the list will be
    set to ``None``.
    """
    return self._matched_catalogue_star_records
@property
def matched_catalogue_image_points(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the projections of the star records queried from the star catalogue that were matched
    with an image point of interest for each image in :attr:`camera` from the most recent star identification
    attempt (this gets overwritten when a new attempt is made at identifying the stars in the image, and also
    when a new attitude is solved for the image).

    Each list element is a 2xn array of image points with the x locations (columns) down the first row and the y
    locations (rows) down the second row.  Each column of the array matches to the corresponding column in the
    :attr:`matched_catalogue_unit_vectors_inertial`, :attr:`matched_catalogue_unit_vectors_camera`,
    and :attr:`matched_extracted_image_points` arrays, and the corresponding row in the
    :attr:`matched_image_illums` and :attr:`matched_catalogue_star_records` arrays.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.

    If no stars have been successfully identified for an image then the corresponding index of the list will be
    set to ``None``.
    """
    return self._matched_catalogue_image_points
@property
def matched_catalogue_unit_vectors_inertial(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the inertial unit vectors of the star records queried from the star catalogue that were
    matched with an image point of interest for each image in :attr:`camera` from the most recent star
    identification attempt (this gets overwritten when a new attempt is made at identifying the stars in the
    image).  The unit vectors are the directions to the stars after correcting for proper motion, stellar
    aberration, and parallax.

    Each list element is a 3xn array of unit vectors with each unit vector stored as a column.
    Each column of the array matches to the corresponding column in the
    :attr:`matched_catalogue_unit_vectors_camera`, :attr:`matched_catalogue_image_points`,
    and :attr:`matched_extracted_image_points` arrays, and the corresponding rows in the
    :attr:`matched_image_illums` and :attr:`matched_catalogue_star_records` arrays.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.

    If no stars have been successfully identified for an image then the corresponding index of the list will be
    set to ``None``.
    """
    return self._matched_catalogue_unit_vectors_inertial
@property
def matched_catalogue_unit_vectors_camera(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the camera frame unit vectors of the star records queried from the star catalogue that
    were matched with an image point of interest for each image in :attr:`camera` from the most recent star
    identification attempt (this gets overwritten when a new attempt is made at identifying the stars in the
    image).  The unit vectors are the directions to the stars after correcting for proper motion, stellar
    aberration, and parallax.

    Each list element is a 3xn array of unit vectors with each unit vector stored as a column.
    Each column of the array matches to the corresponding column in the
    :attr:`matched_catalogue_unit_vectors_inertial`, :attr:`matched_catalogue_image_points`,
    and :attr:`matched_extracted_image_points` arrays, and the corresponding rows in the
    :attr:`matched_image_illums` and :attr:`matched_catalogue_star_records` arrays.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.

    If no stars have been successfully identified for an image then the corresponding index of the list will be
    set to ``None``.
    """
    return self._matched_catalogue_unit_vectors_camera
@property
def matched_weights_inertial(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the uncertainty of all of the star records queried from the star catalogue that were
    matched with an image point of interest for each image in :attr:`camera` from the most recent query to the
    star catalogue (this gets overwritten when the star catalogue is re-queried for the image).  The
    uncertainties are based on the formal uncertainty of the star location according to the star catalogue and
    are for the unit vector representation.

    A single value is provided for each unit vector representing the sum of the squares of the uncertainty (the
    trace of the covariance matrix).  Each element of the array matches to the corresponding column in the
    :attr:`matched_catalogue_image_points` array and the corresponding row in the
    :attr:`matched_catalogue_star_records` DataFrame.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._matched_weights_inertial
@property
def matched_weights_picture(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the uncertainty of all of the star records queried from the star catalogue that were
    matched with an image point of interest for each image in :attr:`camera` from the most recent query to the
    star catalogue (this gets overwritten when the star catalogue is re-queried for the image).  The
    uncertainties are based on the formal uncertainty of the star location according to the star catalogue and
    are for the pixel representation of the star location.

    A single value is provided for each unit vector representing the sum of the squares of the uncertainty (the
    trace of the covariance matrix).  Each element of the array matches to the corresponding column in the
    :attr:`matched_catalogue_image_points` array and the corresponding row in the
    :attr:`matched_catalogue_star_records` DataFrame.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.
    """
    return self._matched_weights_picture
@property
def matched_extracted_image_points(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the image points of interest that are matched with a catalogue star for each image in
    :attr:`camera` from the most recent star identification attempt (this gets overwritten when a new attempt
    is made at identifying the stars in the image).

    Each list element is a 2xn array of image points with the x locations (columns) down the first row and the y
    locations (rows) down the second row.  Each column of the array matches to the corresponding column in the
    :attr:`matched_catalogue_unit_vectors_inertial`, :attr:`matched_catalogue_unit_vectors_camera`,
    and :attr:`matched_catalogue_image_points` arrays, and the corresponding row in the
    :attr:`matched_image_illums` and :attr:`matched_catalogue_star_records` arrays.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.

    If no stars have been successfully identified for an image then the corresponding index of the list will be
    set to ``None``.
    """
    return self._matched_extracted_image_points
@property
def matched_image_illums(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the DN values of the pixel containing the image points of interest that are matched with
    a catalogue star for each image in :attr:`camera` from the most recent star identification attempt (this
    gets overwritten when a new attempt is made at identifying the stars in the image).

    Each list element is a length n array of image DN values.  Each row of the array matches to the
    corresponding column in the :attr:`matched_catalogue_unit_vectors_inertial`,
    :attr:`matched_catalogue_unit_vectors_camera`, :attr:`matched_catalogue_image_points`, and
    :attr:`matched_extracted_image_points` arrays, and the corresponding row in the
    :attr:`matched_catalogue_star_records` DataFrame.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.

    If no stars have been successfully identified for an image then the corresponding index of the list will be
    set to ``None``.
    """
    return self._matched_image_illums
@property
def matched_stats(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the connected component stats of each object that is matched with a catalogue star for
    each image in :attr:`camera` from the most recent star identification attempt (this gets overwritten when a
    new attempt is made at identifying the stars in the image).

    Each list element is a length n array of OpenCV connected components statistics.  Each row of the array
    matches to the corresponding column in the :attr:`matched_catalogue_unit_vectors_inertial`,
    :attr:`matched_catalogue_unit_vectors_camera`, :attr:`matched_catalogue_image_points`, and
    :attr:`matched_extracted_image_points` arrays, and the corresponding row in the
    :attr:`matched_catalogue_star_records` DataFrame.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.

    If no stars have been successfully identified for an image then the corresponding index of the list will be
    set to ``None``.
    """
    return self._matched_ip_stats
@property
def matched_snrs(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the peak snr value of each object that is matched with a catalogue star for each image
    in :attr:`camera` from the most recent star identification attempt (this gets overwritten when a new
    attempt is made at identifying the stars in the image).

    Each list element is a length n array of peak signal-to-noise ratio values.  Each row of the array matches
    to the corresponding column in the :attr:`matched_catalogue_unit_vectors_inertial`,
    :attr:`matched_catalogue_unit_vectors_camera`, :attr:`matched_catalogue_image_points`, and
    :attr:`matched_extracted_image_points` arrays, and the corresponding row in the
    :attr:`matched_catalogue_star_records` DataFrame.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.

    If no stars have been successfully identified for an image then the corresponding index of the list will be
    set to ``None``.
    """
    return self._matched_ip_snrs
@property
def matched_psfs(self) -> List[Optional[np.ndarray]]:
    """
    This list contains the fit PSF objects for each matched image point from the most recent star
    identification attempt (this gets overwritten when a new attempt is made at identifying the stars in the
    image).

    This list is only updated if :attr:`.ImageProcessing.save_psf` is set to ``True``.

    Each list element is a shape n array of PSF objects.  Each element of the array matches to the
    corresponding column in the :attr:`matched_catalogue_unit_vectors_inertial`,
    :attr:`matched_catalogue_unit_vectors_camera`, :attr:`matched_catalogue_image_points`, and
    :attr:`matched_extracted_image_points` arrays, and the corresponding row in the
    :attr:`matched_catalogue_star_records` DataFrame.  The PSF object in each element is the same type as
    :attr:`.ImageProcessing.centroiding`.  See the documentation for the :mod:`.point_spread_functions` package
    for details.

    This list should always be the same length as the :attr:`.Camera.images` list and each element of this list
    corresponds to the image in the same element in the :attr:`.Camera.images` list.

    If no stars have been successfully identified for an image then the corresponding index of the list will be
    set to ``None``.
    """
    return self._matched_psfs
# ____________________________________________________ METHODS ________________________________________________
def reset_star_id(self):
    """
    This method replaces the existing star ID instance with a new instance using the initial
    ``star_id_kwargs`` argument passed to the constructor.

    A new instance of the object is created, therefore there is no backwards reference whatsoever to the state
    before a call to this method.
    """
    # Rebuild the StarID from the constructor-supplied kwargs (if any) so no prior state survives
    if self._initial_star_id_kwargs is not None:
        self._star_id = StarID(self._camera.model, **self._initial_star_id_kwargs)
    else:
        self._star_id = StarID(self._camera.model)
def update_star_id(self, star_id_update: Optional[dict] = None):
    """
    This method updates the attributes of the :attr:`star_id` attribute.

    See the :class:`.StellarOpNav` class for accepted attribute values.

    If a supplied attribute is not found in the :attr:`star_id` attribute then this will print a warning
    and ignore the attribute.  Any attributes that are not supplied are left alone.

    :param star_id_update: A dictionary of attribute->value pairs to update the attributes of the
                           :attr:`star_id` attribute with.
    """
    if star_id_update is not None:
        for key, val in star_id_update.items():
            # only set attributes that already exist on the StarID instance; warn on unknown keys
            if hasattr(self._star_id, key):
                setattr(self._star_id, key, val)
            else:
                warnings.warn("The attribute {0} was not found.\n Cannot update StarID instance".format(key))
def reset_image_processing(self):
    """
    This method replaces the existing image processing instance with a new instance using the initial
    ``image_processing_kwargs`` argument passed to the constructor.

    A new instance of the object is created, therefore there is no backwards reference whatsoever to the state
    before a call to this method.

    This method also updates all of the elements of :attr:`process_stars` to ``True``.
    """
    super(StellarOpNav, self).reset_image_processing()
    # a fresh image processing instance invalidates previously extracted points, so force
    # every image to be re-processed on the next star identification attempt
    for ind in range(len(self.process_stars)):
        self.process_stars[ind] = True
def update_image_processing(self, image_processing_update: Optional[dict] = None):
    """
    This method updates the attributes of the :attr:`image_processing` attribute.

    See the :class:`.ImageProcessing` class for accepted attribute values.

    If a supplied attribute is not found in the :attr:`image_processing` attribute then this will print a
    warning and ignore the attribute.  Any attributes that are not supplied are left alone.

    This method also updates all of the elements of :attr:`process_stars` to ``True``.

    :param image_processing_update: A dictionary of attribute->value pairs to update the attributes of the
                                    :attr:`image_processing` attribute with.
    """
    super().update_image_processing(image_processing_update)
    # changed image processing settings invalidate previously extracted points, so force
    # every image to be re-processed on the next star identification attempt
    if image_processing_update is not None:
        for ind in range(len(self.process_stars)):
            self.process_stars[ind] = True
def reset_attitude_estimator(self):
    """
    This method replaces the existing attitude estimator instance with a new instance using the initial
    ``attitude_estimator_kwargs`` argument passed to the constructor.

    A new instance of the object is created, therefore there is no backwards reference whatsoever to the state
    before a call to this method.
    """
    # rebuild from the class/kwargs captured at construction time so no prior state survives
    if self._initial_attitude_est_kwargs is not None:
        self._attitude_est = self._initial_attitude_est(**self._initial_attitude_est_kwargs)
    else:
        self._attitude_est = self._initial_attitude_est()
def update_attitude_estimator(self, attitude_estimator_update: dict):
    """
    This method updates the attributes of the :attr:`attitude_estimator` attribute.

    See the :mod:`.stellar_opnav.estimators` documentation for accepted attribute values.

    If a supplied attribute is not found in the :attr:`attitude_estimator` attribute then this will print a
    warning and ignore the attribute.  Any attributes that are not supplied are left alone.

    :param attitude_estimator_update: A dictionary of attribute->value pairs to update the
                                      :attr:`attitude_estimator` attribute with
    """
    if attitude_estimator_update is not None:
        for key, val in attitude_estimator_update.items():
            # only set attributes that already exist on the estimator instance; warn on unknown keys
            if hasattr(self._attitude_est, key):
                setattr(self._attitude_est, key, val)
            else:
                warnings.warn("The attribute {0} was not found.\n"
                              "Cannot update attitude estimation instance".format(key))
# noinspection PyTypeChecker
[docs] def id_stars(self):
"""
This method identifies stars for each image turned on in the :attr:`camera` attribute.
The steps for performing the star identification are as follows:
#. Potential star locations are extracted from each image using the
:meth:`.ImageProcessing.locate_subpixel_poi_in_roi` method for each image (if this step has not been already
performed). The subpixel points of interest from this step are then passed to the :class:`.StarID` object
as the :attr:`.StarID.extracted_image_points` attribute.
#. The :class:`.StarID` object is further updated with the *a priori* attitude, the camera position and
velocity, and the observation date for each image.
#. The stars are identified using the routines in the :meth:`.StarID.id_stars` method.
#. The results from the star identification are stored into the various properties of this
class for each image.
For more information about the star identification process see the :class:`.StarID` documentation
"""
image_count_index = 1
number_of_images = sum(self._camera.image_mask)
# walk through each turned on image
for ind, image in self._camera:
start = time.time()
# if we need to reprocess the stars then send them through the image processing pipeline
if self.process_stars[ind]:
# extract subpixel points of interest as potential star locations
if self._image_processing.save_psf:
if self._image_processing.return_stats:
(self._ip_extracted_image_points[ind], self._ip_image_illums[ind], self._ip_psfs[ind],
self._ip_stats[ind], self._ip_snrs[ind]) = \
self._image_processing.locate_subpixel_poi_in_roi(image)
else:
(self._ip_extracted_image_points[ind], self._ip_image_illums[ind],
self._ip_psfs[ind]) = self._image_processing.locate_subpixel_poi_in_roi(image)
else:
if self._image_processing.return_stats:
(self._ip_extracted_image_points[ind], self._ip_image_illums[ind], self._ip_stats[ind],
self._ip_snrs[ind]) = self._image_processing.locate_subpixel_poi_in_roi(image)
else:
self._ip_extracted_image_points[ind], self._ip_image_illums[ind] = \
self._image_processing.locate_subpixel_poi_in_roi(image)
# discard points inside of extended bodies)
if self.scene is not None:
if self._ip_extracted_image_points[ind] is not None:
self.scene.update(image)
for target in self.scene.target_objs:
# check that we're close to the FOV
boresight_angle = np.arccos(target.position.ravel()[-1] /
np.linalg.norm(target.position)) * 180 / np.pi
if boresight_angle <= 1.25 * self.camera.model.field_of_view:
# get the limbs
limbs = target.shape.find_limbs(target.position/np.linalg.norm(target.position),
_SCAN_VECTORS)
limbs_pix = self.camera.model.project_onto_image(limbs, image=ind,
temperature=image.temperature)
# make a path for the limb
path = Path(limbs_pix.T, closed=True)
# check if our points are inside of the limb or not
interior = path.contains_points(self._ip_extracted_image_points[ind].T)
# throw out interior points
self._ip_extracted_image_points[ind] = self._ip_extracted_image_points[ind][:,
~interior]
self._ip_image_illums[ind] = self._ip_image_illums[ind][~interior]
if self._image_processing.return_stats:
self._ip_stats[ind] = np.array(self._ip_stats[ind])[~interior]
self._ip_snrs[ind] = np.array(self._ip_snrs[ind])[~interior]
if self._image_processing.save_psf:
self._ip_psfs[ind] = self._ip_psfs[ind][~interior]
# set a flag saying that we don't need to pass this image through the image processing again
self.process_stars[ind] = False
# supply the star id class with the proper information
self._star_id.extracted_image_points = self._ip_extracted_image_points[ind].copy()
self._star_id.a_priori_rotation_cat2camera = image.rotation_inertial_to_camera
self._star_id.camera_velocity = image.velocity.reshape(3, 1)
self._star_id.camera_position = image.position.reshape(3, 1)
# identify the stars
keep_stars, keep_inliers = self._star_id.id_stars(epoch=image.observation_date,
compute_weights=self.use_weights,
temperature=image.temperature,
image_number=ind)
# store the required information
self._queried_catalogue_star_records[ind] = self._star_id.queried_catalogue_star_records
self._queried_catalogue_image_points[ind] = self._star_id.queried_catalogue_image_points
self._queried_catalogue_unit_vectors[ind] = self._star_id.queried_catalogue_unit_vectors
self._unmatched_catalogue_star_records[ind] = self._star_id.unmatched_catalogue_star_records
self._unmatched_catalogue_image_points[ind] = self._star_id.unmatched_catalogue_image_points
self._unmatched_catalogue_unit_vectors[ind] = self._star_id.unmatched_catalogue_unit_vectors
self._unmatched_extracted_image_points[ind] = self._star_id.unmatched_extracted_image_points
if self.use_weights:
self._queried_weights_inertial[ind] = self._star_id.queried_weights_inertial
self._queried_weights_picture[ind] = self._star_id.queried_weights_picture
self._unmatched_weights_inertial[ind] = self._star_id.unmatched_weights_inertial
self._unmatched_weights_picture[ind] = self._star_id.unmatched_weights_picture
# if we didn't identify any stars then set the matched variables to None
if keep_inliers is None:
self._matched_catalogue_star_records[ind] = None
self._matched_catalogue_image_points[ind] = None
self._matched_catalogue_unit_vectors_inertial[ind] = None
self._matched_catalogue_unit_vectors_camera[ind] = None
self._matched_extracted_image_points[ind] = None
self._matched_image_illums[ind] = None
self._matched_psfs[ind] = None
self._unmatched_psfs[ind] = None
self._matched_ip_stats[ind] = None
self._matched_ip_snrs[ind] = None
self._unmatched_ip_stats[ind] = self._ip_stats[ind]
self._unmatched_ip_snrs[ind] = self._ip_snrs[ind]
if self.use_weights:
self._matched_weights_inertial[ind] = None
self._matched_weights_picture[ind] = None
else:
self._matched_catalogue_star_records[ind] = self._star_id.matched_catalogue_star_records
self._matched_catalogue_image_points[ind] = self._star_id.matched_catalogue_image_points
self._matched_catalogue_unit_vectors_inertial[ind] = self._star_id.matched_catalogue_unit_vectors
self._matched_catalogue_unit_vectors_camera[ind] = np.matmul(
image.rotation_inertial_to_camera.matrix, self._matched_catalogue_unit_vectors_inertial[ind]
)
self._matched_extracted_image_points[ind] = self._star_id.matched_extracted_image_points
self._matched_image_illums[ind] = self._ip_image_illums[ind][keep_stars][keep_inliers].copy()
camera_inds = np.arange(self._ip_extracted_image_points[ind].shape[1])
unmatched_centroid_inds = list({*camera_inds} - {*camera_inds[keep_stars][keep_inliers]})
if self._image_processing.save_psf:
self._matched_psfs[ind] = self._ip_psfs[ind][keep_stars][keep_inliers].copy()
self._unmatched_psfs[ind] = self._ip_psfs[ind][unmatched_centroid_inds].copy()
if self.use_weights:
self._matched_weights_inertial[ind] = self._star_id.matched_weights_inertial
self._matched_weights_picture[ind] = self._star_id.matched_weights_picture
if self._image_processing.return_stats:
stat_array = np.array(self._ip_stats[ind])
snr_array = np.array(self._ip_snrs[ind])
self._matched_ip_stats[ind] = stat_array[keep_stars][keep_inliers]
self._matched_ip_snrs[ind] = snr_array[keep_stars][keep_inliers]
self._unmatched_ip_stats[ind] = stat_array[unmatched_centroid_inds]
self._unmatched_ip_snrs[ind] = snr_array[unmatched_centroid_inds]
print('image {} of {} done in {:.4g} seconds'.format(image_count_index, number_of_images,
time.time() - start), flush=True)
image_count_index += 1
[docs] def reproject_stars(self):
"""
This method updates the unit vectors and reprojects the stars using updated camera and attitude models.
The following are updated:
* :attr:`matched_catalogue_unit_vectors_camera`
* :attr:`matched_catalogue_image_points`
* :attr:`queried_catalogue_image_points`
* :attr:`unmatched_catalogue_image_points`
"""
for ind, image in self.camera:
if self._matched_catalogue_unit_vectors_inertial[ind] is None:
warnings.warn("No stars identified for image {0}.\n Cannot perform update".format(ind))
else:
self._matched_catalogue_unit_vectors_camera[ind] = np.matmul(
image.rotation_inertial_to_camera.matrix, self._matched_catalogue_unit_vectors_inertial[ind]
)
self._matched_catalogue_image_points[ind] = self._camera.model.project_onto_image(
self._matched_catalogue_unit_vectors_camera[ind], image=ind, temperature=image.temperature
)
self._queried_catalogue_image_points[ind] = self.camera.model.project_onto_image(
np.matmul(image.rotation_inertial_to_camera.matrix, self._queried_catalogue_unit_vectors[ind]),
image=ind, temperature=image.temperature
)
self._unmatched_catalogue_image_points[ind] = self.camera.model.project_onto_image(
np.matmul(image.rotation_inertial_to_camera.matrix, self._unmatched_catalogue_unit_vectors[ind]),
image=ind, temperature=image.temperature
)
[docs] def estimate_attitude(self):
"""
This method estimates an updated attitude for each image based on the identified stars in the image.
For each turned on image in the :attr:`camera` attribute this method provides the attitude estimation routines
with the :attr:`matched_catalogue_unit_vectors_inertial`, and the camera frame unit vectors corresponding to the
:attr:`matched_extracted_image_points`. The :meth:`~.AttitudeEstimator.estimate()` method of the attitude
estimation class is then called, and the resulting solved for rotation is stored as the
:attr:`.rotation_inertial_to_camera` attribute for each image. Finally, the updated attitude information is
used to update the following:
* :attr:`matched_catalogue_unit_vectors_camera`
* :attr:`matched_catalogue_image_points`
* :attr:`queried_catalogue_image_points`
* :attr:`unmatched_catalogue_image_points`
The unit vectors in the camera frame for the matched image points are determined using the
:meth:`~.CameraModel.pixels_to_unit` method of the camera model.
For a more thorough description of the attitude estimation routine see the
:mod:`.stellar_opnav.estimators` documentation.
When attitude estimation is successful for an image (that is it was attempted and didn't error), then the
:attr:`.OpNavImage.pointing_post_fit` flag is updated to ``True``. When attitude estimation isn't successful
for an image (it wasn't attempted due to a lack of stars or it failed) the the
:attr:`.OpNavImage.pointing_post_fit` is set to ``False``
.. warning::
This method overwrites the attitude information in the :attr:`.rotation_inertial_to_camera` attribute and
does not save old information anywhere. If you want this information saved be sure to store it yourself
for each image.
"""
for ind, image in self._camera:
if self._matched_catalogue_unit_vectors_inertial[ind] is None:
warnings.warn("No stars identified for image {0}.\n Cannot perform attitude estimation".format(ind))
image.pointing_post_fit = False
elif self._matched_catalogue_unit_vectors_inertial[ind].reshape(3, -1).shape[-1] < 3:
warnings.warn("Not enough stars identified for image {0}.\n "
"Cannot perform attitude estimation".format(ind))
image.pointing_post_fit = False
else:
self._attitude_est.base_frame_directions = self._matched_catalogue_unit_vectors_inertial[ind]
self._attitude_est.target_frame_directions = self._camera.model.pixels_to_unit(
self._matched_extracted_image_points[ind], image=ind, temperature=image.temperature
)
if self.use_weights:
# noinspection PyTypeChecker
self._attitude_est.weights = 1 / self._matched_weights_inertial[ind]
self._attitude_est.weighted_estimation = True
else:
self._attitude_est.weighted_estimation = False
self._attitude_est.estimate()
image.rotation_inertial_to_camera = self._attitude_est.rotation
image.pointing_post_fit = True
self.reproject_stars()
# noinspection PyTypeChecker
[docs] def sid_summary(self, width: int = 15):
"""
This method generate a summary of the star identification results overall and for each image and prints it to
stdout.
The summary is a fixed width table with the following columns
date, queried, q inside, q outside, poi, matched, m inside, m outside, unmatched q, u q inside, u
q outside, u poi. The meaning of each column is specified in the following table:
=============== ================================================================================================
column description
=============== ================================================================================================
``date`` The UTC observation date for the image this line corresponds to
``queried`` The total number of stars that were queried from the catalogue
``q inside`` The number of queried catalogue stars that were within the FOV of the camera
``q outside`` The number of queried catalogue stars that were outside the FOV of the camera
``poi`` The total number of points of interest that were identified in the image by the image processing
algorithms
``matched`` The total number of matched (identified) stars
``m inside`` The number of matched stars that fall within the field of view of the camera using the current
attitude
``m outside`` The number of matched stars that fall outside of the field of view of the camera using the
current attitude
``unmatched q`` The total number of unmatched catalogue locations
``u q inside`` The number of unmatched catalogue locations that fall inside of the field of view of the camera
using the current attitude
``u q outside`` The number of unmatched catalogue locations that fall outside of the field of view of the camera
using the current attitude
``u poi`` The number of points of interested that where identified by the image processing routines in the
image that were not matched with a catalogue star.
=============== ================================================================================================
This method also prints out a total row which sums each column using the same format.
There is one optional input for this method, ``width`` which specifies the width for each column of the table.
You should not set this below 15 due to the length of the UTC date strings
:param width: The width of the fixed width table columns in characters
"""
# initialize the sum variables
total_queried = 0
total_inside_fov = 0
total_outside_fov = 0
total_extracted_image_points = 0
total_matched = 0
total_matched_inside_fov = 0
total_matched_outside_fov = 0
total_catalogue_unmatched = 0
total_catalogue_unmatched_inside_fov = 0
total_catalogue_unmatched_outside_fov = 0
total_image_poi_unmatched = 0
# form and print the header
header = (('{:^' + str(width) + '} ') * 12).format('date', 'queried', 'q inside', 'q outside',
'poi', 'matched', 'm inside', 'm outside',
'unmatched q', 'u q inside', 'u q outside',
'u poi')
print(header)
# form the template for the data rows
template = '{:^' + str(width) + '} ' + ('{:^' + str(width) + 'd} ') * 11
# loop through each turned on image
for ind, image in self.camera:
# get the number of queried stars inside and outside the field of view)
if self._queried_catalogue_star_records[ind] is None:
number_queried = 0
number_inside_fov = 0
number_outside_fov = 0
else:
number_queried = self._queried_catalogue_star_records[ind].shape[0]
inside_fov = ((self._queried_catalogue_image_points[ind] >= 0) &
(self._queried_catalogue_image_points[ind] <= [[self.camera.model.n_cols],
[self.camera.model.n_rows]])).all(axis=0)
number_inside_fov = inside_fov.sum()
number_outside_fov = (~inside_fov).sum()
# get the number of image points
if self._ip_extracted_image_points[ind] is None:
number_extracted_image_points = 0
else:
number_extracted_image_points = self._ip_extracted_image_points[ind].shape[1]
# get the number of matched points
if self._matched_extracted_image_points[ind] is not None:
number_matched = self._matched_extracted_image_points[ind].shape[1]
else:
number_matched = 0
# get the number of unmatched stars inside and outside the field of view
if self._unmatched_catalogue_star_records[ind] is None:
number_unmatched_catalogue = 0
number_unmatched_catalogue_inside_fov = 0
number_unmatched_catalogue_outside_fov = 0
else:
number_unmatched_catalogue = self._unmatched_catalogue_star_records[ind].shape[0]
unmatched_catalogue_inside_fov = (
(self._unmatched_catalogue_image_points[ind] >= 0) &
(self._unmatched_catalogue_image_points[ind] <= [[self.camera.model.n_cols],
[self.camera.model.n_rows]])
).all(axis=0)
number_unmatched_catalogue_inside_fov = unmatched_catalogue_inside_fov.sum()
number_unmatched_catalogue_outside_fov = (~unmatched_catalogue_inside_fov).sum()
# get the number of matched stars inside and outside the field of view
if self._matched_catalogue_image_points[ind] is None:
number_matched_catalogue_inside_fov = 0
number_matched_catalogue_outside_fov = 0
else:
matched_catalogue_inside_fov = (
(self._matched_catalogue_image_points[ind] >= 0) &
(self._matched_catalogue_image_points[ind] <= [[self.camera.model.n_cols],
[self.camera.model.n_rows]])
).all(axis=0)
number_matched_catalogue_inside_fov = matched_catalogue_inside_fov.sum()
number_matched_catalogue_outside_fov = (~matched_catalogue_inside_fov).sum()
# get the number of unmatched points
if self._unmatched_extracted_image_points[ind] is None:
number_unmatched_poi = 0
else:
number_unmatched_poi = self._unmatched_extracted_image_points[ind].shape[1]
# print the row for the current image
print(template.format(image.observation_date.strftime('%m/%d/%y %H:%M'), number_queried, number_inside_fov,
number_outside_fov,
number_extracted_image_points, number_matched, number_matched_catalogue_inside_fov,
number_matched_catalogue_outside_fov, number_unmatched_catalogue,
number_unmatched_catalogue_inside_fov, number_unmatched_catalogue_outside_fov,
number_unmatched_poi))
# update the totals
total_queried += number_queried
total_inside_fov += number_inside_fov
total_outside_fov += number_outside_fov
total_extracted_image_points += number_extracted_image_points
total_matched += number_matched
total_matched_inside_fov += number_matched_catalogue_inside_fov
total_matched_outside_fov += number_matched_catalogue_outside_fov
total_catalogue_unmatched += number_unmatched_catalogue
total_catalogue_unmatched_inside_fov += number_unmatched_catalogue_inside_fov
total_catalogue_unmatched_outside_fov += number_unmatched_catalogue_outside_fov
total_image_poi_unmatched += number_unmatched_poi
# print the total row
print('\n\n')
print(template.format('total', total_queried, total_inside_fov, total_outside_fov,
total_extracted_image_points, total_matched, total_matched_inside_fov,
total_matched_outside_fov, total_catalogue_unmatched,
total_catalogue_unmatched_inside_fov, total_catalogue_unmatched_outside_fov,
total_image_poi_unmatched))
# noinspection PyTypeChecker
[docs] def matched_star_residuals(self, image_num: int) -> NONEARRAY:
"""
This method calculates the residuals for matched stars for a given image number in the :attr:`camera` attribute.
:param image_num: The index of the image in the :attr:`camera` attribute.
"""
if self._matched_catalogue_image_points[image_num] is None:
warnings.warn("No stars identified for image {0}.\n Cannot calculate residuals".format(image_num))
else:
cat_locs = self._matched_catalogue_image_points[image_num] # type: np.ndarray
return cat_locs - self._matched_extracted_image_points[image_num]
# noinspection PyUnresolvedReferences
[docs] def remove_matched_stars(self, image_num: int, star_indices: ARRAY_LIKE):
"""
This method removes specified matched stars for a given image number in the :attr:`camera` attribute.
This method updates the following:
* :attr:`matched_catalogue_star_records`
* :attr:`matched_catalogue_image_points`
* :attr:`matched_catalogue_unit_vectors_inertial`
* :attr:`matched_catalogue_unit_vectors_camera`
* :attr:`matched_extracted_image_points`
* :attr:`matched_image_illums`
* :attr:`matched_stats`
* :attr:`matched_snrs`
* :attr:`matched_psfs`
* :attr:`matched_weights_inertial`
* :attr:`matched_weights_picture`
:param image_num: The index of the image in the :attr:`camera` attribute.
:param star_indices: The list of indices of matched stars to be removed from the specified image in the
:attr:`camera` attribute.
"""
if self._matched_catalogue_image_points[image_num] is None:
warnings.warn("No stars identified for image {0}.\n Cannot remove matched stars".format(image_num))
else:
indices_desired = np.ones(self._matched_catalogue_image_points[image_num].shape[1], dtype=bool)
indices_desired[star_indices] = False
self._matched_catalogue_image_points[image_num] = \
self._matched_catalogue_image_points[image_num][:, indices_desired]
self._matched_catalogue_unit_vectors_camera[image_num] = \
self._matched_catalogue_unit_vectors_camera[image_num][:, indices_desired]
self._matched_catalogue_unit_vectors_inertial[image_num] = \
self._matched_catalogue_unit_vectors_inertial[image_num][:, indices_desired]
self._matched_catalogue_star_records[image_num] = \
self._matched_catalogue_star_records[image_num].loc[indices_desired]
self._matched_extracted_image_points[image_num] = \
self._matched_extracted_image_points[image_num][:, indices_desired]
self._matched_image_illums[image_num] = self._matched_image_illums[image_num][indices_desired]
if self._image_processing.return_stats:
self._matched_ip_snrs[image_num] = self._matched_ip_snrs[image_num][indices_desired]
self._matched_ip_stats[image_num] = self._matched_ip_stats[image_num][indices_desired]
if self._image_processing.save_psf:
self._matched_psfs[image_num] = self._matched_psfs[image_num][indices_desired]
if self._matched_weights_inertial[image_num] is not None:
self._matched_weights_inertial[image_num] = self._matched_weights_inertial[image_num][indices_desired]
if self._matched_weights_picture[image_num] is not None:
self._matched_weights_picture[image_num] = self._matched_weights_picture[image_num][indices_desired]
@staticmethod
def _get_outliers(data: np.ndarray, sigma_cutoff: Real, hard_threshold: Real) -> np.ndarray:
"""
Get possible outliers using both a Median Absolute Deviation (MAD) check and a hard threshold check.
:param data: The data to check for outliers
:param sigma_cutoff: The sigma value for the MAD check
:param hard_threshold: The hard threshold to use on the norm of the residuals
:return: A boolean array of possible outliers
"""
x_outliers = get_outliers(data[0], sigma_cutoff=sigma_cutoff)
y_outliers = get_outliers(data[1], sigma_cutoff=sigma_cutoff)
total = np.linalg.norm(data, axis=0)
total_outliers = get_outliers(total, sigma_cutoff=sigma_cutoff)
return x_outliers | y_outliers | total_outliers | (total >= hard_threshold)
def _review_image_outliers(self, image_num: int, sigma_cutoff: Real, hard_threshold: Real) -> List[int]:
"""
This method identifies potential outliers for the specified image and shows the possible outliers to the user
for manual review.
Any outliers the user chooses to remove will be returns as a list of the indices of the removed outliers
:param image_num: image number
:return: The indices to remove within image
"""
from .visualizer import show_outlier
indices_to_remove = []
residuals = self.matched_star_residuals(image_num)
outliers = self._get_outliers(residuals, sigma_cutoff, hard_threshold)
# Plot stars with residuals above threshold
for i in range(len(residuals[0])):
if outliers[i]:
shown_outlier = show_outlier(self, i, image_num, residuals)
if shown_outlier.removed:
indices_to_remove.append(i)
return indices_to_remove
[docs] def review_outliers(self, sigma_cutoff: Real = 3, hard_threshold: Real = 5):
"""
This method review outliers for all images within a camera object and allows for them to be removed through a
simple gui.
Possible outliers are identified either using :func:`.get_outliers` or through a hard threshold. The
``sigma_cutoff`` is passed to the same argument in the :func:`.get_outliers`. The ``hard_threshold`` is used to
identify possible outliers by simply thresholding the total pixel error (sqrt(x**2+y**2)). All possible
outliers from either method are shown to the user for possible manual removal using the :func:`.show_outlier`
function.
For automated outlier removal see the :meth:`remove_outliers` method.
:param sigma_cutoff: the sigma threshold to use in the Median Absolute Deviation check
:param hard_threshold: The hard threshold to use in units of pixels
"""
for image_num, _ in self.camera:
indices_to_remove = self._review_image_outliers(image_num, sigma_cutoff, hard_threshold)
# Remove any stars that were identified for removal in outlier_check
if indices_to_remove:
self.remove_matched_stars(image_num, indices_to_remove)
[docs] def remove_outliers(self, sigma_cutoff: Real = 3, hard_threshold: Real = 5):
"""
This method removes outliers for all images within a camera object.
Outliers are identified using :func:`.get_outliers` and through a hard threshold. The
``sigma_cutoff`` is passed to the same argument in the :func:`.get_outliers`. The ``hard_threshold`` is used to
identify possible outliers by simply thresholding the total pixel error (sqrt(x**2+y**2)). All possible
outliers from either method are removed.
:param sigma_cutoff: the sigma threshold to use in the Median Absolute Deviation check
:param hard_threshold: The hard threshold to use in units of pixels
"""
for image_num, _ in self.camera:
residuals = self.matched_star_residuals(image_num)
if residuals.size:
outliers = self._get_outliers(residuals, sigma_cutoff, hard_threshold)
if outliers.any():
indices = np.argwhere(outliers).ravel()
self.remove_matched_stars(image_num, indices)
def clear_results(self):
"""
This clears all results arrays and resets them to their original values.
Note that you cannot retrieve data anymore once this is called unless you have saved it yourself, therefore
caution is urged.
"""
number_images = len(self._camera.images)
self._ip_extracted_image_points = [None] * number_images
self._ip_image_illums = [None] * number_images
self._ip_psfs = [None] * number_images
self._ip_stats = [None] * number_images
self._ip_snrs = [None] * number_images
self.process_stars = [True] * number_images
self._queried_catalogue_star_records = [None] * number_images
self._queried_catalogue_image_points = [None] * number_images
self._queried_catalogue_unit_vectors = [None] * number_images
self._queried_weights_inertial = [None] * number_images
self._queried_weights_picture = [None] * number_images
self._unmatched_catalogue_star_records = [None] * number_images
self._unmatched_catalogue_image_points = [None] * number_images
self._unmatched_catalogue_unit_vectors = [None] * number_images
self._unmatched_extracted_image_points = [None] * number_images
self._unmatched_psfs = [None] * number_images
self._unmatched_image_illums = [None] * number_images
self._unmatched_weights_inertial = [None] * number_images
self._unmatched_weights_picture = [None] * number_images
self._unmatched_ip_stats = [None] * number_images
self._unmatched_ip_snrs = [None] * number_images
self._matched_catalogue_star_records = [None] * number_images
self._matched_catalogue_image_points = [None] * number_images
self._matched_catalogue_unit_vectors_inertial = [None] * number_images
self._matched_extracted_image_points = [None] * number_images
self._matched_weights_inertial = [None] * number_images
self._matched_weights_picture = [None] * number_images
self._matched_psfs = [None] * number_images
self._matched_image_illums = [None] * number_images
self._matched_catalogue_unit_vectors_camera = [None] * number_images
self._matched_ip_stats = [None] * number_images
self._matched_ip_snrs = [None] * number_images