gtsam-develop 4.3a0.dev202510091957-cp312-cp312-macosx_11_0_arm64.whl → 4.3a0.dev202510101043-cp312-cp312-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of gtsam-develop has been flagged as potentially problematic; see the package page for details.
- gtsam/.dylibs/{libgtsam.4.3a0.dev202510091957.dylib → libgtsam.4.3a0.dev202510101043.dylib} +0 -0
- gtsam/.dylibs/{libgtsam_unstable.4.3a0.dev202510091957.dylib → libgtsam_unstable.4.3a0.dev202510101043.dylib} +0 -0
- gtsam/__init__.pyi +62 -17
- gtsam/gtsam/__init__.pyi +2674 -1051
- gtsam/gtsam.cpython-312-darwin.so +0 -0
- {gtsam_develop-4.3a0.dev202510091957.dist-info → gtsam_develop-4.3a0.dev202510101043.dist-info}/METADATA +1 -1
- {gtsam_develop-4.3a0.dev202510091957.dist-info → gtsam_develop-4.3a0.dev202510101043.dist-info}/RECORD +10 -10
- gtsam_unstable/gtsam_unstable.cpython-312-darwin.so +0 -0
- {gtsam_develop-4.3a0.dev202510091957.dist-info → gtsam_develop-4.3a0.dev202510101043.dist-info}/WHEEL +0 -0
- {gtsam_develop-4.3a0.dev202510091957.dist-info → gtsam_develop-4.3a0.dev202510101043.dist-info}/top_level.txt +0 -0
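The list above shows which files changed between the two builds; the bulk of the change is the regenerated gtsam/gtsam/__init__.pyi stub. For readers who want to reproduce this stub comparison locally, a minimal sketch using only the Python standard library follows — it assumes both wheels have already been downloaded next to the script, and the exact filenames are illustrative placeholders rather than values taken from this page.

```python
# Illustrative sketch (not part of the diff page): compare the bundled type stubs
# of two locally downloaded wheels. Wheel files are zip archives, so the stub can
# be read without installing either build. Filenames below are placeholders.
import difflib
import zipfile

OLD_WHL = "gtsam_develop-4.3a0.dev202510091957-cp312-cp312-macosx_11_0_arm64.whl"
NEW_WHL = "gtsam_develop-4.3a0.dev202510101043-cp312-cp312-macosx_11_0_arm64.whl"
STUB = "gtsam/gtsam/__init__.pyi"  # path as listed in the file table above

def read_stub(wheel_path: str) -> list[str]:
    # Read the stub text straight out of the wheel archive.
    with zipfile.ZipFile(wheel_path) as whl:
        return whl.read(STUB).decode("utf-8").splitlines(keepends=True)

# Unified diff of the old stub against the new one, with three lines of context.
diff = difflib.unified_diff(
    read_stub(OLD_WHL),
    read_stub(NEW_WHL),
    fromfile=OLD_WHL,
    tofile=NEW_WHL,
    n=3,
)
print("".join(diff))
```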
gtsam/gtsam/__init__.pyi
CHANGED
@@ -11,7 +11,7 @@ from . import noiseModel

Unchanged (lines 11-13):
from . import so3
from . import symbol_shorthand
from . import utilities

Removed (old line 14):
__all__: list[str] = ['AHRSFactor', 'AcceleratingScenario', 'AlgebraicDecisionTreeKey', 'AssignmentKey', 'BarometricFactor', 'BatchFixedLagSmoother', 'BearingFactor2D', 'BearingFactor3D', 'BearingFactorPose2', 'BearingRange2D', 'BearingRange3D', 'BearingRangeFactor2D', 'BearingRangeFactor3D', 'BearingRangeFactorPose2', 'BearingRangeFactorPose3', 'BearingRangePose2', 'BearingRangePose3', 'BetweenFactorConstantBias', 'BetweenFactorDouble', 'BetweenFactorPoint2', 'BetweenFactorPoint3', 'BetweenFactorPose2', 'BetweenFactorPose3', 'BetweenFactorRot2', 'BetweenFactorRot3', 'BetweenFactorSL4', 'BetweenFactorSO3', 'BetweenFactorSO4', 'BetweenFactorSimilarity2', 'BetweenFactorSimilarity3', 'BetweenFactorVector', 'BinaryMeasurementPoint3', 'BinaryMeasurementRot3', 'BinaryMeasurementUnit3', 'BlockJacobiPreconditionerParameters', 'Cal3', 'Cal3Bundler', 'Cal3DS2', 'Cal3DS2_Base', 'Cal3Fisheye', 'Cal3Unified', 'Cal3_S2', 'Cal3_S2Stereo', 'Cal3f', 'CalibratedCamera', 'CameraSetCal3Bundler', 'CameraSetCal3DS2', 'CameraSetCal3Fisheye', 'CameraSetCal3Unified', 'CameraSetCal3_S2', 'CameraSetPinholePoseCal3_S2', 'Chebyshev1Basis', 'Chebyshev2', 'Chebyshev2Basis', 'CombinedImuFactor', 'ComponentDerivativeFactorChebyshev1Basis', 'ComponentDerivativeFactorChebyshev2', 'ComponentDerivativeFactorChebyshev2Basis', 'ComponentDerivativeFactorFourierBasis', 'ConjugateGradientParameters', 'ConstantTwistScenario', 'ConstantVelocityFactor', 'ConvertNoiseModel', 'CustomFactor', 'DSFMapIndexPair', 'DecisionTreeFactor', 'DegeneracyMode', 'DerivativeFactorChebyshev1Basis', 'DerivativeFactorChebyshev2', 'DerivativeFactorChebyshev2Basis', 'DerivativeFactorFourierBasis', 'DiscreteBayesNet', 'DiscreteBayesTree', 'DiscreteBayesTreeClique', 'DiscreteCluster', 'DiscreteConditional', 'DiscreteDistribution', 'DiscreteEliminationTree', 'DiscreteFactor', 'DiscreteFactorGraph', 'DiscreteJunctionTree', 'DiscreteKeys', 'DiscreteLookupDAG', 'DiscreteLookupTable', 'DiscreteMarginals', 'DiscreteScenario', 'DiscreteSearch', 'DiscreteSearchSolution', 'DiscreteValues', 'DoglegOptimizer', 'DoglegParams', 'DotWriter', 'DummyPreconditionerParameters', 'EdgeKey', 'EliminateDiscrete', 'EliminateForMPE', 'EliminateQR', 'EpipolarTransfer', 'EssentialMatrix', 'EssentialMatrixConstraint', 'EssentialMatrixFactor', 'EssentialMatrixFactor2', 'EssentialMatrixFactor3', 'EssentialMatrixFactor4Cal3Bundler', 'EssentialMatrixFactor4Cal3DS2', 'EssentialMatrixFactor4Cal3Fisheye', 'EssentialMatrixFactor4Cal3Unified', 'EssentialMatrixFactor4Cal3_S2', 'EssentialMatrixFactor4Cal3f', 'EssentialMatrixFactor5Cal3Bundler', 'EssentialMatrixFactor5Cal3DS2', 'EssentialMatrixFactor5Cal3Fisheye', 'EssentialMatrixFactor5Cal3Unified', 'EssentialMatrixFactor5Cal3_S2', 'EssentialMatrixFactor5Cal3f', 'EssentialTransferFactorCal3Bundler', 'EssentialTransferFactorCal3_S2', 'EssentialTransferFactorCal3f', 'EssentialTransferFactorKCal3Bundler', 'EssentialTransferFactorKCal3_S2', 'EssentialTransferFactorKCal3f', 'EvaluationFactorChebyshev1Basis', 'EvaluationFactorChebyshev2', 'EvaluationFactorChebyshev2Basis', 'EvaluationFactorFourierBasis', 'Event', 'ExtendedKalmanFilterConstantBias', 'ExtendedKalmanFilterGal3', 'ExtendedKalmanFilterNavState', 'ExtendedKalmanFilterPoint2', 'ExtendedKalmanFilterPoint3', 'ExtendedKalmanFilterPose2', 'ExtendedKalmanFilterPose3', 'ExtendedKalmanFilterRot2', 'ExtendedKalmanFilterRot3', 'ExtendedKalmanFilterSL4', 'ExtendedKalmanFilterSimilarity2', 'ExtendedKalmanFilterSimilarity3', 'Factor', 'FindKarcherMeanPoint2', 'FindKarcherMeanPoint3', 
'FindKarcherMeanPose2', 'FindKarcherMeanPose3', 'FindKarcherMeanRot2', 'FindKarcherMeanRot3', 'FindKarcherMeanSO3', 'FindKarcherMeanSO4', 'FitBasisChebyshev1Basis', 'FitBasisChebyshev2', 'FitBasisChebyshev2Basis', 'FitBasisFourierBasis', 'FixedLagSmoother', 'FixedLagSmootherResult', 'FourierBasis', 'FrobeniusBetweenFactorGal3', 'FrobeniusBetweenFactorNLGal3', 'FrobeniusBetweenFactorNLPose2', 'FrobeniusBetweenFactorNLPose3', 'FrobeniusBetweenFactorNLRot2', 'FrobeniusBetweenFactorNLRot3', 'FrobeniusBetweenFactorNLSL4', 'FrobeniusBetweenFactorNLSO3', 'FrobeniusBetweenFactorNLSO4', 'FrobeniusBetweenFactorNLSimilarity2', 'FrobeniusBetweenFactorNLSimilarity3', 'FrobeniusBetweenFactorPose2', 'FrobeniusBetweenFactorPose3', 'FrobeniusBetweenFactorRot2', 'FrobeniusBetweenFactorRot3', 'FrobeniusBetweenFactorSO3', 'FrobeniusBetweenFactorSO4', 'FrobeniusFactorGal3', 'FrobeniusFactorPose2', 'FrobeniusFactorPose3', 'FrobeniusFactorRot2', 'FrobeniusFactorRot3', 'FrobeniusFactorSL4', 'FrobeniusFactorSO3', 'FrobeniusFactorSO4', 'FrobeniusFactorSimilarity2', 'FrobeniusFactorSimilarity3', 'FrobeniusPriorGal3', 'FrobeniusPriorPose2', 'FrobeniusPriorPose3', 'FrobeniusPriorRot2', 'FrobeniusPriorRot3', 'FrobeniusPriorSL4', 'FrobeniusPriorSO3', 'FrobeniusPriorSO4', 'FrobeniusPriorSimilarity2', 'FrobeniusPriorSimilarity3', 'FundamentalMatrix', 'GPSFactor', 'GPSFactor2', 'GPSFactor2Arm', 'GPSFactor2ArmCalib', 'GPSFactorArm', 'GPSFactorArmCalib', 'Gal3', 'Gal3ImuEKF', 'GaussNewtonOptimizer', 'GaussNewtonParams', 'GaussianBayesNet', 'GaussianBayesTree', 'GaussianBayesTreeClique', 'GaussianConditional', 'GaussianDensity', 'GaussianEliminationTree', 'GaussianFactor', 'GaussianFactorGraph', 'GaussianISAM', 'GeneralSFMFactor2Cal3Bundler', 'GeneralSFMFactor2Cal3DS2', 'GeneralSFMFactor2Cal3Fisheye', 'GeneralSFMFactor2Cal3Unified', 'GeneralSFMFactor2Cal3_S2', 'GeneralSFMFactor2Cal3f', 'GeneralSFMFactorCal3Bundler', 'GeneralSFMFactorCal3DS2', 'GeneralSFMFactorCal3Fisheye', 'GeneralSFMFactorCal3Unified', 'GeneralSFMFactorCal3_S2', 'GeneralSFMFactorPoseCal3Bundler', 'GeneralSFMFactorPoseCal3DS2', 'GeneralSFMFactorPoseCal3Fisheye', 'GeneralSFMFactorPoseCal3Unified', 'GeneralSFMFactorPoseCal3_S2', 'GenericProjectionFactorCal3DS2', 'GenericProjectionFactorCal3Fisheye', 'GenericProjectionFactorCal3Unified', 'GenericProjectionFactorCal3_S2', 'GenericStereoFactor3D', 'GenericValueCal3Bundler', 'GenericValueCal3DS2', 'GenericValueCal3Fisheye', 'GenericValueCal3Unified', 'GenericValueCal3_S2', 'GenericValueCalibratedCamera', 'GenericValueConstantBias', 'GenericValueEssentialMatrix', 'GenericValueMatrix', 'GenericValuePoint2', 'GenericValuePoint3', 'GenericValuePose2', 'GenericValuePose3', 'GenericValueRot2', 'GenericValueRot3', 'GenericValueStereoPoint2', 'GenericValueVector', 'GncGaussNewtonOptimizer', 'GncGaussNewtonParams', 'GncLMOptimizer', 'GncLMParams', 'GncLossType', 'GraphvizFormatting', 'HessianFactor', 'HybridBayesNet', 'HybridBayesTree', 'HybridBayesTreeClique', 'HybridConditional', 'HybridFactor', 'HybridGaussianConditional', 'HybridGaussianFactor', 'HybridGaussianFactorGraph', 'HybridNonlinearFactor', 'HybridNonlinearFactorGraph', 'HybridOrdering', 'HybridSmoother', 'HybridValues', 'ISAM2', 'ISAM2Clique', 'ISAM2DoglegParams', 'ISAM2GaussNewtonParams', 'ISAM2Params', 'ISAM2Result', 'ISAM2ThresholdMap', 'ImuFactor', 'ImuFactor2', 'IncrementalFixedLagSmoother', 'IndexPair', 'IndexPairSetAsArray', 'InitializePose3', 'InvariantEKFGal3', 'InvariantEKFNavState', 'InvariantEKFPose2', 'InvariantEKFPose3', 'InvariantEKFRot3', 
'IterativeOptimizationParameters', 'JacobianFactor', 'JacobianVector', 'JointMarginal', 'KalmanFilter', 'KarcherMeanFactorGal3', 'KarcherMeanFactorPoint2', 'KarcherMeanFactorPoint3', 'KarcherMeanFactorPose2', 'KarcherMeanFactorPose3', 'KarcherMeanFactorRot2', 'KarcherMeanFactorRot3', 'KarcherMeanFactorSL4', 'KarcherMeanFactorSO3', 'KarcherMeanFactorSO4', 'KarcherMeanFactorSimilarity2', 'KarcherMeanFactorSimilarity3', 'KernelFunctionType', 'KeyGroupMap', 'KeyList', 'KeySet', 'LabeledSymbol', 'LeftLinearEKFGal3', 'LeftLinearEKFNavState', 'LeftLinearEKFPose2', 'LeftLinearEKFPose3', 'LeftLinearEKFRot3', 'LevenbergMarquardtOptimizer', 'LevenbergMarquardtParams', 'LieGroupEKFGal3', 'LieGroupEKFNavState', 'LieGroupEKFPose2', 'LieGroupEKFPose3', 'LieGroupEKFRot3', 'LinearContainerFactor', 'LinearizationMode', 'MFAS', 'MT19937', 'MagFactor', 'MagFactor1', 'MagPoseFactorPose2', 'MagPoseFactorPose3', 'ManifoldEKFGal3', 'ManifoldEKFNavState', 'ManifoldEKFPose2', 'ManifoldEKFPose3', 'ManifoldEKFRot3', 'ManifoldEKFUnit3', 'ManifoldEvaluationFactorChebyshev1BasisPose2', 'ManifoldEvaluationFactorChebyshev1BasisPose3', 'ManifoldEvaluationFactorChebyshev1BasisRot2', 'ManifoldEvaluationFactorChebyshev1BasisRot3', 'ManifoldEvaluationFactorChebyshev2BasisPose2', 'ManifoldEvaluationFactorChebyshev2BasisPose3', 'ManifoldEvaluationFactorChebyshev2BasisRot2', 'ManifoldEvaluationFactorChebyshev2BasisRot3', 'ManifoldEvaluationFactorChebyshev2Pose2', 'ManifoldEvaluationFactorChebyshev2Pose3', 'ManifoldEvaluationFactorChebyshev2Rot2', 'ManifoldEvaluationFactorChebyshev2Rot3', 'ManifoldEvaluationFactorFourierBasisPose2', 'ManifoldEvaluationFactorFourierBasisPose3', 'ManifoldEvaluationFactorFourierBasisRot2', 'ManifoldEvaluationFactorFourierBasisRot3', 'Marginals', 'NavState', 'NavStateImuEKF', 'NoiseFormat', 'NoiseModelFactor', 'NonlinearEquality2Cal3_S2', 'NonlinearEquality2CalibratedCamera', 'NonlinearEquality2ConstantBias', 'NonlinearEquality2Gal3', 'NonlinearEquality2PinholeCameraCal3Bundler', 'NonlinearEquality2PinholeCameraCal3Fisheye', 'NonlinearEquality2PinholeCameraCal3Unified', 'NonlinearEquality2PinholeCameraCal3_S2', 'NonlinearEquality2Point2', 'NonlinearEquality2Point3', 'NonlinearEquality2Pose2', 'NonlinearEquality2Pose3', 'NonlinearEquality2Rot2', 'NonlinearEquality2Rot3', 'NonlinearEquality2SL4', 'NonlinearEquality2SO3', 'NonlinearEquality2SO4', 'NonlinearEquality2SOn', 'NonlinearEquality2Similarity2', 'NonlinearEquality2Similarity3', 'NonlinearEquality2StereoPoint2', 'NonlinearEqualityCal3_S2', 'NonlinearEqualityCalibratedCamera', 'NonlinearEqualityConstantBias', 'NonlinearEqualityGal3', 'NonlinearEqualityPinholeCameraCal3Bundler', 'NonlinearEqualityPinholeCameraCal3Fisheye', 'NonlinearEqualityPinholeCameraCal3Unified', 'NonlinearEqualityPinholeCameraCal3_S2', 'NonlinearEqualityPoint2', 'NonlinearEqualityPoint3', 'NonlinearEqualityPose2', 'NonlinearEqualityPose3', 'NonlinearEqualityRot2', 'NonlinearEqualityRot3', 'NonlinearEqualitySL4', 'NonlinearEqualitySO3', 'NonlinearEqualitySO4', 'NonlinearEqualitySOn', 'NonlinearEqualitySimilarity2', 'NonlinearEqualitySimilarity3', 'NonlinearEqualityStereoPoint2', 'NonlinearFactor', 'NonlinearFactorGraph', 'NonlinearISAM', 'NonlinearOptimizer', 'NonlinearOptimizerParams', 'Ordering', 'OrientedPlane3', 'OrientedPlane3DirectionPrior', 'OrientedPlane3Factor', 'PCGSolverParameters', 'PinholeCameraCal3Bundler', 'PinholeCameraCal3DS2', 'PinholeCameraCal3Fisheye', 'PinholeCameraCal3Unified', 'PinholeCameraCal3_S2', 'PinholeCameraCal3f', 'PinholePoseCal3Bundler', 
'PinholePoseCal3DS2', 'PinholePoseCal3Fisheye', 'PinholePoseCal3Unified', 'PinholePoseCal3_S2', 'PlanarProjectionFactor1', 'PlanarProjectionFactor2', 'PlanarProjectionFactor3', 'Pose2', 'Pose3', 'Pose3AttitudeFactor', 'PoseRotationPrior2D', 'PoseRotationPrior3D', 'PoseTranslationPrior2D', 'PoseTranslationPrior3D', 'PreconditionerParameters', 'PreintegratedAhrsMeasurements', 'PreintegratedCombinedMeasurements', 'PreintegratedImuMeasurements', 'PreintegratedRotation', 'PreintegratedRotationParams', 'PreintegrationCombinedParams', 'PreintegrationParams', 'PrintDiscreteValues', 'PrintKeyList', 'PrintKeySet', 'PrintKeyVector', 'PriorFactorCal3Bundler', 'PriorFactorCal3DS2', 'PriorFactorCal3Fisheye', 'PriorFactorCal3Unified', 'PriorFactorCal3_S2', 'PriorFactorCalibratedCamera', 'PriorFactorConstantBias', 'PriorFactorDouble', 'PriorFactorGal3', 'PriorFactorNavState', 'PriorFactorPinholeCameraCal3Bundler', 'PriorFactorPinholeCameraCal3Fisheye', 'PriorFactorPinholeCameraCal3Unified', 'PriorFactorPinholeCameraCal3_S2', 'PriorFactorPoint2', 'PriorFactorPoint3', 'PriorFactorPose2', 'PriorFactorPose3', 'PriorFactorRot2', 'PriorFactorRot3', 'PriorFactorSL4', 'PriorFactorSO3', 'PriorFactorSO4', 'PriorFactorSOn', 'PriorFactorSimilarity2', 'PriorFactorSimilarity3', 'PriorFactorStereoPoint2', 'PriorFactorUnit3', 'PriorFactorVector', 'Quaternion', 'RangeFactor2', 'RangeFactor2D', 'RangeFactor3', 'RangeFactor3D', 'RangeFactorCalibratedCamera', 'RangeFactorCalibratedCameraPoint', 'RangeFactorPose2', 'RangeFactorPose3', 'RangeFactorSimpleCamera', 'RangeFactorSimpleCameraPoint', 'RangeFactorWithTransform2D', 'RangeFactorWithTransform3D', 'RangeFactorWithTransformPose2', 'RangeFactorWithTransformPose3', 'RedirectCout', 'ReferenceFrameFactorPoint3Pose3', 'Rot2', 'Rot3', 'Rot3AttitudeFactor', 'RotateDirectionsFactor', 'RotateFactor', 'SL4', 'SO3', 'SO4', 'SOn', 'Sampler', 'Scenario', 'ScenarioRunner', 'SfmData', 'SfmTrack', 'SfmTrack2d', 'ShonanAveraging2', 'ShonanAveraging3', 'ShonanAveragingParameters2', 'ShonanAveragingParameters3', 'ShonanFactor3', 'Similarity2', 'Similarity3', 'SimpleFundamentalMatrix', 'SmartFactorBasePinholeCameraCal3Bundler', 'SmartFactorBasePinholeCameraCal3DS2', 'SmartFactorBasePinholeCameraCal3Fisheye', 'SmartFactorBasePinholeCameraCal3Unified', 'SmartFactorBasePinholeCameraCal3_S2', 'SmartFactorBasePinholePoseCal3Bundler', 'SmartFactorBasePinholePoseCal3DS2', 'SmartFactorBasePinholePoseCal3Fisheye', 'SmartFactorBasePinholePoseCal3Unified', 'SmartFactorBasePinholePoseCal3_S2', 'SmartProjectionFactorPinholeCameraCal3Bundler', 'SmartProjectionFactorPinholeCameraCal3DS2', 'SmartProjectionFactorPinholeCameraCal3Fisheye', 'SmartProjectionFactorPinholeCameraCal3Unified', 'SmartProjectionFactorPinholeCameraCal3_S2', 'SmartProjectionFactorPinholePoseCal3Bundler', 'SmartProjectionFactorPinholePoseCal3DS2', 'SmartProjectionFactorPinholePoseCal3Fisheye', 'SmartProjectionFactorPinholePoseCal3Unified', 'SmartProjectionFactorPinholePoseCal3_S2', 'SmartProjectionParams', 'SmartProjectionPoseFactorCal3Bundler', 'SmartProjectionPoseFactorCal3DS2', 'SmartProjectionPoseFactorCal3Fisheye', 'SmartProjectionPoseFactorCal3Unified', 'SmartProjectionPoseFactorCal3_S2', 'SmartProjectionRigFactorPinholePoseCal3Bundler', 'SmartProjectionRigFactorPinholePoseCal3DS2', 'SmartProjectionRigFactorPinholePoseCal3Fisheye', 'SmartProjectionRigFactorPinholePoseCal3Unified', 'SmartProjectionRigFactorPinholePoseCal3_S2', 'StereoCamera', 'StereoPoint2', 'SubgraphSolver', 'SubgraphSolverParameters', 'Symbol', 'SymbolicBayesNet', 
'SymbolicBayesTree', 'SymbolicBayesTreeClique', 'SymbolicCluster', 'SymbolicConditional', 'SymbolicEliminationTree', 'SymbolicFactor', 'SymbolicFactorGraph', 'SymbolicJunctionTree', 'TableDistribution', 'TableFactor', 'TransferFactorFundamentalMatrix', 'TransferFactorSimpleFundamentalMatrix', 'TranslationRecovery', 'TriangulationFactorCal3Bundler', 'TriangulationFactorCal3DS2', 'TriangulationFactorCal3Fisheye', 'TriangulationFactorCal3Unified', 'TriangulationFactorCal3_S2', 'TriangulationFactorPoseCal3Bundler', 'TriangulationFactorPoseCal3DS2', 'TriangulationFactorPoseCal3Fisheye', 'TriangulationFactorPoseCal3Unified', 'TriangulationFactorPoseCal3_S2', 'TriangulationParameters', 'TriangulationResult', 'Unit3', 'Value', 'Values', 'VariableIndex', 'VectorComponentFactorChebyshev1Basis', 'VectorComponentFactorChebyshev2', 'VectorComponentFactorChebyshev2Basis', 'VectorComponentFactorFourierBasis', 'VectorDerivativeFactorChebyshev1Basis', 'VectorDerivativeFactorChebyshev2', 'VectorDerivativeFactorChebyshev2Basis', 'VectorDerivativeFactorFourierBasis', 'VectorEvaluationFactorChebyshev1Basis', 'VectorEvaluationFactorChebyshev2', 'VectorEvaluationFactorChebyshev2Basis', 'VectorEvaluationFactorFourierBasis', 'VectorValues', 'cartesianProduct', 'checkConvergence', 'gtsfm', 'html', 'imuBias', 'initialCamerasAndPointsEstimate', 'initialCamerasEstimate', 'isDebugVersion', 'lago', 'linear_independent', 'load2D', 'load3D', 'markdown', 'mrsymbol', 'mrsymbolChr', 'mrsymbolIndex', 'mrsymbolLabel', 'noiseModel', 'parse2DFactors', 'parse3DFactors', 'readBal', 'readG2o', 'save2D', 'so3', 'symbol', 'symbolChr', 'symbolIndex', 'symbol_shorthand', 'triangulateNonlinear', 'triangulatePoint3', 'triangulateSafe', 'utilities', 'writeBAL', 'writeG2o']

Added (new line 14):
__all__: list[str] = ['AHRSFactor', 'AcceleratingScenario', 'AlgebraicDecisionTreeKey', 'AssignmentKey', 'BarometricFactor', 'BatchFixedLagSmoother', 'BearingFactor2D', 'BearingFactor3D', 'BearingFactorPose2', 'BearingRange2D', 'BearingRange3D', 'BearingRangeFactor2D', 'BearingRangeFactor3D', 'BearingRangeFactorPose2', 'BearingRangeFactorPose3', 'BearingRangePose2', 'BearingRangePose3', 'BetweenFactorConstantBias', 'BetweenFactorDouble', 'BetweenFactorPoint2', 'BetweenFactorPoint3', 'BetweenFactorPose2', 'BetweenFactorPose3', 'BetweenFactorRot2', 'BetweenFactorRot3', 'BetweenFactorSL4', 'BetweenFactorSO3', 'BetweenFactorSO4', 'BetweenFactorSimilarity2', 'BetweenFactorSimilarity3', 'BetweenFactorVector', 'BinaryMeasurementPoint3', 'BinaryMeasurementRot3', 'BinaryMeasurementUnit3', 'BlockJacobiPreconditionerParameters', 'Cal3', 'Cal3Bundler', 'Cal3DS2', 'Cal3DS2_Base', 'Cal3Fisheye', 'Cal3Unified', 'Cal3_S2', 'Cal3_S2Stereo', 'Cal3f', 'CalibratedCamera', 'CameraSetCal3Bundler', 'CameraSetCal3DS2', 'CameraSetCal3Fisheye', 'CameraSetCal3Unified', 'CameraSetCal3_S2', 'CameraSetPinholePoseCal3_S2', 'Chebyshev1Basis', 'Chebyshev2', 'Chebyshev2Basis', 'CombinedImuFactor', 'ComponentDerivativeFactorChebyshev1Basis', 'ComponentDerivativeFactorChebyshev2', 'ComponentDerivativeFactorChebyshev2Basis', 'ComponentDerivativeFactorFourierBasis', 'ConcentratedGaussianDouble', 'ConcentratedGaussianGal3', 'ConcentratedGaussianPoint2', 'ConcentratedGaussianPoint3', 'ConcentratedGaussianPose2', 'ConcentratedGaussianPose3', 'ConcentratedGaussianRot2', 'ConcentratedGaussianRot3', 'ConcentratedGaussianSL4', 'ConcentratedGaussianSO3', 'ConcentratedGaussianSO4', 'ConcentratedGaussianSOn', 'ConcentratedGaussianSimilarity2', 'ConcentratedGaussianSimilarity3', 'ConcentratedGaussianStereoPoint2', 'ConcentratedGaussianVector', 'ConjugateGradientParameters', 'ConstantTwistScenario', 'ConstantVelocityFactor', 'ConvertNoiseModel', 'CustomFactor', 'DSFMapIndexPair', 'DecisionTreeFactor', 'DegeneracyMode', 'DerivativeFactorChebyshev1Basis', 'DerivativeFactorChebyshev2', 'DerivativeFactorChebyshev2Basis', 'DerivativeFactorFourierBasis', 'DiscreteBayesNet', 'DiscreteBayesTree', 'DiscreteBayesTreeClique', 'DiscreteCluster', 'DiscreteConditional', 'DiscreteDistribution', 'DiscreteEliminationTree', 'DiscreteFactor', 'DiscreteFactorGraph', 'DiscreteJunctionTree', 'DiscreteKeys', 'DiscreteLookupDAG', 'DiscreteLookupTable', 'DiscreteMarginals', 'DiscreteScenario', 'DiscreteSearch', 'DiscreteSearchSolution', 'DiscreteValues', 'DoglegOptimizer', 'DoglegParams', 'DotWriter', 'DummyPreconditionerParameters', 'EdgeKey', 'EliminateDiscrete', 'EliminateForMPE', 'EliminateQR', 'EpipolarTransfer', 'EssentialMatrix', 'EssentialMatrixConstraint', 'EssentialMatrixFactor', 'EssentialMatrixFactor2', 'EssentialMatrixFactor3', 'EssentialMatrixFactor4Cal3Bundler', 'EssentialMatrixFactor4Cal3DS2', 'EssentialMatrixFactor4Cal3Fisheye', 'EssentialMatrixFactor4Cal3Unified', 'EssentialMatrixFactor4Cal3_S2', 'EssentialMatrixFactor4Cal3f', 'EssentialMatrixFactor5Cal3Bundler', 'EssentialMatrixFactor5Cal3DS2', 'EssentialMatrixFactor5Cal3Fisheye', 'EssentialMatrixFactor5Cal3Unified', 'EssentialMatrixFactor5Cal3_S2', 'EssentialMatrixFactor5Cal3f', 'EssentialTransferFactorCal3Bundler', 'EssentialTransferFactorCal3_S2', 'EssentialTransferFactorCal3f', 'EssentialTransferFactorKCal3Bundler', 'EssentialTransferFactorKCal3_S2', 'EssentialTransferFactorKCal3f', 'EvaluationFactorChebyshev1Basis', 'EvaluationFactorChebyshev2', 'EvaluationFactorChebyshev2Basis', 
'EvaluationFactorFourierBasis', 'Event', 'ExtendedKalmanFilterConstantBias', 'ExtendedKalmanFilterGal3', 'ExtendedKalmanFilterNavState', 'ExtendedKalmanFilterPoint2', 'ExtendedKalmanFilterPoint3', 'ExtendedKalmanFilterPose2', 'ExtendedKalmanFilterPose3', 'ExtendedKalmanFilterRot2', 'ExtendedKalmanFilterRot3', 'ExtendedKalmanFilterSL4', 'ExtendedKalmanFilterSimilarity2', 'ExtendedKalmanFilterSimilarity3', 'ExtendedPriorFactorCal3Bundler', 'ExtendedPriorFactorCal3DS2', 'ExtendedPriorFactorCal3Fisheye', 'ExtendedPriorFactorCal3Unified', 'ExtendedPriorFactorCal3_S2', 'ExtendedPriorFactorCalibratedCamera', 'ExtendedPriorFactorConstantBias', 'ExtendedPriorFactorDouble', 'ExtendedPriorFactorGal3', 'ExtendedPriorFactorNavState', 'ExtendedPriorFactorPinholeCameraCal3Bundler', 'ExtendedPriorFactorPinholeCameraCal3Fisheye', 'ExtendedPriorFactorPinholeCameraCal3Unified', 'ExtendedPriorFactorPinholeCameraCal3_S2', 'ExtendedPriorFactorPoint2', 'ExtendedPriorFactorPoint3', 'ExtendedPriorFactorPose2', 'ExtendedPriorFactorPose3', 'ExtendedPriorFactorRot2', 'ExtendedPriorFactorRot3', 'ExtendedPriorFactorSL4', 'ExtendedPriorFactorSO3', 'ExtendedPriorFactorSO4', 'ExtendedPriorFactorSOn', 'ExtendedPriorFactorSimilarity2', 'ExtendedPriorFactorSimilarity3', 'ExtendedPriorFactorStereoPoint2', 'ExtendedPriorFactorUnit3', 'ExtendedPriorFactorVector', 'Factor', 'FindKarcherMeanPoint2', 'FindKarcherMeanPoint3', 'FindKarcherMeanPose2', 'FindKarcherMeanPose3', 'FindKarcherMeanRot2', 'FindKarcherMeanRot3', 'FindKarcherMeanSO3', 'FindKarcherMeanSO4', 'FitBasisChebyshev1Basis', 'FitBasisChebyshev2', 'FitBasisChebyshev2Basis', 'FitBasisFourierBasis', 'FixedLagSmoother', 'FixedLagSmootherResult', 'FourierBasis', 'FrobeniusBetweenFactorGal3', 'FrobeniusBetweenFactorNLGal3', 'FrobeniusBetweenFactorNLPose2', 'FrobeniusBetweenFactorNLPose3', 'FrobeniusBetweenFactorNLRot2', 'FrobeniusBetweenFactorNLRot3', 'FrobeniusBetweenFactorNLSL4', 'FrobeniusBetweenFactorNLSO3', 'FrobeniusBetweenFactorNLSO4', 'FrobeniusBetweenFactorNLSimilarity2', 'FrobeniusBetweenFactorNLSimilarity3', 'FrobeniusBetweenFactorPose2', 'FrobeniusBetweenFactorPose3', 'FrobeniusBetweenFactorRot2', 'FrobeniusBetweenFactorRot3', 'FrobeniusBetweenFactorSO3', 'FrobeniusBetweenFactorSO4', 'FrobeniusFactorGal3', 'FrobeniusFactorPose2', 'FrobeniusFactorPose3', 'FrobeniusFactorRot2', 'FrobeniusFactorRot3', 'FrobeniusFactorSL4', 'FrobeniusFactorSO3', 'FrobeniusFactorSO4', 'FrobeniusFactorSimilarity2', 'FrobeniusFactorSimilarity3', 'FrobeniusPriorGal3', 'FrobeniusPriorPose2', 'FrobeniusPriorPose3', 'FrobeniusPriorRot2', 'FrobeniusPriorRot3', 'FrobeniusPriorSL4', 'FrobeniusPriorSO3', 'FrobeniusPriorSO4', 'FrobeniusPriorSimilarity2', 'FrobeniusPriorSimilarity3', 'FundamentalMatrix', 'GPSFactor', 'GPSFactor2', 'GPSFactor2Arm', 'GPSFactor2ArmCalib', 'GPSFactorArm', 'GPSFactorArmCalib', 'Gal3', 'Gal3ImuEKF', 'GaussNewtonOptimizer', 'GaussNewtonParams', 'GaussianBayesNet', 'GaussianBayesTree', 'GaussianBayesTreeClique', 'GaussianConditional', 'GaussianDensity', 'GaussianEliminationTree', 'GaussianFactor', 'GaussianFactorGraph', 'GaussianISAM', 'GeneralSFMFactor2Cal3Bundler', 'GeneralSFMFactor2Cal3DS2', 'GeneralSFMFactor2Cal3Fisheye', 'GeneralSFMFactor2Cal3Unified', 'GeneralSFMFactor2Cal3_S2', 'GeneralSFMFactor2Cal3f', 'GeneralSFMFactorCal3Bundler', 'GeneralSFMFactorCal3DS2', 'GeneralSFMFactorCal3Fisheye', 'GeneralSFMFactorCal3Unified', 'GeneralSFMFactorCal3_S2', 'GeneralSFMFactorPoseCal3Bundler', 'GeneralSFMFactorPoseCal3DS2', 'GeneralSFMFactorPoseCal3Fisheye', 
'GeneralSFMFactorPoseCal3Unified', 'GeneralSFMFactorPoseCal3_S2', 'GenericProjectionFactorCal3DS2', 'GenericProjectionFactorCal3Fisheye', 'GenericProjectionFactorCal3Unified', 'GenericProjectionFactorCal3_S2', 'GenericStereoFactor3D', 'GenericValueCal3Bundler', 'GenericValueCal3DS2', 'GenericValueCal3Fisheye', 'GenericValueCal3Unified', 'GenericValueCal3_S2', 'GenericValueCalibratedCamera', 'GenericValueConstantBias', 'GenericValueEssentialMatrix', 'GenericValueMatrix', 'GenericValuePoint2', 'GenericValuePoint3', 'GenericValuePose2', 'GenericValuePose3', 'GenericValueRot2', 'GenericValueRot3', 'GenericValueStereoPoint2', 'GenericValueVector', 'GncGaussNewtonOptimizer', 'GncGaussNewtonParams', 'GncLMOptimizer', 'GncLMParams', 'GncLossType', 'GraphvizFormatting', 'HessianFactor', 'HybridBayesNet', 'HybridBayesTree', 'HybridBayesTreeClique', 'HybridConditional', 'HybridFactor', 'HybridGaussianConditional', 'HybridGaussianFactor', 'HybridGaussianFactorGraph', 'HybridNonlinearFactor', 'HybridNonlinearFactorGraph', 'HybridOrdering', 'HybridSmoother', 'HybridValues', 'ISAM2', 'ISAM2Clique', 'ISAM2DoglegParams', 'ISAM2GaussNewtonParams', 'ISAM2Params', 'ISAM2Result', 'ISAM2ThresholdMap', 'ImuFactor', 'ImuFactor2', 'IncrementalFixedLagSmoother', 'IndexPair', 'IndexPairSetAsArray', 'InitializePose3', 'InvariantEKFGal3', 'InvariantEKFNavState', 'InvariantEKFPose2', 'InvariantEKFPose3', 'InvariantEKFRot3', 'IterativeOptimizationParameters', 'JacobianFactor', 'JacobianVector', 'JointMarginal', 'KalmanFilter', 'KarcherMeanFactorGal3', 'KarcherMeanFactorPoint2', 'KarcherMeanFactorPoint3', 'KarcherMeanFactorPose2', 'KarcherMeanFactorPose3', 'KarcherMeanFactorRot2', 'KarcherMeanFactorRot3', 'KarcherMeanFactorSL4', 'KarcherMeanFactorSO3', 'KarcherMeanFactorSO4', 'KarcherMeanFactorSimilarity2', 'KarcherMeanFactorSimilarity3', 'KernelFunctionType', 'KeyGroupMap', 'KeyList', 'KeySet', 'LabeledSymbol', 'LeftLinearEKFGal3', 'LeftLinearEKFNavState', 'LeftLinearEKFPose2', 'LeftLinearEKFPose3', 'LeftLinearEKFRot3', 'LevenbergMarquardtOptimizer', 'LevenbergMarquardtParams', 'LieGroupEKFGal3', 'LieGroupEKFNavState', 'LieGroupEKFPose2', 'LieGroupEKFPose3', 'LieGroupEKFRot3', 'LinearContainerFactor', 'LinearizationMode', 'MFAS', 'MT19937', 'MagFactor', 'MagFactor1', 'MagPoseFactorPose2', 'MagPoseFactorPose3', 'ManifoldEKFGal3', 'ManifoldEKFNavState', 'ManifoldEKFPose2', 'ManifoldEKFPose3', 'ManifoldEKFRot3', 'ManifoldEKFUnit3', 'ManifoldEvaluationFactorChebyshev1BasisPose2', 'ManifoldEvaluationFactorChebyshev1BasisPose3', 'ManifoldEvaluationFactorChebyshev1BasisRot2', 'ManifoldEvaluationFactorChebyshev1BasisRot3', 'ManifoldEvaluationFactorChebyshev2BasisPose2', 'ManifoldEvaluationFactorChebyshev2BasisPose3', 'ManifoldEvaluationFactorChebyshev2BasisRot2', 'ManifoldEvaluationFactorChebyshev2BasisRot3', 'ManifoldEvaluationFactorChebyshev2Pose2', 'ManifoldEvaluationFactorChebyshev2Pose3', 'ManifoldEvaluationFactorChebyshev2Rot2', 'ManifoldEvaluationFactorChebyshev2Rot3', 'ManifoldEvaluationFactorFourierBasisPose2', 'ManifoldEvaluationFactorFourierBasisPose3', 'ManifoldEvaluationFactorFourierBasisRot2', 'ManifoldEvaluationFactorFourierBasisRot3', 'Marginals', 'NavState', 'NavStateImuEKF', 'NoiseFormat', 'NoiseModelFactor', 'NonlinearEquality2Cal3_S2', 'NonlinearEquality2CalibratedCamera', 'NonlinearEquality2ConstantBias', 'NonlinearEquality2Gal3', 'NonlinearEquality2PinholeCameraCal3Bundler', 'NonlinearEquality2PinholeCameraCal3Fisheye', 'NonlinearEquality2PinholeCameraCal3Unified', 'NonlinearEquality2PinholeCameraCal3_S2', 
'NonlinearEquality2Point2', 'NonlinearEquality2Point3', 'NonlinearEquality2Pose2', 'NonlinearEquality2Pose3', 'NonlinearEquality2Rot2', 'NonlinearEquality2Rot3', 'NonlinearEquality2SL4', 'NonlinearEquality2SO3', 'NonlinearEquality2SO4', 'NonlinearEquality2SOn', 'NonlinearEquality2Similarity2', 'NonlinearEquality2Similarity3', 'NonlinearEquality2StereoPoint2', 'NonlinearEqualityCal3_S2', 'NonlinearEqualityCalibratedCamera', 'NonlinearEqualityConstantBias', 'NonlinearEqualityGal3', 'NonlinearEqualityPinholeCameraCal3Bundler', 'NonlinearEqualityPinholeCameraCal3Fisheye', 'NonlinearEqualityPinholeCameraCal3Unified', 'NonlinearEqualityPinholeCameraCal3_S2', 'NonlinearEqualityPoint2', 'NonlinearEqualityPoint3', 'NonlinearEqualityPose2', 'NonlinearEqualityPose3', 'NonlinearEqualityRot2', 'NonlinearEqualityRot3', 'NonlinearEqualitySL4', 'NonlinearEqualitySO3', 'NonlinearEqualitySO4', 'NonlinearEqualitySOn', 'NonlinearEqualitySimilarity2', 'NonlinearEqualitySimilarity3', 'NonlinearEqualityStereoPoint2', 'NonlinearFactor', 'NonlinearFactorGraph', 'NonlinearISAM', 'NonlinearOptimizer', 'NonlinearOptimizerParams', 'Ordering', 'OrientedPlane3', 'OrientedPlane3DirectionPrior', 'OrientedPlane3Factor', 'PCGSolverParameters', 'PinholeCameraCal3Bundler', 'PinholeCameraCal3DS2', 'PinholeCameraCal3Fisheye', 'PinholeCameraCal3Unified', 'PinholeCameraCal3_S2', 'PinholeCameraCal3f', 'PinholePoseCal3Bundler', 'PinholePoseCal3DS2', 'PinholePoseCal3Fisheye', 'PinholePoseCal3Unified', 'PinholePoseCal3_S2', 'PlanarProjectionFactor1', 'PlanarProjectionFactor2', 'PlanarProjectionFactor3', 'Pose2', 'Pose3', 'Pose3AttitudeFactor', 'PoseRotationPrior2D', 'PoseRotationPrior3D', 'PoseTranslationPrior2D', 'PoseTranslationPrior3D', 'PreconditionerParameters', 'PreintegratedAhrsMeasurements', 'PreintegratedCombinedMeasurements', 'PreintegratedImuMeasurements', 'PreintegratedRotation', 'PreintegratedRotationParams', 'PreintegrationCombinedParams', 'PreintegrationParams', 'PrintDiscreteValues', 'PrintKeyList', 'PrintKeySet', 'PrintKeyVector', 'PriorFactorCal3Bundler', 'PriorFactorCal3DS2', 'PriorFactorCal3Fisheye', 'PriorFactorCal3Unified', 'PriorFactorCal3_S2', 'PriorFactorCalibratedCamera', 'PriorFactorConstantBias', 'PriorFactorDouble', 'PriorFactorGal3', 'PriorFactorNavState', 'PriorFactorPinholeCameraCal3Bundler', 'PriorFactorPinholeCameraCal3Fisheye', 'PriorFactorPinholeCameraCal3Unified', 'PriorFactorPinholeCameraCal3_S2', 'PriorFactorPoint2', 'PriorFactorPoint3', 'PriorFactorPose2', 'PriorFactorPose3', 'PriorFactorRot2', 'PriorFactorRot3', 'PriorFactorSL4', 'PriorFactorSO3', 'PriorFactorSO4', 'PriorFactorSOn', 'PriorFactorSimilarity2', 'PriorFactorSimilarity3', 'PriorFactorStereoPoint2', 'PriorFactorUnit3', 'PriorFactorVector', 'Quaternion', 'RangeFactor2', 'RangeFactor2D', 'RangeFactor3', 'RangeFactor3D', 'RangeFactorCalibratedCamera', 'RangeFactorCalibratedCameraPoint', 'RangeFactorPose2', 'RangeFactorPose3', 'RangeFactorSimpleCamera', 'RangeFactorSimpleCameraPoint', 'RangeFactorWithTransform2D', 'RangeFactorWithTransform3D', 'RangeFactorWithTransformPose2', 'RangeFactorWithTransformPose3', 'RedirectCout', 'ReferenceFrameFactorPoint3Pose3', 'Rot2', 'Rot3', 'Rot3AttitudeFactor', 'RotateDirectionsFactor', 'RotateFactor', 'SL4', 'SO3', 'SO4', 'SOn', 'Sampler', 'Scenario', 'ScenarioRunner', 'SfmData', 'SfmTrack', 'SfmTrack2d', 'ShonanAveraging2', 'ShonanAveraging3', 'ShonanAveragingParameters2', 'ShonanAveragingParameters3', 'ShonanFactor3', 'Similarity2', 'Similarity3', 'SimpleFundamentalMatrix', 
'SmartFactorBasePinholeCameraCal3Bundler', 'SmartFactorBasePinholeCameraCal3DS2', 'SmartFactorBasePinholeCameraCal3Fisheye', 'SmartFactorBasePinholeCameraCal3Unified', 'SmartFactorBasePinholeCameraCal3_S2', 'SmartFactorBasePinholePoseCal3Bundler', 'SmartFactorBasePinholePoseCal3DS2', 'SmartFactorBasePinholePoseCal3Fisheye', 'SmartFactorBasePinholePoseCal3Unified', 'SmartFactorBasePinholePoseCal3_S2', 'SmartProjectionFactorPinholeCameraCal3Bundler', 'SmartProjectionFactorPinholeCameraCal3DS2', 'SmartProjectionFactorPinholeCameraCal3Fisheye', 'SmartProjectionFactorPinholeCameraCal3Unified', 'SmartProjectionFactorPinholeCameraCal3_S2', 'SmartProjectionFactorPinholePoseCal3Bundler', 'SmartProjectionFactorPinholePoseCal3DS2', 'SmartProjectionFactorPinholePoseCal3Fisheye', 'SmartProjectionFactorPinholePoseCal3Unified', 'SmartProjectionFactorPinholePoseCal3_S2', 'SmartProjectionParams', 'SmartProjectionPoseFactorCal3Bundler', 'SmartProjectionPoseFactorCal3DS2', 'SmartProjectionPoseFactorCal3Fisheye', 'SmartProjectionPoseFactorCal3Unified', 'SmartProjectionPoseFactorCal3_S2', 'SmartProjectionRigFactorPinholePoseCal3Bundler', 'SmartProjectionRigFactorPinholePoseCal3DS2', 'SmartProjectionRigFactorPinholePoseCal3Fisheye', 'SmartProjectionRigFactorPinholePoseCal3Unified', 'SmartProjectionRigFactorPinholePoseCal3_S2', 'StereoCamera', 'StereoPoint2', 'SubgraphSolver', 'SubgraphSolverParameters', 'Symbol', 'SymbolicBayesNet', 'SymbolicBayesTree', 'SymbolicBayesTreeClique', 'SymbolicCluster', 'SymbolicConditional', 'SymbolicEliminationTree', 'SymbolicFactor', 'SymbolicFactorGraph', 'SymbolicJunctionTree', 'TableDistribution', 'TableFactor', 'TransferFactorFundamentalMatrix', 'TransferFactorSimpleFundamentalMatrix', 'TranslationRecovery', 'TriangulationFactorCal3Bundler', 'TriangulationFactorCal3DS2', 'TriangulationFactorCal3Fisheye', 'TriangulationFactorCal3Unified', 'TriangulationFactorCal3_S2', 'TriangulationFactorPoseCal3Bundler', 'TriangulationFactorPoseCal3DS2', 'TriangulationFactorPoseCal3Fisheye', 'TriangulationFactorPoseCal3Unified', 'TriangulationFactorPoseCal3_S2', 'TriangulationParameters', 'TriangulationResult', 'Unit3', 'Value', 'Values', 'VariableIndex', 'VectorComponentFactorChebyshev1Basis', 'VectorComponentFactorChebyshev2', 'VectorComponentFactorChebyshev2Basis', 'VectorComponentFactorFourierBasis', 'VectorDerivativeFactorChebyshev1Basis', 'VectorDerivativeFactorChebyshev2', 'VectorDerivativeFactorChebyshev2Basis', 'VectorDerivativeFactorFourierBasis', 'VectorEvaluationFactorChebyshev1Basis', 'VectorEvaluationFactorChebyshev2', 'VectorEvaluationFactorChebyshev2Basis', 'VectorEvaluationFactorFourierBasis', 'VectorValues', 'cartesianProduct', 'checkConvergence', 'gtsfm', 'html', 'imuBias', 'initialCamerasAndPointsEstimate', 'initialCamerasEstimate', 'isDebugVersion', 'lago', 'linear_independent', 'load2D', 'load3D', 'markdown', 'mrsymbol', 'mrsymbolChr', 'mrsymbolIndex', 'mrsymbolLabel', 'noiseModel', 'parse2DFactors', 'parse3DFactors', 'readBal', 'readG2o', 'save2D', 'so3', 'symbol', 'symbolChr', 'symbolIndex', 'symbol_shorthand', 'triangulateNonlinear', 'triangulatePoint3', 'triangulateSafe', 'utilities', 'writeBAL', 'writeG2o']

Unchanged (lines 15-17):
class AHRSFactor(NonlinearFactor):
    def __getstate__(self) -> tuple:
        ...
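The regenerated export list adds the ConcentratedGaussian* and ExtendedPriorFactor* families whose stubs appear in the second hunk below. As an orientation aid only — based purely on the stub signatures in this diff, and assuming the new wheel exposes matching runtime bindings — a minimal usage sketch:

```python
# Usage sketch based only on the stub signatures shown in this diff; it assumes the
# 4.3a0.dev202510101043 build exposes matching runtime bindings.
import numpy as np

import gtsam
from gtsam.symbol_shorthand import X  # key helper, also listed in __all__

# Per the stub overloads, ConcentratedGaussianPose2 can be built from a key,
# an origin Pose2, and a covariance matrix.
prior = gtsam.ConcentratedGaussianPose2(X(0), gtsam.Pose2(0.0, 0.0, 0.0), np.eye(3))
print(prior.negLogConstant())  # -> float, per the stub
print(prior.retractMean())     # -> Pose2, per the stub
```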
@@ -2003,1554 +2003,3177 @@ class ComponentDerivativeFactorFourierBasis(NoiseModelFactor):
|
|
|
2003
2003
|
@typing.overload
|
|
2004
2004
|
def __init__(self, key: int, z: float, model: noiseModel.Base, P: int, N: int, i: int, x: float, a: float, b: float) -> None:
|
|
2005
2005
|
...
|
|
2006
|
-
class
|
|
2007
|
-
|
|
2008
|
-
epsilon_rel: float
|
|
2009
|
-
maxIterations: int
|
|
2010
|
-
minIterations: int
|
|
2011
|
-
reset: int
|
|
2006
|
+
class ConcentratedGaussianDouble(ExtendedPriorFactorDouble):
|
|
2007
|
+
@typing.overload
|
|
2012
2008
|
def __init__(self) -> None:
|
|
2013
2009
|
...
|
|
2014
|
-
class ConstantTwistScenario(Scenario):
|
|
2015
2010
|
@typing.overload
|
|
2016
|
-
def __init__(self,
|
|
2011
|
+
def __init__(self, key: int, origin: float, noiseModel: noiseModel.Gaussian) -> None:
|
|
2017
2012
|
...
|
|
2018
2013
|
@typing.overload
|
|
2019
|
-
def __init__(self,
|
|
2014
|
+
def __init__(self, key: int, origin: float, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None:
|
|
2020
2015
|
...
|
|
2021
|
-
|
|
2022
|
-
def __init__(self,
|
|
2016
|
+
@typing.overload
|
|
2017
|
+
def __init__(self, key: int, origin: float, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2023
2018
|
...
|
|
2024
|
-
|
|
2019
|
+
@typing.overload
|
|
2020
|
+
def __init__(self, key: int, origin: float, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2021
|
+
...
|
|
2022
|
+
def __mul__(self, arg0: ConcentratedGaussianDouble) -> ConcentratedGaussianDouble:
|
|
2025
2023
|
...
|
|
2026
|
-
class CustomFactor(NoiseModelFactor):
|
|
2027
2024
|
@typing.overload
|
|
2028
|
-
def
|
|
2025
|
+
def evaluate(self, x: float) -> float:
|
|
2029
2026
|
...
|
|
2030
2027
|
@typing.overload
|
|
2031
|
-
def
|
|
2028
|
+
def evaluate(self, values: ...) -> float:
|
|
2032
2029
|
...
|
|
2033
|
-
|
|
2030
|
+
@typing.overload
|
|
2031
|
+
def logProbability(self, x: float) -> float:
|
|
2034
2032
|
...
|
|
2035
|
-
|
|
2036
|
-
|
|
2037
|
-
print
|
|
2038
|
-
"""
|
|
2039
|
-
class DSFMapIndexPair:
|
|
2040
|
-
def __init__(self) -> None:
|
|
2033
|
+
@typing.overload
|
|
2034
|
+
def logProbability(self, values: ...) -> float:
|
|
2041
2035
|
...
|
|
2042
|
-
def
|
|
2036
|
+
def negLogConstant(self) -> float:
|
|
2043
2037
|
...
|
|
2044
|
-
def
|
|
2038
|
+
def reset(self) -> ConcentratedGaussianDouble:
|
|
2045
2039
|
...
|
|
2046
|
-
def
|
|
2040
|
+
def retractMean(self) -> float:
|
|
2047
2041
|
...
|
|
2048
|
-
|
|
2049
|
-
def __call__(self, arg0: DiscreteValues) -> float:
|
|
2042
|
+
def transportTo(self, x_hat: float) -> ConcentratedGaussianDouble:
|
|
2050
2043
|
...
|
|
2044
|
+
class ConcentratedGaussianGal3(ExtendedPriorFactorGal3):
|
|
2051
2045
|
@typing.overload
|
|
2052
2046
|
def __init__(self) -> None:
|
|
2053
2047
|
...
|
|
2054
2048
|
@typing.overload
|
|
2055
|
-
def __init__(self, key:
|
|
2049
|
+
def __init__(self, key: int, origin: Gal3, noiseModel: noiseModel.Gaussian) -> None:
|
|
2056
2050
|
...
|
|
2057
2051
|
@typing.overload
|
|
2058
|
-
def __init__(self, key: tuple[
|
|
2052
|
+
def __init__(self, key: int, origin: Gal3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None:
|
|
2059
2053
|
...
|
|
2060
2054
|
@typing.overload
|
|
2061
|
-
def __init__(self,
|
|
2055
|
+
def __init__(self, key: int, origin: Gal3, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2062
2056
|
...
|
|
2063
2057
|
@typing.overload
|
|
2064
|
-
def __init__(self,
|
|
2058
|
+
def __init__(self, key: int, origin: Gal3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2059
|
+
...
|
|
2060
|
+
def __mul__(self, arg0: ConcentratedGaussianGal3) -> ConcentratedGaussianGal3:
|
|
2065
2061
|
...
|
|
2066
2062
|
@typing.overload
|
|
2067
|
-
def
|
|
2063
|
+
def evaluate(self, x: Gal3) -> float:
|
|
2068
2064
|
...
|
|
2069
2065
|
@typing.overload
|
|
2070
|
-
def
|
|
2066
|
+
def evaluate(self, values: ...) -> float:
|
|
2071
2067
|
...
|
|
2072
2068
|
@typing.overload
|
|
2073
|
-
def
|
|
2069
|
+
def logProbability(self, x: Gal3) -> float:
|
|
2074
2070
|
...
|
|
2075
|
-
|
|
2071
|
+
@typing.overload
|
|
2072
|
+
def logProbability(self, values: ...) -> float:
|
|
2076
2073
|
...
|
|
2077
|
-
def
|
|
2074
|
+
def negLogConstant(self) -> float:
|
|
2078
2075
|
...
|
|
2079
|
-
def
|
|
2076
|
+
def reset(self) -> ConcentratedGaussianGal3:
|
|
2077
|
+
...
|
|
2078
|
+
def retractMean(self) -> Gal3:
|
|
2080
2079
|
...
|
|
2080
|
+
def transportTo(self, x_hat: Gal3) -> ConcentratedGaussianGal3:
|
|
2081
|
+
...
|
|
2082
|
+
class ConcentratedGaussianPoint2(ExtendedPriorFactorPoint2):
|
|
2081
2083
|
@typing.overload
|
|
2082
|
-
def
|
|
2084
|
+
def __init__(self) -> None:
|
|
2083
2085
|
...
|
|
2084
2086
|
@typing.overload
|
|
2085
|
-
def
|
|
2086
|
-
|
|
2087
|
-
Render as html table.
|
|
2088
|
-
|
|
2089
|
-
Args:
|
|
2090
|
-
keyFormatter: GTSAM-style Key formatter.
|
|
2091
|
-
names: optional, category names corresponding to choices.
|
|
2092
|
-
|
|
2093
|
-
Returns: std::string a html string.
|
|
2094
|
-
"""
|
|
2087
|
+
def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None:
|
|
2088
|
+
...
|
|
2095
2089
|
@typing.overload
|
|
2096
|
-
def
|
|
2090
|
+
def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None:
|
|
2097
2091
|
...
|
|
2098
2092
|
@typing.overload
|
|
2099
|
-
def
|
|
2100
|
-
|
|
2101
|
-
Render as markdown table.
|
|
2102
|
-
|
|
2103
|
-
Args:
|
|
2104
|
-
keyFormatter: GTSAM-style Key formatter.
|
|
2105
|
-
names: optional, category names corresponding to choices.
|
|
2106
|
-
|
|
2107
|
-
Returns: std::string a markdown string.
|
|
2108
|
-
"""
|
|
2093
|
+
def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2094
|
+
...
|
|
2109
2095
|
@typing.overload
|
|
2110
|
-
def
|
|
2096
|
+
def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2097
|
+
...
|
|
2098
|
+
def __mul__(self, arg0: ConcentratedGaussianPoint2) -> ConcentratedGaussianPoint2:
|
|
2111
2099
|
...
|
|
2112
2100
|
@typing.overload
|
|
2113
|
-
def
|
|
2101
|
+
def evaluate(self, x: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float:
|
|
2114
2102
|
...
|
|
2115
|
-
def dot(self, keyFormatter: typing.Callable[[int], str] = ..., showZero: bool = True) -> str:
|
|
2116
|
-
"""
|
|
2117
|
-
output to graphviz format string
|
|
2118
|
-
"""
|
|
2119
|
-
def enumerate(self) -> list[tuple[DiscreteValues, float]]:
|
|
2120
|
-
"""
|
|
2121
|
-
Enumerate all values into a map from values to double.
|
|
2122
|
-
"""
|
|
2123
|
-
def equals(self, other: DecisionTreeFactor, tol: float = 1e-09) -> bool:
|
|
2124
|
-
"""
|
|
2125
|
-
equality
|
|
2126
|
-
"""
|
|
2127
2103
|
@typing.overload
|
|
2128
|
-
def
|
|
2129
|
-
|
|
2130
|
-
Create new factor by maximizing over all values with the same separator.
|
|
2131
|
-
"""
|
|
2104
|
+
def evaluate(self, values: ...) -> float:
|
|
2105
|
+
...
|
|
2132
2106
|
@typing.overload
|
|
2133
|
-
def
|
|
2134
|
-
"""
|
|
2135
|
-
Create new factor by maximizing over all values with the same separator.
|
|
2136
|
-
"""
|
|
2137
|
-
def print(self, s: str = 'DecisionTreeFactor\n', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
2107
|
+
def logProbability(self, x: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float:
|
|
2138
2108
|
...
|
|
2139
2109
|
@typing.overload
|
|
2140
|
-
def
|
|
2141
|
-
|
|
2142
|
-
|
|
2143
|
-
|
|
2110
|
+
def logProbability(self, values: ...) -> float:
|
|
2111
|
+
...
|
|
2112
|
+
def negLogConstant(self) -> float:
|
|
2113
|
+
...
|
|
2114
|
+
def reset(self) -> ConcentratedGaussianPoint2:
|
|
2115
|
+
...
|
|
2116
|
+
def retractMean(self) -> numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
2117
|
+
...
|
|
2118
|
+
def transportTo(self, x_hat: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]) -> ConcentratedGaussianPoint2:
|
|
2119
|
+
...
|
|
2120
|
+
class ConcentratedGaussianPoint3(ExtendedPriorFactorPoint3):
|
|
2144
2121
|
@typing.overload
|
|
2145
|
-
def
|
|
2146
|
-
"""
|
|
2147
|
-
Create new factor by summing all values with the same separator values.
|
|
2148
|
-
"""
|
|
2149
|
-
class DegeneracyMode:
|
|
2150
|
-
"""
|
|
2151
|
-
Members:
|
|
2152
|
-
|
|
2153
|
-
IGNORE_DEGENERACY
|
|
2154
|
-
|
|
2155
|
-
ZERO_ON_DEGENERACY
|
|
2156
|
-
|
|
2157
|
-
HANDLE_INFINITY
|
|
2158
|
-
"""
|
|
2159
|
-
HANDLE_INFINITY: typing.ClassVar[DegeneracyMode] # value = <DegeneracyMode.HANDLE_INFINITY: 2>
|
|
2160
|
-
IGNORE_DEGENERACY: typing.ClassVar[DegeneracyMode] # value = <DegeneracyMode.IGNORE_DEGENERACY: 0>
|
|
2161
|
-
ZERO_ON_DEGENERACY: typing.ClassVar[DegeneracyMode] # value = <DegeneracyMode.ZERO_ON_DEGENERACY: 1>
|
|
2162
|
-
__members__: typing.ClassVar[dict[str, DegeneracyMode]] # value = {'IGNORE_DEGENERACY': <DegeneracyMode.IGNORE_DEGENERACY: 0>, 'ZERO_ON_DEGENERACY': <DegeneracyMode.ZERO_ON_DEGENERACY: 1>, 'HANDLE_INFINITY': <DegeneracyMode.HANDLE_INFINITY: 2>}
|
|
2163
|
-
def __and__(self, other: typing.Any) -> typing.Any:
|
|
2122
|
+
def __init__(self) -> None:
|
|
2164
2123
|
...
|
|
2165
|
-
|
|
2124
|
+
@typing.overload
|
|
2125
|
+
def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None:
|
|
2166
2126
|
...
|
|
2167
|
-
|
|
2127
|
+
@typing.overload
|
|
2128
|
+
def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]], mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None:
|
|
2168
2129
|
...
|
|
2169
|
-
|
|
2130
|
+
@typing.overload
|
|
2131
|
+
def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2170
2132
|
...
|
|
2171
|
-
|
|
2133
|
+
@typing.overload
|
|
2134
|
+
def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]], mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2172
2135
|
...
|
|
2173
|
-
def
|
|
2136
|
+
def __mul__(self, arg0: ConcentratedGaussianPoint3) -> ConcentratedGaussianPoint3:
|
|
2174
2137
|
...
|
|
2175
|
-
|
|
2138
|
+
@typing.overload
|
|
2139
|
+
def evaluate(self, x: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float:
|
|
2176
2140
|
...
|
|
2177
|
-
|
|
2141
|
+
@typing.overload
|
|
2142
|
+
def evaluate(self, values: ...) -> float:
|
|
2178
2143
|
...
|
|
2179
|
-
|
|
2144
|
+
@typing.overload
|
|
2145
|
+
def logProbability(self, x: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float:
|
|
2180
2146
|
...
|
|
2181
|
-
|
|
2147
|
+
@typing.overload
|
|
2148
|
+
def logProbability(self, values: ...) -> float:
|
|
2182
2149
|
...
|
|
2183
|
-
def
|
|
2150
|
+
def negLogConstant(self) -> float:
|
|
2184
2151
|
...
|
|
2185
|
-
def
|
|
2152
|
+
def reset(self) -> ConcentratedGaussianPoint3:
|
|
2186
2153
|
...
|
|
2187
|
-
def
|
|
2154
|
+
def retractMean(self) -> numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
2188
2155
|
...
|
|
2189
|
-
def
|
|
2156
|
+
def transportTo(self, x_hat: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]) -> ConcentratedGaussianPoint3:
|
|
2190
2157
|
...
|
|
2191
|
-
|
|
2158
|
+
class ConcentratedGaussianPose2(ExtendedPriorFactorPose2):
|
|
2159
|
+
@typing.overload
|
|
2160
|
+
def __init__(self) -> None:
|
|
2192
2161
|
...
|
|
2193
|
-
|
|
2162
|
+
@typing.overload
|
|
2163
|
+
def __init__(self, key: int, origin: Pose2, noiseModel: noiseModel.Gaussian) -> None:
|
|
2194
2164
|
...
|
|
2195
|
-
|
|
2165
|
+
@typing.overload
|
|
2166
|
+
def __init__(self, key: int, origin: Pose2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None:
|
|
2196
2167
|
...
|
|
2197
|
-
|
|
2168
|
+
@typing.overload
|
|
2169
|
+
def __init__(self, key: int, origin: Pose2, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2198
2170
|
...
|
|
2199
|
-
|
|
2171
|
+
@typing.overload
|
|
2172
|
+
def __init__(self, key: int, origin: Pose2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2200
2173
|
...
|
|
2201
|
-
def
|
|
2174
|
+
def __mul__(self, arg0: ConcentratedGaussianPose2) -> ConcentratedGaussianPose2:
|
|
2202
2175
|
...
|
|
2203
|
-
|
|
2176
|
+
@typing.overload
|
|
2177
|
+
def evaluate(self, x: Pose2) -> float:
|
|
2204
2178
|
...
|
|
2205
|
-
@
|
|
2206
|
-
def
|
|
2179
|
+
@typing.overload
|
|
2180
|
+
def evaluate(self, values: ...) -> float:
|
|
2207
2181
|
...
|
|
2208
|
-
@
|
|
2209
|
-
def
|
|
2182
|
+
@typing.overload
|
|
2183
|
+
def logProbability(self, x: Pose2) -> float:
|
|
2210
2184
|
...
|
|
2211
|
-
|
|
2185
|
+
@typing.overload
|
|
2186
|
+
def logProbability(self, values: ...) -> float:
|
|
2187
|
+
...
|
|
2188
|
+
def negLogConstant(self) -> float:
|
|
2189
|
+
...
|
|
2190
|
+
def reset(self) -> ConcentratedGaussianPose2:
|
|
2191
|
+
...
|
|
2192
|
+
def retractMean(self) -> Pose2:
|
|
2193
|
+
...
|
|
2194
|
+
def transportTo(self, x_hat: Pose2) -> ConcentratedGaussianPose2:
|
|
2195
|
+
...
|
|
2196
|
+
class ConcentratedGaussianPose3(ExtendedPriorFactorPose3):
|
|
2212
2197
|
@typing.overload
|
|
2213
2198
|
def __init__(self) -> None:
|
|
2214
2199
|
...
|
|
2215
2200
|
@typing.overload
|
|
2216
|
-
def __init__(self, key: int,
|
|
2201
|
+
def __init__(self, key: int, origin: Pose3, noiseModel: noiseModel.Gaussian) -> None:
|
|
2217
2202
|
...
|
|
2218
2203
|
@typing.overload
|
|
2219
|
-
def __init__(self, key: int,
|
|
2204
|
+
def __init__(self, key: int, origin: Pose3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None:
|
|
2220
2205
|
...
|
|
2221
|
-
class DerivativeFactorChebyshev2(NoiseModelFactor):
|
|
2222
2206
|
@typing.overload
|
|
2223
|
-
def __init__(self) -> None:
|
|
2207
|
+
def __init__(self, key: int, origin: Pose3, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2224
2208
|
...
|
|
2225
2209
|
@typing.overload
|
|
2226
|
-
def __init__(self, key: int,
|
|
2210
|
+
def __init__(self, key: int, origin: Pose3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2211
|
+
...
|
|
2212
|
+
def __mul__(self, arg0: ConcentratedGaussianPose3) -> ConcentratedGaussianPose3:
|
|
2227
2213
|
...
|
|
2228
2214
|
@typing.overload
|
|
2229
|
-
def
|
|
2215
|
+
def evaluate(self, x: Pose3) -> float:
|
|
2230
2216
|
...
|
|
2231
|
-
class DerivativeFactorChebyshev2Basis(NoiseModelFactor):
|
|
2232
2217
|
@typing.overload
|
|
2233
|
-
def
|
|
2218
|
+
def evaluate(self, values: ...) -> float:
|
|
2234
2219
|
...
|
|
2235
2220
|
@typing.overload
|
|
2236
|
-
def
|
|
2221
|
+
def logProbability(self, x: Pose3) -> float:
|
|
2237
2222
|
...
|
|
2238
2223
|
@typing.overload
|
|
2239
|
-
def
|
|
2224
|
+
def logProbability(self, values: ...) -> float:
|
|
2240
2225
|
...
|
|
2241
|
-
|
|
2226
|
+
def negLogConstant(self) -> float:
|
|
2227
|
+
...
|
|
2228
|
+
def reset(self) -> ConcentratedGaussianPose3:
|
|
2229
|
+
...
|
|
2230
|
+
def retractMean(self) -> Pose3:
|
|
2231
|
+
...
|
|
2232
|
+
def transportTo(self, x_hat: Pose3) -> ConcentratedGaussianPose3:
|
|
2233
|
+
...
|
|
2234
|
+
class ConcentratedGaussianRot2(ExtendedPriorFactorRot2):
|
|
2242
2235
|
@typing.overload
|
|
2243
2236
|
def __init__(self) -> None:
|
|
2244
2237
|
...
|
|
2245
2238
|
@typing.overload
|
|
2246
|
-
def __init__(self, key: int,
|
|
2239
|
+
def __init__(self, key: int, origin: Rot2, noiseModel: noiseModel.Gaussian) -> None:
|
|
2247
2240
|
...
|
|
2248
2241
|
@typing.overload
|
|
2249
|
-
def __init__(self, key: int,
|
|
2242
|
+
def __init__(self, key: int, origin: Rot2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None:
|
|
2250
2243
|
...
|
|
2251
|
-
|
|
2252
|
-
def
|
|
2244
|
+
@typing.overload
|
|
2245
|
+
def __init__(self, key: int, origin: Rot2, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2253
2246
|
...
|
|
2254
|
-
|
|
2247
|
+
@typing.overload
|
|
2248
|
+
def __init__(self, key: int, origin: Rot2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
|
|
2255
2249
|
...
|
|
2256
|
-
def
|
|
2250
|
+
def __mul__(self, arg0: ConcentratedGaussianRot2) -> ConcentratedGaussianRot2:
|
|
2257
2251
|
...
|
|
2258
2252
|
@typing.overload
|
|
2259
|
-
def
|
|
2253
|
+
def evaluate(self, x: Rot2) -> float:
|
|
2260
2254
|
...
|
|
2261
2255
|
@typing.overload
|
|
2262
|
-
def
|
|
2263
|
-
|
|
2264
|
-
Render as html tables.
|
|
2265
|
-
"""
|
|
2256
|
+
def evaluate(self, values: ...) -> float:
|
|
2257
|
+
...
|
|
2266
2258
|
@typing.overload
|
|
2267
|
-
def
|
|
2259
|
+
def logProbability(self, x: Rot2) -> float:
|
|
2268
2260
|
...
|
|
2269
2261
|
@typing.overload
|
|
2270
|
-
def
|
|
2271
|
-
|
|
2272
|
-
|
|
2273
|
-
|
|
2262
|
+
def logProbability(self, values: ...) -> float:
|
|
2263
|
+
...
|
|
2264
|
+
def negLogConstant(self) -> float:
|
|
2265
|
+
...
|
|
2266
|
+
def reset(self) -> ConcentratedGaussianRot2:
|
|
2267
|
+
...
|
|
2268
|
+
def retractMean(self) -> Rot2:
|
|
2269
|
+
...
|
|
2270
|
+
def transportTo(self, x_hat: Rot2) -> ConcentratedGaussianRot2:
|
|
2271
|
+
...
|
|
2272
|
+
class ConcentratedGaussianRot3(ExtendedPriorFactorRot3):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: Rot3, noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: Rot3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: Rot3, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: Rot3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    def __mul__(self, arg0: ConcentratedGaussianRot3) -> ConcentratedGaussianRot3: ...
    @typing.overload
    def evaluate(self, x: Rot3) -> float: ...
    @typing.overload
    def evaluate(self, values: ...) -> float: ...
    @typing.overload
    def logProbability(self, x: Rot3) -> float: ...
    @typing.overload
    def logProbability(self, values: ...) -> float: ...
    def negLogConstant(self) -> float: ...
    def reset(self) -> ConcentratedGaussianRot3: ...
    def retractMean(self) -> Rot3: ...
    def transportTo(self, x_hat: Rot3) -> ConcentratedGaussianRot3: ...
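The ConcentratedGaussian* classes are new in this development build and are not documented beyond the stub above, so the following is a minimal usage sketch inferred from the signatures only. The key convention, the 3x3 tangent-space covariance, and the interpretation of transportTo are assumptions, not confirmed behavior.

# Sketch only: exercises the ConcentratedGaussianRot3 stub added in this build.
import numpy as np
import gtsam

key = gtsam.symbol('R', 0)              # assumed key convention for illustration
origin = gtsam.Rot3()                   # identity rotation as the base point
cov = np.diag([0.01, 0.01, 0.01])       # assumed 3x3 covariance in the Rot3 tangent space

density = gtsam.ConcentratedGaussianRot3(key, origin, cov)

R = gtsam.Rot3.Yaw(0.05)
print(density.evaluate(R))              # density value at R
print(density.logProbability(R))        # log of the above
print(density.negLogConstant())         # normalization-constant term

mean_R = density.retractMean()          # Rot3 at the retracted mean
moved = density.transportTo(mean_R)     # re-express the density at a new base point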
class ConcentratedGaussianSL4(ExtendedPriorFactorSL4):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SL4, noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SL4, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SL4, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SL4, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    def __mul__(self, arg0: ConcentratedGaussianSL4) -> ConcentratedGaussianSL4: ...
    @typing.overload
    def evaluate(self, x: SL4) -> float: ...
    @typing.overload
    def evaluate(self, values: ...) -> float: ...
    @typing.overload
    def logProbability(self, x: SL4) -> float: ...
    @typing.overload
    def logProbability(self, values: ...) -> float: ...
    def negLogConstant(self) -> float: ...
    def reset(self) -> ConcentratedGaussianSL4: ...
    def retractMean(self) -> SL4: ...
    def transportTo(self, x_hat: SL4) -> ConcentratedGaussianSL4: ...
class ConcentratedGaussianSO3(ExtendedPriorFactorSO3):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SO3, noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SO3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SO3, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SO3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    def __mul__(self, arg0: ConcentratedGaussianSO3) -> ConcentratedGaussianSO3: ...
    @typing.overload
    def evaluate(self, x: SO3) -> float: ...
    @typing.overload
    def evaluate(self, values: ...) -> float: ...
    @typing.overload
    def logProbability(self, x: SO3) -> float: ...
    @typing.overload
    def logProbability(self, values: ...) -> float: ...
    def negLogConstant(self) -> float: ...
    def reset(self) -> ConcentratedGaussianSO3: ...
    def retractMean(self) -> SO3: ...
    def transportTo(self, x_hat: SO3) -> ConcentratedGaussianSO3: ...
class ConcentratedGaussianSO4(ExtendedPriorFactorSO4):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SO4, noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SO4, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SO4, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SO4, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    def __mul__(self, arg0: ConcentratedGaussianSO4) -> ConcentratedGaussianSO4: ...
    @typing.overload
    def evaluate(self, x: SO4) -> float: ...
    @typing.overload
    def evaluate(self, values: ...) -> float: ...
    @typing.overload
    def logProbability(self, x: SO4) -> float: ...
    @typing.overload
    def logProbability(self, values: ...) -> float: ...
    def negLogConstant(self) -> float: ...
    def reset(self) -> ConcentratedGaussianSO4: ...
    def retractMean(self) -> SO4: ...
    def transportTo(self, x_hat: SO4) -> ConcentratedGaussianSO4: ...
class ConcentratedGaussianSOn(ExtendedPriorFactorSOn):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SOn, noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SOn, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SOn, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: SOn, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    def __mul__(self, arg0: ConcentratedGaussianSOn) -> ConcentratedGaussianSOn: ...
    @typing.overload
    def evaluate(self, x: SOn) -> float: ...
    @typing.overload
    def evaluate(self, values: ...) -> float: ...
    @typing.overload
    def logProbability(self, x: SOn) -> float: ...
    @typing.overload
    def logProbability(self, values: ...) -> float: ...
    def negLogConstant(self) -> float: ...
    def reset(self) -> ConcentratedGaussianSOn: ...
    def retractMean(self) -> SOn: ...
    def transportTo(self, x_hat: SOn) -> ConcentratedGaussianSOn: ...
class ConcentratedGaussianSimilarity2(ExtendedPriorFactorSimilarity2):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: Similarity2, noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: Similarity2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: Similarity2, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: Similarity2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    def __mul__(self, arg0: ConcentratedGaussianSimilarity2) -> ConcentratedGaussianSimilarity2: ...
    @typing.overload
    def evaluate(self, x: Similarity2) -> float: ...
    @typing.overload
    def evaluate(self, values: ...) -> float: ...
    @typing.overload
    def logProbability(self, x: Similarity2) -> float: ...
    @typing.overload
    def logProbability(self, values: ...) -> float: ...
    def negLogConstant(self) -> float: ...
    def reset(self) -> ConcentratedGaussianSimilarity2: ...
    def retractMean(self) -> Similarity2: ...
    def transportTo(self, x_hat: Similarity2) -> ConcentratedGaussianSimilarity2: ...
class ConcentratedGaussianSimilarity3(ExtendedPriorFactorSimilarity3):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: Similarity3, noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: Similarity3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: Similarity3, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: Similarity3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    def __mul__(self, arg0: ConcentratedGaussianSimilarity3) -> ConcentratedGaussianSimilarity3: ...
    @typing.overload
    def evaluate(self, x: Similarity3) -> float: ...
    @typing.overload
    def evaluate(self, values: ...) -> float: ...
    @typing.overload
    def logProbability(self, x: Similarity3) -> float: ...
    @typing.overload
    def logProbability(self, values: ...) -> float: ...
    def negLogConstant(self) -> float: ...
    def reset(self) -> ConcentratedGaussianSimilarity3: ...
    def retractMean(self) -> Similarity3: ...
    def transportTo(self, x_hat: Similarity3) -> ConcentratedGaussianSimilarity3: ...
class ConcentratedGaussianStereoPoint2(ExtendedPriorFactorStereoPoint2):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: StereoPoint2, noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: StereoPoint2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: StereoPoint2, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: StereoPoint2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    def __mul__(self, arg0: ConcentratedGaussianStereoPoint2) -> ConcentratedGaussianStereoPoint2: ...
    @typing.overload
    def evaluate(self, x: StereoPoint2) -> float: ...
    @typing.overload
    def evaluate(self, values: ...) -> float: ...
    @typing.overload
    def logProbability(self, x: StereoPoint2) -> float: ...
    @typing.overload
    def logProbability(self, values: ...) -> float: ...
    def negLogConstant(self) -> float: ...
    def reset(self) -> ConcentratedGaussianStereoPoint2: ...
    def retractMean(self) -> StereoPoint2: ...
    def transportTo(self, x_hat: StereoPoint2) -> ConcentratedGaussianStereoPoint2: ...
class ConcentratedGaussianVector(ExtendedPriorFactorVector):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Gaussian) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    @typing.overload
    def __init__(self, key: int, origin: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None: ...
    def __mul__(self, arg0: ConcentratedGaussianVector) -> ConcentratedGaussianVector: ...
    @typing.overload
    def evaluate(self, x: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float: ...
    @typing.overload
    def evaluate(self, values: ...) -> float: ...
    @typing.overload
    def logProbability(self, x: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float: ...
    @typing.overload
    def logProbability(self, values: ...) -> float: ...
    def negLogConstant(self) -> float: ...
    def reset(self) -> ConcentratedGaussianVector: ...
    def retractMean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]: ...
    def transportTo(self, x_hat: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]) -> ConcentratedGaussianVector: ...
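The overloaded * operator on the ConcentratedGaussian* classes is not documented in this stub; the sketch below assumes it fuses two densities on the same variable, which is a guess based on the signature alone.

# Sketch only: combining two ConcentratedGaussianVector densities with the '*' overload.
import numpy as np
import gtsam

key = gtsam.symbol('v', 0)
origin = np.zeros(3)

a = gtsam.ConcentratedGaussianVector(key, origin, np.eye(3) * 0.04)
b = gtsam.ConcentratedGaussianVector(key, origin, np.eye(3) * 0.09)

fused = a * b                               # __mul__ returns another ConcentratedGaussianVector
print(fused.retractMean())                  # fused mean as a numpy vector (assumed semantics)
print(fused.logProbability(np.zeros(3)))    # log-density at the origin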
class ConjugateGradientParameters(IterativeOptimizationParameters):
    epsilon_abs: float
    epsilon_rel: float
    maxIterations: int
    minIterations: int
    reset: int
    def __init__(self) -> None: ...
class ConstantTwistScenario(Scenario):
    @typing.overload
    def __init__(self, w: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], v: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]) -> None: ...
    @typing.overload
    def __init__(self, w: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], v: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], nTb0: Pose3) -> None: ...
class ConstantVelocityFactor(NonlinearFactor):
    def __init__(self, i: int, j: int, dt: float, model: noiseModel.Base) -> None: ...
    def evaluateError(self, x1: NavState, x2: NavState) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]: ...
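A minimal sketch of ConstantVelocityFactor, using only the constructor and evaluateError shown in the stub. The 9-dimensional noise model is an assumption based on the NavState tangent dimension used elsewhere in GTSAM; the symbol_shorthand keys are illustrative.

# Sketch: constant-velocity constraint between two NavState keys.
import gtsam
from gtsam import symbol_shorthand

X = symbol_shorthand.X
dt = 0.1
model = gtsam.noiseModel.Isotropic.Sigma(9, 0.1)   # assumed 9-dim NavState error

factor = gtsam.ConstantVelocityFactor(X(0), X(1), dt, model)

x0 = gtsam.NavState()                   # identity pose, zero velocity
x1 = gtsam.NavState()
print(factor.evaluateError(x0, x1))     # residual vector between the two states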
class CustomFactor(NoiseModelFactor):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, noiseModel: noiseModel.Base, keys: list[int], errorFunction: typing.Callable[[CustomFactor, Values, JacobianVector], numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]]) -> None: ...
    def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str: ...
    def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
        """print"""
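CustomFactor wraps a Python callback as a nonlinear factor; the callback receives the factor, the current Values, and an optional list of Jacobians to fill. A minimal sketch of a unary Pose2 "prior"-style factor follows; the identity Jacobian is a simplification for illustration.

# Sketch: a Python-defined unary factor on a Pose2 via CustomFactor.
import numpy as np
import gtsam

key = gtsam.symbol('x', 0)
target = gtsam.Pose2(1.0, 2.0, 0.3)
noise = gtsam.noiseModel.Isotropic.Sigma(3, 0.1)

def error_func(this: gtsam.CustomFactor, values: gtsam.Values, H) -> np.ndarray:
    pose = values.atPose2(this.keys()[0])
    error = target.localCoordinates(pose)   # 3-vector tangent-space error
    if H is not None:
        H[0] = np.eye(3)                     # crude Jacobian, fine for a sketch
    return error

factor = gtsam.CustomFactor(noise, [key], error_func)

values = gtsam.Values()
values.insert(key, gtsam.Pose2(1.1, 2.0, 0.3))
print(factor.error(values))                  # 0.5 * ||whitened error||^2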
class DSFMapIndexPair:
    def __init__(self) -> None: ...
    def find(self, key: IndexPair) -> IndexPair: ...
    def merge(self, x: IndexPair, y: IndexPair) -> None: ...
    def sets(self) -> dict[IndexPair, set[IndexPair]]: ...
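DSFMapIndexPair is a disjoint-set forest over (image, measurement) index pairs, typically used to cluster feature tracks. The sketch below uses only the methods in the stub; the i()/j() accessors on IndexPair are assumed from the corresponding C++ class.

# Sketch: clustering (image, feature) observations with DSFMapIndexPair.
import gtsam

dsf = gtsam.DSFMapIndexPair()
a = gtsam.IndexPair(0, 5)       # feature 5 seen in image 0
b = gtsam.IndexPair(1, 7)       # feature 7 seen in image 1
dsf.merge(a, b)                 # declare them observations of the same landmark

rep = dsf.find(b)
print(rep.i(), rep.j())         # representative of the merged set (accessors assumed)
print(len(dsf.sets()))          # number of disjoint sets so far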
class DecisionTreeFactor(DiscreteFactor):
    def __call__(self, arg0: DiscreteValues) -> float: ...
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: tuple[int, int], spec: list[float]) -> None: ...
    @typing.overload
    def __init__(self, key: tuple[int, int], table: str) -> None: ...
    @typing.overload
    def __init__(self, keys: DiscreteKeys, table: list[float]) -> None: ...
    @typing.overload
    def __init__(self, keys: DiscreteKeys, table: str) -> None: ...
    @typing.overload
    def __init__(self, keys: list[tuple[int, int]], table: list[float]) -> None: ...
    @typing.overload
    def __init__(self, keys: list[tuple[int, int]], table: str) -> None: ...
    @typing.overload
    def __init__(self, c: ...) -> None: ...
    def __mul__(self, arg0: DecisionTreeFactor) -> DecisionTreeFactor: ...
    def __repr__(self, s: str = 'DecisionTreeFactor\n', keyFormatter: typing.Callable[[int], str] = ...) -> str: ...
    def __truediv__(self, arg0: DecisionTreeFactor) -> DecisionTreeFactor: ...
    @typing.overload
    def _repr_html_(self, keyFormatter: typing.Callable[[int], str] = ...) -> str: ...
    @typing.overload
    def _repr_html_(self, keyFormatter: typing.Callable[[int], str], names: dict[int, list[str]]) -> str:
        """Render as html table. Args: keyFormatter: GTSAM-style Key formatter. names: optional, category names corresponding to choices. Returns: std::string a html string."""
    @typing.overload
    def _repr_markdown_(self, keyFormatter: typing.Callable[[int], str] = ...) -> str: ...
    @typing.overload
    def _repr_markdown_(self, keyFormatter: typing.Callable[[int], str], names: dict[int, list[str]]) -> str:
        """Render as markdown table. Args: keyFormatter: GTSAM-style Key formatter. names: optional, category names corresponding to choices. Returns: std::string a markdown string."""
    @typing.overload
    def cardinality(self, j: int) -> int: ...
    @typing.overload
    def cardinality(self, j: int) -> int: ...
    def dot(self, keyFormatter: typing.Callable[[int], str] = ..., showZero: bool = True) -> str:
        """output to graphviz format string"""
    def enumerate(self) -> list[tuple[DiscreteValues, float]]:
        """Enumerate all values into a map from values to double."""
    def equals(self, other: DecisionTreeFactor, tol: float = 1e-09) -> bool:
        """equality"""
    @typing.overload
    def max(self, nrFrontals: int) -> DiscreteFactor:
        """Create new factor by maximizing over all values with the same separator."""
    @typing.overload
    def max(self, keys: Ordering) -> DiscreteFactor:
        """Create new factor by maximizing over all values with the same separator."""
    def print(self, s: str = 'DecisionTreeFactor\n', keyFormatter: typing.Callable[[int], str] = ...) -> None: ...
    @typing.overload
    def sum(self, nrFrontals: int) -> DiscreteFactor:
        """Create new factor by summing all values with the same separator values."""
    @typing.overload
    def sum(self, keys: Ordering) -> DiscreteFactor:
        """Create new factor by summing all values with the same separator values."""
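A short sketch of building and querying a DecisionTreeFactor with the constructors and operations listed above; the space-separated table string and dict-style DiscreteValues assignment follow standard GTSAM usage.

# Sketch: a factor over two binary variables, queried and reduced.
import gtsam

A = (0, 2)   # (key, cardinality)
B = (1, 2)

f = gtsam.DecisionTreeFactor([A, B], "1 2 3 4")

values = gtsam.DiscreteValues()
values[0] = 1
values[1] = 0
print(f(values))        # factor value at A=1, B=0

g = f.sum(1)            # sum out the frontal variable -> factor on the remaining key
h = f.max(1)            # max out instead of summing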
class DegeneracyMode:
    """Members: IGNORE_DEGENERACY ZERO_ON_DEGENERACY HANDLE_INFINITY"""
    HANDLE_INFINITY: typing.ClassVar[DegeneracyMode]  # value = <DegeneracyMode.HANDLE_INFINITY: 2>
    IGNORE_DEGENERACY: typing.ClassVar[DegeneracyMode]  # value = <DegeneracyMode.IGNORE_DEGENERACY: 0>
    ZERO_ON_DEGENERACY: typing.ClassVar[DegeneracyMode]  # value = <DegeneracyMode.ZERO_ON_DEGENERACY: 1>
    __members__: typing.ClassVar[dict[str, DegeneracyMode]]  # value = {'IGNORE_DEGENERACY': <DegeneracyMode.IGNORE_DEGENERACY: 0>, 'ZERO_ON_DEGENERACY': <DegeneracyMode.ZERO_ON_DEGENERACY: 1>, 'HANDLE_INFINITY': <DegeneracyMode.HANDLE_INFINITY: 2>}
    def __and__(self, other: typing.Any) -> typing.Any: ...
    def __eq__(self, other: typing.Any) -> bool: ...
    def __ge__(self, other: typing.Any) -> bool: ...
    def __getstate__(self) -> int: ...
    def __gt__(self, other: typing.Any) -> bool: ...
    def __hash__(self) -> int: ...
    def __index__(self) -> int: ...
    def __init__(self, value: int) -> None: ...
    def __int__(self) -> int: ...
    def __invert__(self) -> typing.Any: ...
    def __le__(self, other: typing.Any) -> bool: ...
    def __lt__(self, other: typing.Any) -> bool: ...
    def __ne__(self, other: typing.Any) -> bool: ...
    def __or__(self, other: typing.Any) -> typing.Any: ...
    def __rand__(self, other: typing.Any) -> typing.Any: ...
    def __repr__(self) -> str: ...
    def __ror__(self, other: typing.Any) -> typing.Any: ...
    def __rxor__(self, other: typing.Any) -> typing.Any: ...
    def __setstate__(self, state: int) -> None: ...
    def __str__(self) -> str: ...
    def __xor__(self, other: typing.Any) -> typing.Any: ...
    @property
    def name(self) -> str: ...
    @property
    def value(self) -> int: ...
class DerivativeFactorChebyshev1Basis(NoiseModelFactor):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float) -> None: ...
    @typing.overload
    def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float, a: float, b: float) -> None: ...
class DerivativeFactorChebyshev2(NoiseModelFactor):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float) -> None: ...
    @typing.overload
    def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float, a: float, b: float) -> None: ...
class DerivativeFactorChebyshev2Basis(NoiseModelFactor):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float) -> None: ...
    @typing.overload
    def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float, a: float, b: float) -> None: ...
class DerivativeFactorFourierBasis(NoiseModelFactor):
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float) -> None: ...
    @typing.overload
    def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float, a: float, b: float) -> None: ...
class DiscreteBayesNet:
    def __call__(self, arg0: DiscreteValues) -> float: ...
    def __init__(self) -> None: ...
    def __repr__(self, s: str = 'DiscreteBayesNet\n', keyFormatter: typing.Callable[[int], str] = ...) -> str: ...
    @typing.overload
    def _repr_html_(self, keyFormatter: typing.Callable[[int], str] = ...) -> str: ...
    @typing.overload
    def _repr_html_(self, keyFormatter: typing.Callable[[int], str], names: dict[int, list[str]]) -> str:
        """Render as html tables."""
    @typing.overload
    def _repr_markdown_(self, keyFormatter: typing.Callable[[int], str] = ...) -> str: ...
    @typing.overload
    def _repr_markdown_(self, keyFormatter: typing.Callable[[int], str], names: dict[int, list[str]]) -> str:
        """Render as markdown tables."""
    @typing.overload
    def add(self, s: DiscreteConditional) -> None: ...
    @typing.overload
    def add(self, key: tuple[int, int], spec: str) -> None:
        """Add a DiscreteDistribution using a table or a string."""
    @typing.overload
    def add(self, key: tuple[int, int], parents: DiscreteKeys, spec: str) -> None: ...
    @typing.overload
    def add(self, key: tuple[int, int], parents: list[tuple[int, int]], spec: str) -> None: ...
    def at(self, i: int) -> DiscreteConditional:
        """Get a specific factor by index (this checks array bounds and may throw an exception, as opposed to operator[] which does not)."""
    def dot(self, keyFormatter: typing.Callable[[int], str] = ..., writer: DotWriter = ...) -> str:
        """Output to graphviz format string."""
    def empty(self) -> bool:
        """Check if the graph is empty (null factors set by remove() will cause this to return false)."""
    def equals(self, bn: DiscreteBayesNet, tol: float = 1e-09) -> bool:
        """Check equality."""
    def evaluate(self, values: DiscreteValues) -> float: ...
    def keys(self) -> ...:
        """Potentially slow function to return all keys involved, sorted, as a set."""
    def logProbability(self, values: DiscreteValues) -> float: ...
    def print(self, s: str = 'DiscreteBayesNet\n', keyFormatter: typing.Callable[[int], str] = ...) -> None: ...
    @typing.overload
    def sample(self, rng: MT19937 = None) -> DiscreteValues:
        """Do ancestral sampling. Assumes the Bayes net is reverse topologically sorted, i.e. the last conditional will be sampled first. If the Bayes net resulted from eliminating a factor graph, this is true for the elimination ordering. Returns: a sampled value for all variables."""
    @typing.overload
    def sample(self, given: DiscreteValues, rng: MT19937 = None) -> DiscreteValues:
        """Do ancestral sampling, given certain variables. Assumes the Bayes net is reverse topologically sorted and that the Bayes net does not contain any conditionals for the given values. Returns: given values extended with sampled values for all other variables."""
    def saveGraph(self, s: str, keyFormatter: typing.Callable[[int], str] = ..., writer: DotWriter = ...) -> None: ...
    def size(self) -> int:
        """Return the number of factors (including any null factors set by remove())."""
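A small sketch of assembling and querying a two-variable DiscreteBayesNet with the add/evaluate/sample methods above. The signature strings and dict-style DiscreteValues assignment follow standard GTSAM usage; the key numbering is illustrative.

# Sketch: P(A) P(B|A) as a DiscreteBayesNet.
import gtsam

A = (0, 2)
B = (1, 2)

bn = gtsam.DiscreteBayesNet()
bn.add(B, [A], "4/1 1/4")    # P(B|A) from a signature string
bn.add(A, "3/1")             # P(A) prior, added last so it is sampled first

values = gtsam.DiscreteValues()
values[0] = 0
values[1] = 0
print(bn.evaluate(values), bn.logProbability(values))

sample = bn.sample()         # ancestral sampling over all variables
print(sample)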
class DiscreteBayesTree:
    def __call__(self, arg0: DiscreteValues) -> float: ...
    def __getitem__(self, arg0: int) -> DiscreteBayesTreeClique: ...
    def __init__(self) -> None: ...
    def __repr__(self, s: str = 'DiscreteBayesTree\n', keyFormatter: typing.Callable[[int], str] = ...) -> str: ...
    @typing.overload
    def _repr_html_(self, keyFormatter: typing.Callable[[int], str] = ...) -> str: ...
    @typing.overload
    def _repr_html_(self, keyFormatter: typing.Callable[[int], str], names: dict[int, list[str]]) -> str:
        """Render as html tables."""
    @typing.overload
    def _repr_markdown_(self, keyFormatter: typing.Callable[[int], str] = ...) -> str: ...
    @typing.overload
    def _repr_markdown_(self, keyFormatter: typing.Callable[[int], str], names: dict[int, list[str]]) -> str:
        """Render as markdown tables."""
    @typing.overload
    def addClique(self, clique: DiscreteBayesTreeClique) -> None:
        """add a clique (top down)"""
    @typing.overload
    def addClique(self, clique: DiscreteBayesTreeClique, parent_clique: DiscreteBayesTreeClique) -> None:
        """add a clique (top down)"""
    def clique(self, j: int) -> DiscreteBayesTreeClique:
        """Alternate syntax for matlab: find the clique that contains the variable with Key j."""
    def dot(self, keyFormatter: typing.Callable[[int], str] = ...) -> str:
        """Output to graphviz format string."""
    def empty(self) -> bool:
        """Check if there are any cliques in the tree."""
    def equals(self, other: DiscreteBayesTree, tol: float = 1e-09) -> bool:
        """Check equality."""
    def evaluate(self, values: DiscreteValues) -> float: ...
    def insertRoot(self, subtree: DiscreteBayesTreeClique) -> None:
        """Insert a new subtree with known parent clique. This function does not check that the specified parent is the correct parent. This function updates all of the internal data structures associated with adding a subtree, such as populating the nodes index."""
    def joint(self, j1: int, j2: int) -> ...:
        """Return joint on two variables. Limitation: can only calculate joint if cliques are disjoint or one of them is root."""
    def jointBayesNet(self, j1: int, j2: int) -> DiscreteBayesNet:
        """Return joint on two variables as a BayesNet. Limitation: can only calculate joint if cliques are disjoint or one of them is root."""
    def marginalFactor(self, key: int) -> DiscreteConditional: ...
    def numCachedSeparatorMarginals(self) -> int:
        """Collect number of cliques with cached separator marginals."""
    def print(self, s: str = 'DiscreteBayesTree\n', keyFormatter: typing.Callable[[int], str] = ...) -> None:
        """print"""
    def saveGraph(self, s: str, keyFormatter: typing.Callable[[int], str] = ...) -> None: ...
    def size(self) -> int:
        """number of cliques"""
class DiscreteBayesTreeClique:
    def __call__(self, arg0: DiscreteValues) -> float: ...
    def __getitem__(self, arg0: int) -> DiscreteBayesTreeClique: ...
    @typing.overload
    def __init__(self) -> None: ...
    @typing.overload
    def __init__(self, conditional: DiscreteConditional) -> None: ...
    def __repr__(self, s: str = 'DiscreteBayesTreeClique', keyFormatter: typing.Callable[[int], str] = ...) -> str: ...
    def conditional(self) -> DiscreteConditional:
        """Access the conditional."""
    def evaluate(self, values: DiscreteValues) -> float: ...
    def isRoot(self) -> bool:
        """Return true if this clique is the root of a Bayes tree."""
    def nrChildren(self) -> int:
        """Return the number of children."""
    def print(self, s: str = 'DiscreteBayesTreeClique', keyFormatter: typing.Callable[[int], str] = ...) -> None:
        """print this node"""
    def printSignature(self, s: str = 'Clique: ', formatter: typing.Callable[[int], str] = ...) -> None:
        """print index signature only"""
class DiscreteCluster:
    factors: DiscreteFactorGraph
    orderedFrontalKeys: Ordering
    def __getitem__(self, arg0: int) -> DiscreteCluster: ...
    def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str: ...
    def nrChildren(self) -> int: ...
    def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None: ...
class DiscreteConditional(DecisionTreeFactor):
|
|
3076
|
+
@typing.overload
|
|
3077
|
+
def __init__(self) -> None:
|
|
3078
|
+
...
|
|
3079
|
+
@typing.overload
|
|
3080
|
+
def __init__(self, nFrontals: int, f: DecisionTreeFactor) -> None:
|
|
3081
|
+
...
|
|
3082
|
+
@typing.overload
|
|
3083
|
+
def __init__(self, key: tuple[int, int], spec: str) -> None:
|
|
3084
|
+
...
|
|
3085
|
+
@typing.overload
|
|
3086
|
+
def __init__(self, key: tuple[int, int], parents: DiscreteKeys, spec: str) -> None:
|
|
3087
|
+
...
|
|
3088
|
+
@typing.overload
|
|
3089
|
+
def __init__(self, key: tuple[int, int], parents: list[tuple[int, int]], spec: str) -> None:
|
|
3090
|
+
...
|
|
3091
|
+
@typing.overload
|
|
3092
|
+
def __init__(self, joint: DecisionTreeFactor, marginal: DecisionTreeFactor) -> None:
|
|
3093
|
+
...
|
|
3094
|
+
@typing.overload
|
|
3095
|
+
def __init__(self, joint: DecisionTreeFactor, marginal: DecisionTreeFactor, orderedKeys: Ordering) -> None:
|
|
3096
|
+
...
|
|
3097
|
+
@typing.overload
|
|
3098
|
+
def __init__(self, key: tuple[int, int], parents: DiscreteKeys, table: list[float]) -> None:
|
|
3099
|
+
...
|
|
3100
|
+
def __mul__(self, arg0: DiscreteConditional) -> DiscreteConditional:
|
|
3101
|
+
...
|
|
3102
|
+
def __repr__(self, s: str = 'Discrete Conditional\n', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3103
|
+
...
|
|
3104
|
+
@typing.overload
|
|
3105
|
+
def _repr_html_(self, keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3106
|
+
...
|
|
3107
|
+
@typing.overload
|
|
3108
|
+
def _repr_html_(self, keyFormatter: typing.Callable[[int], str], names: dict[int, list[str]]) -> str:
|
|
3109
|
+
"""
|
|
3110
|
+
Render as html table.
|
|
3111
|
+
"""
|
|
3112
|
+
@typing.overload
|
|
3113
|
+
def _repr_markdown_(self, keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3114
|
+
...
|
|
3115
|
+
@typing.overload
|
|
3116
|
+
def _repr_markdown_(self, keyFormatter: typing.Callable[[int], str], names: dict[int, list[str]]) -> str:
|
|
3117
|
+
"""
|
|
3118
|
+
Render as markdown table.
|
|
3119
|
+
"""
|
|
3120
|
+
def argmax(self, parentsValues: DiscreteValues) -> int:
|
|
3121
|
+
"""
|
|
3122
|
+
Return assignment for single frontal variable that maximizes value.
|
|
3123
|
+
|
|
3124
|
+
Args:
|
|
3125
|
+
parentsValues: Known assignments for the parents.
|
|
3126
|
+
|
|
3127
|
+
Returns: maximizing assignment for the frontal variable.
|
|
3128
|
+
"""
|
|
3129
|
+
def choose(self, given: DiscreteValues) -> DiscreteConditional:
|
|
3130
|
+
"""
|
|
3131
|
+
<DiscreteValuesversion
|
|
3132
|
+
|
|
3133
|
+
restrict to given parent values. Note: does not need be complete set. Examples: P(C|D,E) + . -> P(C|D,E) P(C|D,E) + E -> P(C|D) P(C|D,E) + D -> P(C|E) P(C|D,E) + D,E -> P(C) P(C|D,E) + C -> error!
|
|
3134
|
+
|
|
3135
|
+
Returns: a shared_ptr to a new DiscreteConditional
|
|
3136
|
+
"""
|
|
3137
|
+
def equals(self, other: DiscreteConditional, tol: float = 1e-09) -> bool:
|
|
3138
|
+
"""
|
|
3139
|
+
GTSAM-style equals.
|
|
3140
|
+
"""
|
|
3141
|
+
@typing.overload
|
|
3142
|
+
def error(self, values: DiscreteValues) -> float:
|
|
3143
|
+
"""
|
|
3144
|
+
Calculate error for DiscreteValues x, which is -log(probability).
|
|
3145
|
+
"""
|
|
3146
|
+
@typing.overload
|
|
3147
|
+
def error(self, x: ...) -> float:
|
|
3148
|
+
...
|
|
3149
|
+
@typing.overload
|
|
3150
|
+
def evaluate(self, values: DiscreteValues) -> float:
|
|
3151
|
+
"""
|
|
3152
|
+
DiscreteValues version.
|
|
3153
|
+
"""
|
|
3154
|
+
@typing.overload
|
|
3155
|
+
def evaluate(self, x: ...) -> float:
|
|
3156
|
+
"""
|
|
3157
|
+
Calculate probability for HybridValues x.
|
|
3158
|
+
|
|
3159
|
+
Dispatches to DiscreteValues version.
|
|
3160
|
+
"""
|
|
3161
|
+
def firstFrontalKey(self) -> int:
|
|
3162
|
+
"""
|
|
3163
|
+
Convenience function to get the first frontal key.
|
|
3164
|
+
"""
|
|
3165
|
+
@typing.overload
|
|
3166
|
+
def likelihood(self, frontalValues: DiscreteValues) -> DecisionTreeFactor:
|
|
3167
|
+
"""
|
|
3168
|
+
Convert to a likelihood factor by providing value before bar.
|
|
3169
|
+
"""
|
|
3170
|
+
@typing.overload
|
|
3171
|
+
def likelihood(self, value: int) -> DecisionTreeFactor:
|
|
3172
|
+
...
|
|
3173
|
+
@typing.overload
|
|
3174
|
+
def logProbability(self, values: DiscreteValues) -> float:
|
|
3175
|
+
...
|
|
3176
|
+
@typing.overload
|
|
3177
|
+
def logProbability(self, x: ...) -> float:
|
|
3178
|
+
"""
|
|
3179
|
+
Log-probability is just -error(x).
|
|
3180
|
+
"""
|
|
3181
|
+
def marginal(self, key: int) -> DiscreteConditional:
|
|
3182
|
+
"""
|
|
3183
|
+
Calculate marginal on given key, no parent case.
|
|
3184
|
+
"""
|
|
3185
|
+
def negLogConstant(self) -> float:
|
|
3186
|
+
"""
|
|
3187
|
+
negLogConstant is just zero, such that -logProbability(x) = -log(evaluate(x)) = error(x) and hence error(x) > 0 for all x.
|
|
3188
|
+
|
|
3189
|
+
Thus -log(K) for the normalization constant K is 0.
|
|
3190
|
+
"""
|
|
3191
|
+
def nrFrontals(self) -> int:
|
|
3192
|
+
"""
|
|
3193
|
+
return the number of frontals
|
|
3194
|
+
"""
|
|
3195
|
+
def nrParents(self) -> int:
|
|
3196
|
+
"""
|
|
3197
|
+
return the number of parents
|
|
3198
|
+
"""
|
|
3199
|
+
def print(self, s: str = 'Discrete Conditional\n', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3200
|
+
...
|
|
3201
|
+
def printSignature(self, s: str = 'Discrete Conditional: ', formatter: typing.Callable[[int], str] = ...) -> None:
|
|
3202
|
+
"""
|
|
3203
|
+
print index signature only
|
|
3204
|
+
"""
|
|
3205
|
+
@typing.overload
|
|
3206
|
+
def sample(self, parentsValues: DiscreteValues, rng: MT19937 = None) -> int:
|
|
3207
|
+
"""
|
|
3208
|
+
Sample from conditional, given missing variables. Example: std::mt19937_64 rng(42); DiscreteValues given = ...; size_t sample = dc.sample(given, &rng);
|
|
3209
|
+
|
|
3210
|
+
Args:
|
|
3211
|
+
parentsValues: Known values of the parents
|
|
3212
|
+
rng: Pseudo-Random Number Generator.
|
|
3213
|
+
|
|
3214
|
+
Returns: sample from conditional
|
|
3215
|
+
"""
|
|
3216
|
+
@typing.overload
|
|
3217
|
+
def sample(self, value: int, rng: MT19937 = None) -> int:
|
|
3218
|
+
...
|
|
3219
|
+
@typing.overload
|
|
3220
|
+
def sample(self, rng: MT19937 = None) -> int:
|
|
3221
|
+
"""
|
|
3222
|
+
Sample from conditional, zero-parent version. Example: std::mt19937_64 rng(42); auto sample = dc.sample(&rng);
|
|
3223
|
+
"""
|
|
3224
|
+
def sampleInPlace(self, parentsValues: DiscreteValues, rng: MT19937 = None) -> None:
|
|
3225
|
+
"""
|
|
3226
|
+
Sample in place with optional PRNG, stores result in partial solution.
|
|
3227
|
+
"""
|
|
3228
|
+
class DiscreteDistribution(DiscreteConditional):
|
|
3229
|
+
def __call__(self, arg0: int) -> float:
|
|
3230
|
+
...
|
|
3231
|
+
@typing.overload
|
|
3232
|
+
def __init__(self) -> None:
|
|
3233
|
+
...
|
|
3234
|
+
@typing.overload
|
|
3235
|
+
def __init__(self, f: DecisionTreeFactor) -> None:
|
|
3236
|
+
...
|
|
3237
|
+
@typing.overload
|
|
3238
|
+
def __init__(self, key: tuple[int, int], spec: str) -> None:
|
|
3239
|
+
...
|
|
3240
|
+
@typing.overload
|
|
3241
|
+
def __init__(self, key: tuple[int, int], spec: list[float]) -> None:
|
|
3242
|
+
...
|
|
3243
|
+
def __repr__(self, s: str = 'Discrete Prior\n', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3244
|
+
...
|
|
3245
|
+
def pmf(self) -> list[float]:
|
|
3246
|
+
"""
|
|
3247
|
+
We also want to keep the Base version, taking DiscreteValues:
|
|
3248
|
+
|
|
3249
|
+
Return entire probability mass function.
|
|
3250
|
+
"""
|
|
3251
|
+
def print(self, s: str = 'Discrete Prior\n', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3252
|
+
...
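# Usage sketch (not part of the stub): a prior over one ternary variable via the
# (key, spec: list[float]) overload above; the key and numbers are placeholders.
import gtsam

X = (0, 3)                               # DiscreteKey = (Key, cardinality)
prior = gtsam.DiscreteDistribution(X, [0.2, 0.3, 0.5])
pmf = prior.pmf()                        # full probability mass function
p1 = prior(1)                            # __call__: probability that X == 1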
|
|
3253
|
+
class DiscreteEliminationTree:
|
|
3254
|
+
@typing.overload
|
|
3255
|
+
def __init__(self, factorGraph: DiscreteFactorGraph, structure: VariableIndex, order: Ordering) -> None:
|
|
3256
|
+
...
|
|
3257
|
+
@typing.overload
|
|
3258
|
+
def __init__(self, factorGraph: DiscreteFactorGraph, order: Ordering) -> None:
|
|
3259
|
+
...
|
|
3260
|
+
def __repr__(self, name: str = 'EliminationTree: ', formatter: typing.Callable[[int], str] = ...) -> str:
|
|
3261
|
+
...
|
|
3262
|
+
def equals(self, other: DiscreteEliminationTree, tol: float = 1e-09) -> bool:
|
|
3263
|
+
"""
|
|
3264
|
+
Test whether the tree is equal to another.
|
|
3265
|
+
"""
|
|
3266
|
+
def print(self, name: str = 'EliminationTree: ', formatter: typing.Callable[[int], str] = ...) -> None:
|
|
3267
|
+
"""
|
|
3268
|
+
Print the tree to cout.
|
|
3269
|
+
"""
|
|
3270
|
+
class DiscreteFactor(Factor):
|
|
3271
|
+
def __call__(self, arg0: DiscreteValues) -> float:
|
|
3272
|
+
...
|
|
3273
|
+
def __repr__(self, s: str = 'DiscreteFactor\n', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3274
|
+
...
|
|
3275
|
+
def equals(self, lf: DiscreteFactor, tol: float = 1e-09) -> bool:
|
|
3276
|
+
"""
|
|
3277
|
+
equals
|
|
3278
|
+
"""
|
|
3279
|
+
def errorTree(self) -> ...:
|
|
3280
|
+
"""
|
|
3281
|
+
Compute error for each assignment and return as a tree.
|
|
3282
|
+
"""
|
|
3283
|
+
def print(self, s: str = 'DiscreteFactor\n', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3284
|
+
...
|
|
3285
|
+
class DiscreteFactorGraph:
|
|
3286
|
+
def __call__(self, arg0: DiscreteValues) -> float:
|
|
3287
|
+
...
|
|
3288
|
+
@typing.overload
|
|
3289
|
+
def __init__(self) -> None:
|
|
3290
|
+
...
|
|
3291
|
+
@typing.overload
|
|
3292
|
+
def __init__(self, bayesNet: DiscreteBayesNet) -> None:
|
|
3293
|
+
...
|
|
3294
|
+
def __repr__(self, s: str = '') -> str:
|
|
3295
|
+
...
|
|
3296
|
+
@typing.overload
|
|
3297
|
+
def _repr_html_(self, keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3298
|
+
...
|
|
3299
|
+
@typing.overload
|
|
3300
|
+
def _repr_html_(self, keyFormatter: typing.Callable[[int], str], names: dict[int, list[str]]) -> str:
|
|
3301
|
+
"""
|
|
3302
|
+
Render as html tables.
|
|
3303
|
+
|
|
3304
|
+
Args:
|
|
3305
|
+
keyFormatter: GTSAM-style Key formatter.
|
|
3306
|
+
names: optional, a map from Key to category names.
|
|
3307
|
+
|
|
3308
|
+
Returns: std::string a (potentially long) html string.
|
|
3309
|
+
"""
|
|
3310
|
+
@typing.overload
|
|
3311
|
+
def _repr_markdown_(self, keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3312
|
+
...
|
|
3313
|
+
@typing.overload
|
|
3314
|
+
def _repr_markdown_(self, keyFormatter: typing.Callable[[int], str], names: dict[int, list[str]]) -> str:
|
|
3315
|
+
"""
|
|
3316
|
+
Render as markdown tables.
|
|
3317
|
+
|
|
3318
|
+
Args:
|
|
3319
|
+
keyFormatter: GTSAM-style Key formatter.
|
|
3320
|
+
names: optional, a map from Key to category names.
|
|
3321
|
+
|
|
3322
|
+
Returns: std::string a (potentially long) markdown string.
|
|
3323
|
+
"""
|
|
3324
|
+
@typing.overload
|
|
3325
|
+
def add(self, j: tuple[int, int], spec: str) -> None:
|
|
3326
|
+
...
|
|
3327
|
+
@typing.overload
|
|
3328
|
+
def add(self, j: tuple[int, int], spec: list[float]) -> None:
|
|
3329
|
+
...
|
|
3330
|
+
@typing.overload
|
|
3331
|
+
def add(self, keys: DiscreteKeys, spec: str) -> None:
|
|
3332
|
+
...
|
|
3333
|
+
@typing.overload
|
|
3334
|
+
def add(self, keys: list[tuple[int, int]], spec: str) -> None:
|
|
3335
|
+
...
|
|
3336
|
+
@typing.overload
|
|
3337
|
+
def add(self, keys: list[tuple[int, int]], spec: list[float]) -> None:
|
|
3338
|
+
...
|
|
3339
|
+
def at(self, i: int) -> DiscreteFactor:
|
|
3340
|
+
"""
|
|
3341
|
+
Get a specific factor by index and typecast to factor type F (this checks array bounds and may throw an exception, as opposed to operator[] which does not).
|
|
3342
|
+
"""
|
|
3343
|
+
def dot(self, keyFormatter: typing.Callable[[int], str] = ..., writer: DotWriter = ...) -> str:
|
|
3344
|
+
"""
|
|
3345
|
+
Output to graphviz format string.
|
|
3346
|
+
"""
|
|
3347
|
+
@typing.overload
|
|
3348
|
+
def eliminateMultifrontal(self, type: Ordering.OrderingType = Ordering.OrderingType.COLAMD) -> DiscreteBayesTree:
|
|
3349
|
+
...
|
|
3350
|
+
@typing.overload
|
|
3351
|
+
def eliminateMultifrontal(self, type: Ordering.OrderingType, function: typing.Callable[[DiscreteFactorGraph, Ordering], tuple[DiscreteConditional, DiscreteFactor]]) -> DiscreteBayesTree:
|
|
3352
|
+
...
|
|
3353
|
+
@typing.overload
|
|
3354
|
+
def eliminateMultifrontal(self, ordering: Ordering) -> DiscreteBayesTree:
|
|
3355
|
+
"""
|
|
3356
|
+
Do multifrontal elimination of all variables to produce a Bayes tree.
|
|
3357
|
+
|
|
3358
|
+
If an ordering is not provided, the ordering will be computed using either COLAMD or METIS, depending on the parameter orderingType (Ordering::COLAMD or Ordering::METIS).
Example - Full QR elimination in specified order:
    std::shared_ptr<GaussianBayesTree> result = graph.eliminateMultifrontal(EliminateQR, myOrdering);
|
|
3359
|
+
"""
|
|
3360
|
+
@typing.overload
|
|
3361
|
+
def eliminateMultifrontal(self, ordering: Ordering, function: typing.Callable[[DiscreteFactorGraph, Ordering], tuple[DiscreteConditional, DiscreteFactor]]) -> DiscreteBayesTree:
|
|
3362
|
+
...
|
|
3363
|
+
@typing.overload
|
|
3364
|
+
def eliminatePartialMultifrontal(self, ordering: Ordering) -> tuple[DiscreteBayesTree, DiscreteFactorGraph]:
|
|
3365
|
+
"""
|
|
3366
|
+
Do multifrontal elimination of some variables, in the ordering provided, to produce a Bayes tree and a remaining factor graph.
|
|
3367
|
+
|
|
3368
|
+
This computes the factorization $ p(X) = p(A|B) p(B) $, where $ A $ is the set of supplied variables, $ X $ is all the variables in the factor graph, and $ B = X \\backslash A $.
|
|
3370
|
+
"""
|
|
3371
|
+
@typing.overload
|
|
3372
|
+
def eliminatePartialMultifrontal(self, ordering: Ordering, function: typing.Callable[[DiscreteFactorGraph, Ordering], tuple[DiscreteConditional, DiscreteFactor]]) -> tuple[DiscreteBayesTree, DiscreteFactorGraph]:
|
|
3373
|
+
...
|
|
3374
|
+
@typing.overload
|
|
3375
|
+
def eliminatePartialSequential(self, ordering: Ordering) -> tuple[DiscreteBayesNet, DiscreteFactorGraph]:
|
|
3376
|
+
"""
|
|
3377
|
+
Do sequential elimination of some variables, in the ordering provided, to produce a Bayes net and a remaining factor graph.
|
|
3378
|
+
|
|
3379
|
+
This computes the factorization $ p(X) = p(A|B) p(B) $, where $ A $ is the set of supplied variables, $ X $ is all the variables in the factor graph, and $ B = X \\backslash A $.
|
|
3380
|
+
"""
|
|
3381
|
+
@typing.overload
|
|
3382
|
+
def eliminatePartialSequential(self, ordering: Ordering, function: typing.Callable[[DiscreteFactorGraph, Ordering], tuple[DiscreteConditional, DiscreteFactor]]) -> tuple[DiscreteBayesNet, DiscreteFactorGraph]:
|
|
3383
|
+
...
|
|
3384
|
+
@typing.overload
|
|
3385
|
+
def eliminateSequential(self, type: Ordering.OrderingType = Ordering.OrderingType.COLAMD) -> DiscreteBayesNet:
|
|
3386
|
+
...
|
|
3387
|
+
@typing.overload
|
|
3388
|
+
def eliminateSequential(self, type: Ordering.OrderingType, function: typing.Callable[[DiscreteFactorGraph, Ordering], tuple[DiscreteConditional, DiscreteFactor]]) -> DiscreteBayesNet:
|
|
3389
|
+
...
|
|
3390
|
+
@typing.overload
|
|
3391
|
+
def eliminateSequential(self, ordering: Ordering) -> DiscreteBayesNet:
|
|
3392
|
+
"""
|
|
3393
|
+
Do sequential elimination of all variables to produce a Bayes net.
|
|
3394
|
+
|
|
3395
|
+
Example - Full QR elimination in specified order:
    std::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(myOrdering, EliminateQR);
Example - Reusing an existing VariableIndex to improve performance:
    VariableIndex varIndex(graph);  // Build variable index
    Data data = otherFunctionUsingVariableIndex(graph, varIndex);  // Other code that uses variable index
    std::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(myOrdering, EliminateQR, varIndex, std::nullopt);
|
|
3396
|
+
"""
|
|
3397
|
+
@typing.overload
|
|
3398
|
+
def eliminateSequential(self, ordering: Ordering, function: typing.Callable[[DiscreteFactorGraph, Ordering], tuple[DiscreteConditional, DiscreteFactor]]) -> DiscreteBayesNet:
|
|
3399
|
+
...
|
|
3400
|
+
def empty(self) -> bool:
|
|
3401
|
+
"""
|
|
3402
|
+
Check if the graph is empty (null factors set by remove() will cause this to return false).
|
|
3403
|
+
"""
|
|
3404
|
+
def equals(self, fg: DiscreteFactorGraph, tol: float = 1e-09) -> bool:
|
|
3405
|
+
...
|
|
3406
|
+
def keys(self) -> ...:
|
|
3407
|
+
"""
|
|
3408
|
+
Return the set of variables involved in the factors (set union)
|
|
3409
|
+
"""
|
|
3410
|
+
@typing.overload
|
|
3411
|
+
def maxProduct(self, type: Ordering.OrderingType = Ordering.OrderingType.COLAMD) -> DiscreteLookupDAG:
|
|
3412
|
+
...
|
|
3413
|
+
@typing.overload
|
|
3414
|
+
def maxProduct(self, ordering: Ordering) -> DiscreteLookupDAG:
|
|
3415
|
+
"""
|
|
3416
|
+
Implement the max-product algorithm.
|
|
3417
|
+
|
|
3418
|
+
Args:
|
|
3419
|
+
ordering:
|
|
3420
|
+
|
|
3421
|
+
Returns: DiscreteLookupDAG, a DAG with lookup tables.
|
|
3422
|
+
"""
|
|
3423
|
+
def optimize(self) -> DiscreteValues:
|
|
3424
|
+
"""
|
|
3425
|
+
Find the maximum probable explanation (MPE) by doing max-product.
|
|
3426
|
+
|
|
3427
|
+
Returns: DiscreteValues : MPE
|
|
3428
|
+
"""
|
|
3429
|
+
def print(self, s: str = '') -> None:
|
|
3430
|
+
...
|
|
3431
|
+
def product(self) -> DiscreteFactor:
|
|
3432
|
+
"""
|
|
3433
|
+
return product of all factors as a single factor
|
|
3434
|
+
"""
|
|
3435
|
+
@typing.overload
|
|
3436
|
+
def push_back(self, factor: DiscreteFactor) -> None:
|
|
3437
|
+
"""
|
|
3438
|
+
Add a factor directly using a shared_ptr.
|
|
3439
|
+
"""
|
|
3440
|
+
@typing.overload
|
|
3441
|
+
def push_back(self, conditional: DiscreteConditional) -> None:
|
|
3442
|
+
...
|
|
3443
|
+
@typing.overload
|
|
3444
|
+
def push_back(self, graph: DiscreteFactorGraph) -> None:
|
|
3445
|
+
...
|
|
3446
|
+
@typing.overload
|
|
3447
|
+
def push_back(self, bayesNet: DiscreteBayesNet) -> None:
|
|
3448
|
+
...
|
|
3449
|
+
@typing.overload
|
|
3450
|
+
def push_back(self, bayesTree: DiscreteBayesTree) -> None:
|
|
3451
|
+
"""
|
|
3452
|
+
Push back a BayesTree as a collection of factors.
|
|
3453
|
+
|
|
3454
|
+
NOTE: This should be hidden in derived classes in favor of a type-specialized version that calls this templated function.
|
|
3455
|
+
"""
|
|
3456
|
+
def saveGraph(self, s: str, keyFormatter: typing.Callable[[int], str] = ..., writer: DotWriter = ...) -> None:
|
|
3457
|
+
...
|
|
3458
|
+
def size(self) -> int:
|
|
3459
|
+
"""
|
|
3460
|
+
return the number of factors (including any null factors set by remove()).
|
|
3461
|
+
"""
|
|
3462
|
+
@typing.overload
|
|
3463
|
+
def sumProduct(self, type: Ordering.OrderingType = Ordering.OrderingType.COLAMD) -> DiscreteBayesNet:
|
|
3464
|
+
...
|
|
3465
|
+
@typing.overload
|
|
3466
|
+
def sumProduct(self, ordering: Ordering) -> DiscreteBayesNet:
|
|
3467
|
+
"""
|
|
3468
|
+
Implement the sum-product algorithm.
|
|
3469
|
+
|
|
3470
|
+
Args:
|
|
3471
|
+
ordering:
|
|
3472
|
+
|
|
3473
|
+
Returns: DiscreteBayesNet encoding posterior P(X|Z)
|
|
3474
|
+
"""
|
|
3475
|
+
class DiscreteJunctionTree:
|
|
3476
|
+
def __getitem__(self, arg0: int) -> DiscreteCluster:
|
|
3477
|
+
...
|
|
3478
|
+
def __init__(self, eliminationTree: DiscreteEliminationTree) -> None:
|
|
3479
|
+
...
|
|
3480
|
+
def __repr__(self, name: str = 'JunctionTree: ', formatter: typing.Callable[[int], str] = ...) -> str:
|
|
3481
|
+
...
|
|
3482
|
+
def nrRoots(self) -> int:
|
|
3483
|
+
...
|
|
3484
|
+
def print(self, name: str = 'JunctionTree: ', formatter: typing.Callable[[int], str] = ...) -> None:
|
|
3485
|
+
"""
|
|
3486
|
+
Print the tree to cout.
|
|
3487
|
+
"""
|
|
3488
|
+
class DiscreteKeys:
|
|
3489
|
+
def __init__(self) -> None:
|
|
3490
|
+
...
|
|
3491
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3492
|
+
...
|
|
3493
|
+
def at(self, n: int) -> tuple[int, int]:
|
|
3494
|
+
...
|
|
3495
|
+
def empty(self) -> bool:
|
|
3496
|
+
...
|
|
3497
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3498
|
+
"""
|
|
3499
|
+
Print the keys and cardinalities.
|
|
3500
|
+
"""
|
|
3501
|
+
def push_back(self, point_pair: tuple[int, int]) -> None:
|
|
3502
|
+
...
|
|
3503
|
+
def size(self) -> int:
|
|
3504
|
+
...
|
|
3505
|
+
class DiscreteLookupDAG:
|
|
3506
|
+
def __init__(self) -> None:
|
|
3507
|
+
...
|
|
3508
|
+
def __repr__(self, s: str = 'DiscreteLookupDAG\n', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3509
|
+
...
|
|
3510
|
+
@typing.overload
|
|
3511
|
+
def argmax(self) -> DiscreteValues:
|
|
3512
|
+
"""
|
|
3513
|
+
argmax by back-substitution, optionally given certain variables.
|
|
3514
|
+
|
|
3515
|
+
Assumes the DAG is reverse topologically sorted, i.e. last conditional will be optimized first and that the DAG does not contain any conditionals for the given variables. If the DAG resulted from eliminating a factor graph, this is true for the elimination ordering.
|
|
3516
|
+
|
|
3517
|
+
Returns: the given assignment, extended with the optimal assignment for all variables.
|
|
3518
|
+
"""
|
|
3519
|
+
@typing.overload
|
|
3520
|
+
def argmax(self, given: DiscreteValues) -> DiscreteValues:
|
|
3521
|
+
"""
|
|
3522
|
+
argmax by back-substitution, optionally given certain variables.
|
|
3523
|
+
|
|
3524
|
+
Assumes the DAG is reverse topologically sorted, i.e. last conditional will be optimized first and that the DAG does not contain any conditionals for the given variables. If the DAG resulted from eliminating a factor graph, this is true for the elimination ordering.
|
|
3525
|
+
|
|
3526
|
+
Returns: the given assignment, extended with the optimal assignment for all variables.
|
|
3527
|
+
"""
|
|
3528
|
+
def at(self, i: int) -> DiscreteLookupTable:
|
|
3529
|
+
"""
|
|
3530
|
+
Get a specific factor by index (this checks array bounds and may throw an exception, as opposed to operator[] which does not).
|
|
3531
|
+
"""
|
|
3532
|
+
def empty(self) -> bool:
|
|
3533
|
+
"""
|
|
3534
|
+
Check if the graph is empty (null factors set by remove() will cause this to return false).
|
|
3535
|
+
"""
|
|
3536
|
+
def keys(self) -> ...:
|
|
3537
|
+
"""
|
|
3538
|
+
Potentially slow function to return all keys involved, sorted, as a set.
|
|
3539
|
+
"""
|
|
3540
|
+
def print(self, s: str = 'DiscreteLookupDAG\n', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3541
|
+
...
|
|
3542
|
+
def push_back(self, table: DiscreteLookupTable) -> None:
|
|
3543
|
+
...
|
|
3544
|
+
def size(self) -> int:
|
|
3545
|
+
"""
|
|
3546
|
+
return the number of factors (including any null factors set by remove()).
|
|
3547
|
+
"""
|
|
3548
|
+
class DiscreteLookupTable(DiscreteConditional):
|
|
3549
|
+
def __init__(self, nFrontals: int, keys: DiscreteKeys, potentials: ...) -> None:
|
|
3550
|
+
...
|
|
3551
|
+
def __repr__(self, s: str = 'Discrete Lookup Table: ', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3552
|
+
...
|
|
3553
|
+
def argmax(self, parentsValues: DiscreteValues) -> int:
|
|
3554
|
+
"""
|
|
3555
|
+
return assignment for single frontal variable that maximizes value.
|
|
3556
|
+
|
|
3557
|
+
Args:
|
|
3558
|
+
parentsValues: Known assignments for the parents.
|
|
3559
|
+
|
|
3560
|
+
Returns: maximizing assignment for the frontal variable.
|
|
2953
3561
|
"""
|
|
2954
3562
|
def print(self, s: str = 'Discrete Lookup Table: ', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
2955
3563
|
...
|
|
2956
|
-
class DiscreteMarginals:
|
|
2957
|
-
def __call__(self, arg0: int) -> DiscreteFactor:
|
|
3564
|
+
class DiscreteMarginals:
|
|
3565
|
+
def __call__(self, arg0: int) -> DiscreteFactor:
|
|
3566
|
+
...
|
|
3567
|
+
@typing.overload
|
|
3568
|
+
def __init__(self) -> None:
|
|
3569
|
+
...
|
|
3570
|
+
@typing.overload
|
|
3571
|
+
def __init__(self, graph: DiscreteFactorGraph) -> None:
|
|
3572
|
+
...
|
|
3573
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3574
|
+
...
|
|
3575
|
+
def marginalProbabilities(self, key: tuple[int, int]) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3576
|
+
"""
|
|
3577
|
+
Compute the marginal of a single variable.
|
|
3578
|
+
|
|
3579
|
+
Args:
|
|
3580
|
+
key: DiscreteKey of the Variable
|
|
3581
|
+
|
|
3582
|
+
Returns: Vector of marginal probabilities
|
|
3583
|
+
"""
|
|
3584
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3585
|
+
...
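# Usage sketch (not part of the stub): query a single-variable marginal from a
# small, hypothetical factor graph.
import gtsam

A = (0, 2)
graph = gtsam.DiscreteFactorGraph()
graph.add(A, "0.6 0.4")

marginals = gtsam.DiscreteMarginals(graph)
pA = marginals.marginalProbabilities(A)  # vector of P(A = a)
fA = marginals(A[0])                     # __call__ takes the raw Key (an int)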
|
|
3586
|
+
class DiscreteScenario(Scenario):
|
|
3587
|
+
@staticmethod
|
|
3588
|
+
def FromCSV(csv_filepath: str) -> DiscreteScenario:
|
|
3589
|
+
"""
|
|
3590
|
+
Named constructor to create a scenario from a CSV file.
|
|
3591
|
+
|
|
3592
|
+
The CSV file should contain a header row followed by data rows. All timestamps will be normalized so that the first timestamp in the file corresponds to t=0 for the scenario. CSV is expected to contain the following columns: t,q_w,q_x,q_y,q_z,v_x,v_y,v_z,p_x,p_y,p_z,w_x,w_y,w_z,a_x,a_y,a_z Other columns will be ignored.
|
|
3593
|
+
|
|
3594
|
+
Args:
|
|
3595
|
+
csv_filepath: Path to the CSV file.
|
|
3596
|
+
|
|
3597
|
+
Returns: A constructed DiscreteScenario.
|
|
3598
|
+
"""
|
|
3599
|
+
def __init__(self, poses: dict[float, Pose3], angularVelocities_b: dict[float, numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]], velocities_n: dict[float, numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]], accelerations_n: dict[float, numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]]) -> None:
|
|
3600
|
+
...
|
|
3601
|
+
class DiscreteSearch:
|
|
3602
|
+
@staticmethod
|
|
3603
|
+
def FromFactorGraph(factorGraph: DiscreteFactorGraph, ordering: Ordering, buildJunctionTree: bool = False) -> DiscreteSearch:
|
|
3604
|
+
"""
|
|
3605
|
+
Construct from a DiscreteFactorGraph.
|
|
3606
|
+
|
|
3607
|
+
Internally creates either an elimination tree or a junction tree. The latter incurs more up-front computation but the search itself might be faster. Then again, for the elimination tree, the heuristic will be more fine-grained (more slots).
|
|
3608
|
+
|
|
3609
|
+
Args:
|
|
3610
|
+
factorGraph: The factor graph to search over.
|
|
3611
|
+
ordering: The ordering used to create etree (and maybe jtree).
|
|
3612
|
+
buildJunctionTree: Whether to build a junction tree or not.
|
|
3613
|
+
"""
|
|
3614
|
+
@typing.overload
|
|
3615
|
+
def __init__(self, etree: DiscreteEliminationTree) -> None:
|
|
3616
|
+
...
|
|
3617
|
+
@typing.overload
|
|
3618
|
+
def __init__(self, junctionTree: DiscreteJunctionTree) -> None:
|
|
3619
|
+
...
|
|
3620
|
+
@typing.overload
|
|
3621
|
+
def __init__(self, bayesNet: DiscreteBayesNet) -> None:
|
|
3622
|
+
...
|
|
3623
|
+
@typing.overload
|
|
3624
|
+
def __init__(self, bayesTree: DiscreteBayesTree) -> None:
|
|
3625
|
+
...
|
|
3626
|
+
def __repr__(self, name: str = 'DiscreteSearch: ', formatter: typing.Callable[[int], str] = ...) -> str:
|
|
3627
|
+
...
|
|
3628
|
+
def lowerBound(self) -> float:
|
|
3629
|
+
"""
|
|
3630
|
+
Return lower bound on the cost-to-go for the entire search.
|
|
3631
|
+
"""
|
|
3632
|
+
def print(self, name: str = 'DiscreteSearch: ', formatter: typing.Callable[[int], str] = ...) -> None:
|
|
3633
|
+
"""
|
|
3634
|
+
Print the tree to cout.
|
|
3635
|
+
"""
|
|
3636
|
+
def run(self, K: int = 1) -> list[DiscreteSearchSolution]:
|
|
3637
|
+
"""
|
|
3638
|
+
Search for the K best solutions.
|
|
3639
|
+
|
|
3640
|
+
This method performs a search to find the K best solutions for the given DiscreteBayesNet. It uses a priority queue to manage the search nodes, expanding nodes with the smallest bound first. The search continues until all possible nodes have been expanded or pruned.
|
|
3641
|
+
|
|
3642
|
+
Returns: A vector of the K best solutions found during the search.
|
|
3643
|
+
"""
|
|
3644
|
+
class DiscreteSearchSolution:
|
|
3645
|
+
assignment: DiscreteValues
|
|
3646
|
+
error: float
|
|
3647
|
+
def __init__(self, error: float, assignment: DiscreteValues) -> None:
|
|
3648
|
+
...
|
|
3649
|
+
class DiscreteValues:
|
|
3650
|
+
def __bool__(self) -> bool:
|
|
3651
|
+
"""
|
|
3652
|
+
Check whether the map is nonempty
|
|
3653
|
+
"""
|
|
3654
|
+
@typing.overload
|
|
3655
|
+
def __contains__(self, arg0: int) -> bool:
|
|
3656
|
+
...
|
|
3657
|
+
@typing.overload
|
|
3658
|
+
def __contains__(self, arg0: typing.Any) -> bool:
|
|
3659
|
+
...
|
|
3660
|
+
def __delitem__(self, arg0: int) -> None:
|
|
3661
|
+
...
|
|
3662
|
+
def __getitem__(self, arg0: int) -> int:
|
|
3663
|
+
...
|
|
3664
|
+
def __init__(self) -> None:
|
|
3665
|
+
...
|
|
3666
|
+
def __iter__(self) -> typing.Iterator[int]:
|
|
3667
|
+
...
|
|
3668
|
+
def __len__(self) -> int:
|
|
3669
|
+
...
|
|
3670
|
+
def __repr__(self) -> str:
|
|
3671
|
+
"""
|
|
3672
|
+
Return the canonical string representation of this map.
|
|
3673
|
+
"""
|
|
3674
|
+
def __setitem__(self, arg0: int, arg1: int) -> None:
|
|
3675
|
+
...
|
|
3676
|
+
def items(self) -> typing.ItemsView:
|
|
3677
|
+
...
|
|
3678
|
+
def keys(self) -> typing.KeysView:
|
|
3679
|
+
...
|
|
3680
|
+
def values(self) -> typing.ValuesView:
|
|
3681
|
+
...
|
|
3682
|
+
class DoglegOptimizer(NonlinearOptimizer):
|
|
3683
|
+
@typing.overload
|
|
3684
|
+
def __init__(self, graph: NonlinearFactorGraph, initialValues: ...) -> None:
|
|
3685
|
+
...
|
|
3686
|
+
@typing.overload
|
|
3687
|
+
def __init__(self, graph: NonlinearFactorGraph, initialValues: ..., params: DoglegParams) -> None:
|
|
3688
|
+
...
|
|
3689
|
+
def getDelta(self) -> float:
|
|
3690
|
+
"""
|
|
3691
|
+
Access the current trust region radius delta.
|
|
3692
|
+
"""
|
|
3693
|
+
class DoglegParams(NonlinearOptimizerParams):
|
|
3694
|
+
def __init__(self) -> None:
|
|
3695
|
+
...
|
|
3696
|
+
def getDeltaInitial(self) -> float:
|
|
3697
|
+
...
|
|
3698
|
+
def getVerbosityDL(self) -> str:
|
|
3699
|
+
...
|
|
3700
|
+
def setDeltaInitial(self, deltaInitial: float) -> None:
|
|
3701
|
+
...
|
|
3702
|
+
def setVerbosityDL(self, verbosityDL: str) -> None:
|
|
3703
|
+
...
|
|
3704
|
+
class DotWriter:
|
|
3705
|
+
binaryEdges: bool
|
|
3706
|
+
boxes: set[int]
|
|
3707
|
+
connectKeysToFactor: bool
|
|
3708
|
+
factorPositions: dict[int, numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]
|
|
3709
|
+
figureHeightInches: float
|
|
3710
|
+
figureWidthInches: float
|
|
3711
|
+
plotFactorPoints: bool
|
|
3712
|
+
positionHints: dict[str, float]
|
|
3713
|
+
variablePositions: dict[int, numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]
|
|
3714
|
+
def __init__(self, figureWidthInches: float = 5, figureHeightInches: float = 5, plotFactorPoints: bool = True, connectKeysToFactor: bool = True, binaryEdges: bool = True) -> None:
|
|
3715
|
+
...
|
|
3716
|
+
class DummyPreconditionerParameters(PreconditionerParameters):
|
|
3717
|
+
def __init__(self) -> None:
|
|
3718
|
+
...
|
|
3719
|
+
class EdgeKey:
|
|
3720
|
+
@typing.overload
|
|
3721
|
+
def __init__(self, i: int, j: int) -> None:
|
|
3722
|
+
...
|
|
3723
|
+
@typing.overload
|
|
3724
|
+
def __init__(self, key: int) -> None:
|
|
3725
|
+
...
|
|
3726
|
+
@typing.overload
|
|
3727
|
+
def __init__(self, key: EdgeKey) -> None:
|
|
3728
|
+
...
|
|
3729
|
+
def __repr__(self, s: str = '') -> str:
|
|
3730
|
+
...
|
|
3731
|
+
def i(self) -> int:
|
|
3732
|
+
"""
|
|
3733
|
+
Retrieve high 32 bits.
|
|
3734
|
+
"""
|
|
3735
|
+
def j(self) -> int:
|
|
3736
|
+
"""
|
|
3737
|
+
Retrieve low 32 bits.
|
|
3738
|
+
"""
|
|
3739
|
+
def key(self) -> int:
|
|
3740
|
+
"""
|
|
3741
|
+
Cast to Key.
|
|
3742
|
+
"""
|
|
3743
|
+
def print(self, s: str = '') -> None:
|
|
3744
|
+
"""
|
|
3745
|
+
Prints the EdgeKey with an optional prefix string.
|
|
3746
|
+
"""
|
|
3747
|
+
class EssentialMatrix:
|
|
3748
|
+
@staticmethod
|
|
3749
|
+
def Dim() -> int:
|
|
3750
|
+
...
|
|
3751
|
+
@staticmethod
|
|
3752
|
+
@typing.overload
|
|
3753
|
+
def FromPose3(_1P2_: Pose3) -> EssentialMatrix:
|
|
3754
|
+
"""
|
|
3755
|
+
Named constructor converting a Pose3 with scale to an EssentialMatrix (no scale).
|
|
3756
|
+
"""
|
|
3757
|
+
@staticmethod
|
|
3758
|
+
@typing.overload
|
|
3759
|
+
def FromPose3(_1P2_: Pose3, H: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> EssentialMatrix:
|
|
3760
|
+
"""
|
|
3761
|
+
Named constructor converting a Pose3 with scale to an EssentialMatrix (no scale).
|
|
3762
|
+
"""
|
|
3763
|
+
def __init__(self, aRb: Rot3, aTb: Unit3) -> None:
|
|
3764
|
+
...
|
|
3765
|
+
def __repr__(self, s: str = '') -> str:
|
|
3766
|
+
...
|
|
3767
|
+
def dim(self) -> int:
|
|
3768
|
+
...
|
|
3769
|
+
def direction(self) -> Unit3:
|
|
3770
|
+
"""
|
|
3771
|
+
Direction.
|
|
3772
|
+
"""
|
|
3773
|
+
def equals(self, other: EssentialMatrix, tol: float) -> bool:
|
|
3774
|
+
"""
|
|
3775
|
+
assert equality up to a tolerance
|
|
3776
|
+
"""
|
|
3777
|
+
def error(self, vA: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], vB: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float:
|
|
3778
|
+
"""
|
|
3779
|
+
epipolar error, algebraic
|
|
3780
|
+
"""
|
|
3781
|
+
def localCoordinates(self, other: EssentialMatrix) -> numpy.ndarray[tuple[typing.Literal[5], typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3782
|
+
"""
|
|
3783
|
+
Compute the coordinates in the tangent space.
|
|
3784
|
+
"""
|
|
3785
|
+
def matrix(self) -> numpy.ndarray[tuple[typing.Literal[3], typing.Literal[3]], numpy.dtype[numpy.float64]]:
|
|
3786
|
+
"""
|
|
3787
|
+
Return 3*3 matrix representation.
|
|
3788
|
+
"""
|
|
3789
|
+
def print(self, s: str = '') -> None:
|
|
3790
|
+
"""
|
|
3791
|
+
print with optional string
|
|
3792
|
+
"""
|
|
3793
|
+
def retract(self, xi: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]) -> EssentialMatrix:
|
|
3794
|
+
"""
|
|
3795
|
+
Retract delta to manifold.
|
|
3796
|
+
"""
|
|
3797
|
+
def rotation(self) -> Rot3:
|
|
3798
|
+
"""
|
|
3799
|
+
Rotation.
|
|
3800
|
+
"""
|
|
3801
|
+
class EssentialMatrixConstraint(NoiseModelFactor):
|
|
3802
|
+
def __init__(self, key1: int, key2: int, measuredE: EssentialMatrix, model: noiseModel.Base) -> None:
|
|
3803
|
+
...
|
|
3804
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3805
|
+
...
|
|
3806
|
+
def equals(self, expected: EssentialMatrixConstraint, tol: float) -> bool:
|
|
3807
|
+
"""
|
|
3808
|
+
equals
|
|
3809
|
+
"""
|
|
3810
|
+
def evaluateError(self, p1: Pose3, p2: Pose3) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3811
|
+
...
|
|
3812
|
+
def measured(self) -> EssentialMatrix:
|
|
3813
|
+
"""
|
|
3814
|
+
return the measured
|
|
3815
|
+
"""
|
|
3816
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3817
|
+
"""
|
|
3818
|
+
implement functions needed for Testable
|
|
3819
|
+
|
|
3820
|
+
print
|
|
3821
|
+
"""
|
|
3822
|
+
class EssentialMatrixFactor(NoiseModelFactor):
|
|
3823
|
+
def __init__(self, key: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base) -> None:
|
|
3824
|
+
...
|
|
3825
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3826
|
+
...
|
|
3827
|
+
def evaluateError(self, E: EssentialMatrix) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3828
|
+
...
|
|
3829
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3830
|
+
"""
|
|
3831
|
+
print
|
|
3832
|
+
"""
|
|
3833
|
+
class EssentialMatrixFactor2(NoiseModelFactor):
|
|
3834
|
+
def __init__(self, key1: int, key2: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base) -> None:
|
|
3835
|
+
...
|
|
3836
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3837
|
+
...
|
|
3838
|
+
def evaluateError(self, E: EssentialMatrix, d: float) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3839
|
+
...
|
|
3840
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3841
|
+
"""
|
|
3842
|
+
print
|
|
3843
|
+
"""
|
|
3844
|
+
class EssentialMatrixFactor3(EssentialMatrixFactor2):
|
|
3845
|
+
def __init__(self, key1: int, key2: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], cRb: Rot3, model: noiseModel.Base) -> None:
|
|
3846
|
+
...
|
|
3847
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3848
|
+
...
|
|
3849
|
+
def evaluateError(self, E: EssentialMatrix, d: float) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3850
|
+
...
|
|
3851
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3852
|
+
"""
|
|
3853
|
+
print
|
|
3854
|
+
"""
|
|
3855
|
+
class EssentialMatrixFactor4Cal3Bundler(NoiseModelFactor):
|
|
3856
|
+
def __init__(self, keyE: int, keyK: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
|
|
3857
|
+
...
|
|
3858
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3859
|
+
...
|
|
3860
|
+
def evaluateError(self, E: EssentialMatrix, K: Cal3Bundler) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3861
|
+
...
|
|
3862
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3863
|
+
...
|
|
3864
|
+
class EssentialMatrixFactor4Cal3DS2(NoiseModelFactor):
|
|
3865
|
+
def __init__(self, keyE: int, keyK: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
|
|
3866
|
+
...
|
|
3867
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3868
|
+
...
|
|
3869
|
+
def evaluateError(self, E: EssentialMatrix, K: Cal3DS2) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3870
|
+
...
|
|
3871
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3872
|
+
...
|
|
3873
|
+
class EssentialMatrixFactor4Cal3Fisheye(NoiseModelFactor):
|
|
3874
|
+
def __init__(self, keyE: int, keyK: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
|
|
3875
|
+
...
|
|
3876
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3877
|
+
...
|
|
3878
|
+
def evaluateError(self, E: EssentialMatrix, K: Cal3Fisheye) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3879
|
+
...
|
|
3880
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3881
|
+
...
|
|
3882
|
+
class EssentialMatrixFactor4Cal3Unified(NoiseModelFactor):
|
|
3883
|
+
def __init__(self, keyE: int, keyK: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
|
|
3884
|
+
...
|
|
3885
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3886
|
+
...
|
|
3887
|
+
def evaluateError(self, E: EssentialMatrix, K: Cal3Unified) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3888
|
+
...
|
|
3889
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3890
|
+
...
|
|
3891
|
+
class EssentialMatrixFactor4Cal3_S2(NoiseModelFactor):
|
|
3892
|
+
def __init__(self, keyE: int, keyK: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
|
|
3893
|
+
...
|
|
3894
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3895
|
+
...
|
|
3896
|
+
def evaluateError(self, E: EssentialMatrix, K: Cal3_S2) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3897
|
+
...
|
|
3898
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3899
|
+
...
|
|
3900
|
+
class EssentialMatrixFactor4Cal3f(NoiseModelFactor):
|
|
3901
|
+
def __init__(self, keyE: int, keyK: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
|
|
3902
|
+
...
|
|
3903
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3904
|
+
...
|
|
3905
|
+
def evaluateError(self, E: EssentialMatrix, K: Cal3f) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3906
|
+
...
|
|
3907
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3908
|
+
...
|
|
3909
|
+
class EssentialMatrixFactor5Cal3Bundler(NoiseModelFactor):
|
|
3910
|
+
def __init__(self, keyE: int, keyKa: int, keyKb: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
|
|
3911
|
+
...
|
|
3912
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3913
|
+
...
|
|
3914
|
+
def evaluateError(self, E: EssentialMatrix, Ka: Cal3Bundler, Kb: Cal3Bundler) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3915
|
+
...
|
|
3916
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3917
|
+
...
|
|
3918
|
+
class EssentialMatrixFactor5Cal3DS2(NoiseModelFactor):
|
|
3919
|
+
def __init__(self, keyE: int, keyKa: int, keyKb: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
|
|
3920
|
+
...
|
|
3921
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3922
|
+
...
|
|
3923
|
+
def evaluateError(self, E: EssentialMatrix, Ka: Cal3DS2, Kb: Cal3DS2) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3924
|
+
...
|
|
3925
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3926
|
+
...
|
|
3927
|
+
class EssentialMatrixFactor5Cal3Fisheye(NoiseModelFactor):
|
|
3928
|
+
def __init__(self, keyE: int, keyKa: int, keyKb: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
|
|
3929
|
+
...
|
|
3930
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3931
|
+
...
|
|
3932
|
+
def evaluateError(self, E: EssentialMatrix, Ka: Cal3Fisheye, Kb: Cal3Fisheye) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3933
|
+
...
|
|
3934
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3935
|
+
...
|
|
3936
|
+
class EssentialMatrixFactor5Cal3Unified(NoiseModelFactor):
|
|
3937
|
+
def __init__(self, keyE: int, keyKa: int, keyKb: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
|
|
3938
|
+
...
|
|
3939
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3940
|
+
...
|
|
3941
|
+
def evaluateError(self, E: EssentialMatrix, Ka: Cal3Unified, Kb: Cal3Unified) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3942
|
+
...
|
|
3943
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3944
|
+
...
|
|
3945
|
+
class EssentialMatrixFactor5Cal3_S2(NoiseModelFactor):
|
|
3946
|
+
def __init__(self, keyE: int, keyKa: int, keyKb: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
|
|
3947
|
+
...
|
|
3948
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3949
|
+
...
|
|
3950
|
+
def evaluateError(self, E: EssentialMatrix, Ka: Cal3_S2, Kb: Cal3_S2) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3951
|
+
...
|
|
3952
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3953
|
+
...
|
|
3954
|
+
class EssentialMatrixFactor5Cal3f(NoiseModelFactor):
|
|
3955
|
+
def __init__(self, keyE: int, keyKa: int, keyKb: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
|
|
3956
|
+
...
|
|
3957
|
+
def __repr__(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> str:
|
|
3958
|
+
...
|
|
3959
|
+
def evaluateError(self, E: EssentialMatrix, Ka: Cal3f, Kb: Cal3f) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
3960
|
+
...
|
|
3961
|
+
def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
|
|
3962
|
+
...
|
|
3963
|
+
class EssentialTransferFactorCal3Bundler(NoiseModelFactor):
|
|
3964
|
+
def __init__(self, edge1: EdgeKey, edge2: EdgeKey, triplets: list[tuple[numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]], calibration: Cal3Bundler, model: noiseModel.Base = None) -> None:
|
|
3965
|
+
...
|
|
3966
|
+
class EssentialTransferFactorCal3_S2(NoiseModelFactor):
|
|
3967
|
+
def __init__(self, edge1: EdgeKey, edge2: EdgeKey, triplets: list[tuple[numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]], calibration: Cal3_S2, model: noiseModel.Base = None) -> None:
|
|
3968
|
+
...
|
|
3969
|
+
class EssentialTransferFactorCal3f(NoiseModelFactor):
|
|
3970
|
+
def __init__(self, edge1: EdgeKey, edge2: EdgeKey, triplets: list[tuple[numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]], calibration: Cal3f, model: noiseModel.Base = None) -> None:
|
|
3971
|
+
...
|
|
3972
|
+
class EssentialTransferFactorKCal3Bundler(NoiseModelFactor):
|
|
3973
|
+
@typing.overload
|
|
3974
|
+
def __init__(self, edge1: EdgeKey, edge2: EdgeKey, triplets: list[tuple[numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]], model: noiseModel.Base = None) -> None:
|
|
3975
|
+
...
|
|
3976
|
+
@typing.overload
|
|
3977
|
+
def __init__(self, edge1: EdgeKey, edge2: EdgeKey, keyK: int, triplets: list[tuple[numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]], model: noiseModel.Base = None) -> None:
|
|
3978
|
+
...
|
|
3979
|
+
class EssentialTransferFactorKCal3_S2(NoiseModelFactor):
|
|
3980
|
+
@typing.overload
|
|
3981
|
+
def __init__(self, edge1: EdgeKey, edge2: EdgeKey, triplets: list[tuple[numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]], model: noiseModel.Base = None) -> None:
|
|
3982
|
+
...
|
|
3983
|
+
@typing.overload
|
|
3984
|
+
def __init__(self, edge1: EdgeKey, edge2: EdgeKey, keyK: int, triplets: list[tuple[numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]], model: noiseModel.Base = None) -> None:
|
|
3985
|
+
...
|
|
3986
|
+
class EssentialTransferFactorKCal3f(NoiseModelFactor):
|
|
3987
|
+
@typing.overload
|
|
3988
|
+
def __init__(self, edge1: EdgeKey, edge2: EdgeKey, triplets: list[tuple[numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]], model: noiseModel.Base = None) -> None:
|
|
3989
|
+
...
|
|
3990
|
+
@typing.overload
|
|
3991
|
+
def __init__(self, edge1: EdgeKey, edge2: EdgeKey, keyK: int, triplets: list[tuple[numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]], model: noiseModel.Base = None) -> None:
|
|
3992
|
+
...
|
|
3993
|
+
class EvaluationFactorChebyshev1Basis(NoiseModelFactor):
|
|
3994
|
+
@typing.overload
|
|
3995
|
+
def __init__(self) -> None:
|
|
3996
|
+
...
|
|
3997
|
+
@typing.overload
|
|
3998
|
+
def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float) -> None:
|
|
3999
|
+
...
|
|
4000
|
+
@typing.overload
|
|
4001
|
+
def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float, a: float, b: float) -> None:
|
|
4002
|
+
...
|
|
4003
|
+
class EvaluationFactorChebyshev2(NoiseModelFactor):
|
|
4004
|
+
@typing.overload
|
|
4005
|
+
def __init__(self) -> None:
|
|
4006
|
+
...
|
|
4007
|
+
@typing.overload
|
|
4008
|
+
def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float) -> None:
|
|
4009
|
+
...
|
|
4010
|
+
@typing.overload
|
|
4011
|
+
def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float, a: float, b: float) -> None:
|
|
4012
|
+
...
|
|
4013
|
+
class EvaluationFactorChebyshev2Basis(NoiseModelFactor):
|
|
4014
|
+
@typing.overload
|
|
4015
|
+
def __init__(self) -> None:
|
|
4016
|
+
...
|
|
4017
|
+
@typing.overload
|
|
4018
|
+
def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float) -> None:
|
|
4019
|
+
...
|
|
4020
|
+
@typing.overload
|
|
4021
|
+
def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float, a: float, b: float) -> None:
|
|
4022
|
+
...
|
|
4023
|
+
class EvaluationFactorFourierBasis(NoiseModelFactor):
|
|
4024
|
+
@typing.overload
|
|
4025
|
+
def __init__(self) -> None:
|
|
4026
|
+
...
|
|
4027
|
+
@typing.overload
|
|
4028
|
+
def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float) -> None:
|
|
4029
|
+
...
|
|
4030
|
+
@typing.overload
|
|
4031
|
+
def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float, a: float, b: float) -> None:
|
|
4032
|
+
...
|
|
4033
|
+
class Event:
|
|
4034
|
+
@typing.overload
|
|
4035
|
+
def __init__(self) -> None:
|
|
4036
|
+
...
|
|
4037
|
+
@typing.overload
|
|
4038
|
+
def __init__(self, t: float, p: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]) -> None:
|
|
4039
|
+
...
|
|
4040
|
+
@typing.overload
|
|
4041
|
+
def __init__(self, t: float, x: float, y: float, z: float) -> None:
|
|
4042
|
+
...
|
|
4043
|
+
def __repr__(self, s: str = '') -> str:
|
|
4044
|
+
...
|
|
4045
|
+
def height(self) -> float:
|
|
4046
|
+
...
|
|
4047
|
+
def location(self) -> numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
4048
|
+
...
|
|
4049
|
+
def print(self, s: str = '') -> None:
|
|
4050
|
+
"""
|
|
4051
|
+
print with optional string
|
|
4052
|
+
"""
|
|
4053
|
+
def time(self) -> float:
|
|
4054
|
+
...
|
|
4055
|
+
class ExtendedKalmanFilterConstantBias:
|
|
4056
|
+
def Density(self) -> JacobianFactor:
|
|
4057
|
+
...
|
|
4058
|
+
def __init__(self, key_initial: int, x_initial: ..., P_initial: noiseModel.Gaussian) -> None:
|
|
4059
|
+
...
|
|
4060
|
+
def predict(self, motionFactor: NoiseModelFactor) -> ...:
|
|
4061
|
+
...
|
|
4062
|
+
def update(self, measurementFactor: NoiseModelFactor) -> ...:
|
|
4063
|
+
...
|
|
4064
|
+
class ExtendedKalmanFilterGal3:
|
|
4065
|
+
def Density(self) -> JacobianFactor:
|
|
4066
|
+
...
|
|
4067
|
+
def __init__(self, key_initial: int, x_initial: Gal3, P_initial: noiseModel.Gaussian) -> None:
|
|
4068
|
+
...
|
|
4069
|
+
def predict(self, motionFactor: NoiseModelFactor) -> Gal3:
|
|
4070
|
+
...
|
|
4071
|
+
def update(self, measurementFactor: NoiseModelFactor) -> Gal3:
|
|
4072
|
+
...
|
|
4073
|
+
class ExtendedKalmanFilterNavState:
|
|
4074
|
+
def Density(self) -> JacobianFactor:
|
|
4075
|
+
...
|
|
4076
|
+
def __init__(self, key_initial: int, x_initial: ..., P_initial: noiseModel.Gaussian) -> None:
|
|
4077
|
+
...
|
|
4078
|
+
def predict(self, motionFactor: NoiseModelFactor) -> ...:
|
|
4079
|
+
...
|
|
4080
|
+
def update(self, measurementFactor: NoiseModelFactor) -> ...:
|
|
4081
|
+
...
|
|
4082
|
+
class ExtendedKalmanFilterPoint2:
|
|
4083
|
+
def Density(self) -> JacobianFactor:
|
|
4084
|
+
...
|
|
4085
|
+
def __init__(self, key_initial: int, x_initial: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], P_initial: noiseModel.Gaussian) -> None:
|
|
4086
|
+
...
|
|
4087
|
+
def predict(self, motionFactor: NoiseModelFactor) -> numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
4088
|
+
...
|
|
4089
|
+
def update(self, measurementFactor: NoiseModelFactor) -> numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
4090
|
+
...
|
|
4091
|
+
class ExtendedKalmanFilterPoint3:
|
|
4092
|
+
def Density(self) -> JacobianFactor:
|
|
4093
|
+
...
|
|
4094
|
+
def __init__(self, key_initial: int, x_initial: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]], P_initial: noiseModel.Gaussian) -> None:
|
|
4095
|
+
...
|
|
4096
|
+
def predict(self, motionFactor: NoiseModelFactor) -> numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
4097
|
+
...
|
|
4098
|
+
def update(self, measurementFactor: NoiseModelFactor) -> numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]:
|
|
4099
|
+
...
|
|
4100
|
+
class ExtendedKalmanFilterPose2:
|
|
4101
|
+
def Density(self) -> JacobianFactor:
|
|
4102
|
+
...
|
|
4103
|
+
def __init__(self, key_initial: int, x_initial: Pose2, P_initial: noiseModel.Gaussian) -> None:
|
|
4104
|
+
...
|
|
4105
|
+
def predict(self, motionFactor: NoiseModelFactor) -> Pose2:
|
|
4106
|
+
...
|
|
4107
|
+
def update(self, measurementFactor: NoiseModelFactor) -> Pose2:
|
|
4108
|
+
...
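# Usage sketch (not part of the stub): one predict/update cycle with
# ExtendedKalmanFilterPose2. Keys, poses, and noise sigmas are illustrative
# placeholders; BetweenFactorPose2/PriorFactorPose2 serve as the motion and
# measurement factors.
import numpy as np
import gtsam
from gtsam import noiseModel

x0, x1 = 0, 1
prior_noise = noiseModel.Diagonal.Sigmas(np.array([0.1, 0.1, 0.05]))
ekf = gtsam.ExtendedKalmanFilterPose2(x0, gtsam.Pose2(0.0, 0.0, 0.0), prior_noise)

odom_noise = noiseModel.Diagonal.Sigmas(np.array([0.05, 0.05, 0.02]))
motion = gtsam.BetweenFactorPose2(x0, x1, gtsam.Pose2(1.0, 0.0, 0.0), odom_noise)
x1_predicted = ekf.predict(motion)       # propagate the state to x1

meas_noise = noiseModel.Diagonal.Sigmas(np.array([0.2, 0.2, 0.1]))
measurement = gtsam.PriorFactorPose2(x1, gtsam.Pose2(1.0, 0.1, 0.0), meas_noise)
x1_updated = ekf.update(measurement)     # correct x1 with the measurement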
|
|
4109
|
+
class ExtendedKalmanFilterPose3:
|
|
4110
|
+
def Density(self) -> JacobianFactor:
|
|
4111
|
+
...
|
|
4112
|
+
def __init__(self, key_initial: int, x_initial: Pose3, P_initial: noiseModel.Gaussian) -> None:
|
|
4113
|
+
...
|
|
4114
|
+
def predict(self, motionFactor: NoiseModelFactor) -> Pose3:
|
|
4115
|
+
...
|
|
4116
|
+
def update(self, measurementFactor: NoiseModelFactor) -> Pose3:
|
|
4117
|
+
...
|
|
4118
|
+
class ExtendedKalmanFilterRot2:
|
|
4119
|
+
def Density(self) -> JacobianFactor:
|
|
4120
|
+
...
|
|
4121
|
+
def __init__(self, key_initial: int, x_initial: Rot2, P_initial: noiseModel.Gaussian) -> None:
|
|
4122
|
+
...
|
|
4123
|
+
def predict(self, motionFactor: NoiseModelFactor) -> Rot2:
|
|
4124
|
+
...
|
|
4125
|
+
def update(self, measurementFactor: NoiseModelFactor) -> Rot2:
|
|
4126
|
+
...
|
|
4127
|
+
class ExtendedKalmanFilterRot3:
|
|
4128
|
+
def Density(self) -> JacobianFactor:
|
|
4129
|
+
...
|
|
4130
|
+
def __init__(self, key_initial: int, x_initial: Rot3, P_initial: noiseModel.Gaussian) -> None:
|
|
4131
|
+
...
|
|
4132
|
+
def predict(self, motionFactor: NoiseModelFactor) -> Rot3:
|
|
4133
|
+
...
|
|
4134
|
+
def update(self, measurementFactor: NoiseModelFactor) -> Rot3:
|
|
4135
|
+
...
|
|
4136
|
+
class ExtendedKalmanFilterSL4:
|
|
4137
|
+
def Density(self) -> JacobianFactor:
|
|
4138
|
+
...
|
|
4139
|
+
def __init__(self, key_initial: int, x_initial: SL4, P_initial: noiseModel.Gaussian) -> None:
|
|
4140
|
+
...
|
|
4141
|
+
def predict(self, motionFactor: NoiseModelFactor) -> SL4:
|
|
4142
|
+
...
|
|
4143
|
+
def update(self, measurementFactor: NoiseModelFactor) -> SL4:
|
|
4144
|
+
...
|
|
4145
|
+
class ExtendedKalmanFilterSimilarity2:
|
|
4146
|
+
def Density(self) -> JacobianFactor:
|
|
4147
|
+
...
|
|
4148
|
+
def __init__(self, key_initial: int, x_initial: Similarity2, P_initial: noiseModel.Gaussian) -> None:
|
|
4149
|
+
...
|
|
4150
|
+
def predict(self, motionFactor: NoiseModelFactor) -> Similarity2:
|
|
4151
|
+
...
|
|
4152
|
+
def update(self, measurementFactor: NoiseModelFactor) -> Similarity2:
|
|
4153
|
+
...
|
|
4154
|
+
class ExtendedKalmanFilterSimilarity3:
|
|
4155
|
+
def Density(self) -> JacobianFactor:
|
|
4156
|
+
...
|
|
4157
|
+
def __init__(self, key_initial: int, x_initial: Similarity3, P_initial: noiseModel.Gaussian) -> None:
|
|
4158
|
+
...
|
|
4159
|
+
def predict(self, motionFactor: NoiseModelFactor) -> Similarity3:
|
|
4160
|
+
...
|
|
4161
|
+
def update(self, measurementFactor: NoiseModelFactor) -> Similarity3:
|
|
4162
|
+
...
+class ExtendedPriorFactorCal3Bundler(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3Bundler, noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3Bundler, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3Bundler, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3Bundler, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: Cal3Bundler) -> float:
+        ...
+    def evaluateError(self, x: Cal3Bundler) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: Cal3Bundler) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> Cal3Bundler:
+        ...
+    def serialize(self) -> str:
+        ...
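The ExtendedPriorFactor variants that follow all share this template: four __init__ overloads (noise model only, mean plus noise model, covariance only, mean plus covariance) plus accessors for the origin and the optional mean/covariance. A hedged construction sketch based only on the stubbed signatures above; the calibration values and sigma are arbitrary, the Cal3Bundler(f, k1, k2) constructor is the usual GTSAM one, and the numerical behaviour of the factor is not documented in this diff:

import gtsam

K = gtsam.symbol_shorthand.K

# Cal3Bundler has three calibration parameters, so a 3-dimensional noise model
origin = gtsam.Cal3Bundler(500.0, 1e-3, 1e-5)
noise = gtsam.noiseModel.Isotropic.Sigma(3, 1e-2)

factor = gtsam.ExtendedPriorFactorCal3Bundler(K(0), origin, noise)
print(factor.origin())               # the Cal3Bundler passed in
print(factor.evaluateError(origin))  # error vector for this value, per the stubbed signature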
+class ExtendedPriorFactorCal3DS2(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3DS2, noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3DS2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3DS2, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3DS2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: Cal3DS2) -> float:
+        ...
+    def evaluateError(self, x: Cal3DS2) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: Cal3DS2) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> Cal3DS2:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorCal3Fisheye(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3Fisheye, noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3Fisheye, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3Fisheye, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3Fisheye, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: Cal3Fisheye) -> float:
+        ...
+    def evaluateError(self, x: Cal3Fisheye) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: Cal3Fisheye) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> Cal3Fisheye:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorCal3Unified(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3Unified, noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3Unified, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3Unified, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3Unified, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: Cal3Unified) -> float:
+        ...
+    def evaluateError(self, x: Cal3Unified) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: Cal3Unified) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> Cal3Unified:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorCal3_S2(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3_S2, noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3_S2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3_S2, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Cal3_S2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: Cal3_S2) -> float:
+        ...
+    def evaluateError(self, x: Cal3_S2) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: Cal3_S2) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> Cal3_S2:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorCalibratedCamera(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: CalibratedCamera, noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: CalibratedCamera, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: CalibratedCamera, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: CalibratedCamera, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: CalibratedCamera) -> float:
+        ...
+    def evaluateError(self, x: CalibratedCamera) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: CalibratedCamera) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> CalibratedCamera:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorConstantBias(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: ..., noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: ..., mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: ..., covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: ..., mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: ...) -> float:
+        ...
+    def evaluateError(self, x: ...) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: ...) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> ...:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorDouble(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: float, noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: float, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: float, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: float, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: float) -> float:
+        ...
+    def evaluateError(self, x: float) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: float) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> float:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorGal3(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Gal3, noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Gal3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Gal3, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Gal3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: Gal3) -> float:
+        ...
+    def evaluateError(self, x: Gal3) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: Gal3) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> Gal3:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorNavState(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: ..., noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: ..., mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: ..., covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: ..., mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: ...) -> float:
+        ...
+    def evaluateError(self, x: ...) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: ...) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> ...:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorPinholeCameraCal3Bundler(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3Bundler, noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3Bundler, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3Bundler, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3Bundler, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: PinholeCameraCal3Bundler) -> float:
+        ...
+    def evaluateError(self, x: PinholeCameraCal3Bundler) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: PinholeCameraCal3Bundler) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> PinholeCameraCal3Bundler:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorPinholeCameraCal3Fisheye(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3Fisheye, noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3Fisheye, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3Fisheye, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3Fisheye, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: PinholeCameraCal3Fisheye) -> float:
+        ...
+    def evaluateError(self, x: PinholeCameraCal3Fisheye) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: PinholeCameraCal3Fisheye) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> PinholeCameraCal3Fisheye:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorPinholeCameraCal3Unified(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3Unified, noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3Unified, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3Unified, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3Unified, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: PinholeCameraCal3Unified) -> float:
+        ...
+    def evaluateError(self, x: PinholeCameraCal3Unified) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: PinholeCameraCal3Unified) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> PinholeCameraCal3Unified:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorPinholeCameraCal3_S2(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3_S2, noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3_S2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3_S2, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: PinholeCameraCal3_S2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: PinholeCameraCal3_S2) -> float:
+        ...
+    def evaluateError(self, x: PinholeCameraCal3_S2) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: PinholeCameraCal3_S2) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> PinholeCameraCal3_S2:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorPoint2(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float:
+        ...
+    def evaluateError(self, x: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorPoint3(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]], mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]], mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float:
        ...
-
-    def __init__(self) -> None:
+    def evaluateError(self, x: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
        ...
-
-    def __init__(self, graph: DiscreteFactorGraph) -> None:
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
        ...
-    def
+    def likelihood(self, x: numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float:
        ...
-    def
-        """
-        Compute the marginal of a single variable.
-
-        Args:
-            key: DiscreteKey of the Variable
-
-        Returns: Vector of marginal probabilities
-        """
-    def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
        ...
-
-
-    def
-
-
-
-        The CSV file should contain a header row followed by data rows. All timestamps will be normalized so that the first timestamp in the file corresponds to t=0 for the scenario. CSV is expected to contain the following columns: t,q_w,q_x,q_y,q_z,v_x,v_y,v_z,p_x,p_y,p_z,w_x,w_y,w_z,a_x,a_y,a_z Other columns will be ignored.
-
-        Args:
-            csv_filepath: Path to the CSV file.
-
-        Returns: A constructed DiscreteScenario.
-        """
-    def __init__(self, poses: dict[float, Pose3], angularVelocities_b: dict[float, numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]], velocities_n: dict[float, numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]], accelerations_n: dict[float, numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]]) -> None:
+    def origin(self) -> numpy.ndarray[tuple[typing.Literal[3], typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorPose2(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
        ...
-class DiscreteSearch:
-    @staticmethod
-    def FromFactorGraph(factorGraph: DiscreteFactorGraph, ordering: Ordering, buildJunctionTree: bool = False) -> DiscreteSearch:
-        """
-        Construct from a DiscreteFactorGraph.
-
-        Internally creates either an elimination tree or a junction tree. The latter incurs more up-front computation but the search itself might be faster. Then again, for the elimination tree, the heuristic will be more fine-grained (more slots).
-
-        Args:
-            factorGraph: The factor graph to search over.
-            ordering: The ordering used to create etree (and maybe jtree).
-            buildJunctionTree: Whether to build a junction tree or not.
-        """
    @typing.overload
-    def __init__(self,
+    def __init__(self, key: int, origin: Pose2, noiseModel: noiseModel.Base) -> None:
        ...
    @typing.overload
-    def __init__(self,
+    def __init__(self, key: int, origin: Pose2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
        ...
    @typing.overload
-    def __init__(self,
+    def __init__(self, key: int, origin: Pose2, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
        ...
    @typing.overload
-    def __init__(self,
+    def __init__(self, key: int, origin: Pose2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
        ...
-    def
+    def __setstate__(self, arg0: tuple) -> None:
        ...
-    def
-        """
-        Return lower bound on the cost-to-go for the entire search.
-        """
-    def print(self, name: str = 'DiscreteSearch: ', formatter: typing.Callable[[int], str] = ...) -> None:
-        """
-        Print the tree to cout.
-        """
-    def run(self, K: int = 1) -> list[DiscreteSearchSolution]:
-        """
-        Search for the K best solutions.
-
-        This method performs a search to find the K best solutions for the given DiscreteBayesNet. It uses a priority queue to manage the search nodes, expanding nodes with the smallest bound first. The search continues until all possible nodes have been expanded or pruned.
-
-        Returns: A vector of the K best solutions found during the search.
-        """
-class DiscreteSearchSolution:
-    assignment: DiscreteValues
-    error: float
-    def __init__(self, error: float, assignment: DiscreteValues) -> None:
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
        ...
-
-    def __bool__(self) -> bool:
-        """
-        Check whether the map is nonempty
-        """
-    @typing.overload
-    def __contains__(self, arg0: int) -> bool:
+    def deserialize(self, serialized: str) -> None:
        ...
-
-    def __contains__(self, arg0: typing.Any) -> bool:
+    def error(self, x: Pose2) -> float:
        ...
-    def
+    def evaluateError(self, x: Pose2) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
        ...
-    def
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
        ...
-    def
+    def likelihood(self, x: Pose2) -> float:
        ...
-    def
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
        ...
-    def
+    def origin(self) -> Pose2:
        ...
-    def
-        """
-        Return the canonical string representation of this map.
-        """
-    def __setitem__(self, arg0: int, arg1: int) -> None:
+    def serialize(self) -> str:
        ...
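Unlike the existing PriorFactor stubs, these ExtendedPriorFactor classes also expose mean(), covariance(), and gaussian(), all typed as optional. A small sketch, again based only on the stubbed signatures of ExtendedPriorFactorPose2; the pose, key, and sigma values are placeholders and the semantics of the accessors are not documented in this diff:

import gtsam

X = gtsam.symbol_shorthand.X

factor = gtsam.ExtendedPriorFactorPose2(
    X(0), gtsam.Pose2(0.0, 0.0, 0.0), gtsam.noiseModel.Isotropic.Sigma(3, 0.1))

# mean()/covariance()/gaussian() are typed "| None", so guard before use
cov = factor.covariance()
if cov is not None:
    print("covariance shape:", cov.shape)

moments = factor.gaussian()
if moments is not None:
    mean_vec, cov_mat = moments
    print(mean_vec.shape, cov_mat.shape)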
-
+class ExtendedPriorFactorPose3(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: Pose3, noiseModel: noiseModel.Base) -> None:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: Pose3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
        ...
-class DoglegOptimizer(NonlinearOptimizer):
    @typing.overload
-    def __init__(self,
+    def __init__(self, key: int, origin: Pose3, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
        ...
    @typing.overload
-    def __init__(self,
+    def __init__(self, key: int, origin: Pose3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
        ...
-    def
-        """
-        Access the current trust region radius delta.
-        """
-class DoglegParams(NonlinearOptimizerParams):
-    def __init__(self) -> None:
+    def __setstate__(self, arg0: tuple) -> None:
        ...
-    def
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
        ...
-    def
+    def deserialize(self, serialized: str) -> None:
        ...
-    def
+    def error(self, x: Pose3) -> float:
        ...
-    def
+    def evaluateError(self, x: Pose3) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
        ...
-
-    binaryEdges: bool
-    boxes: set[int]
-    connectKeysToFactor: bool
-    factorPositions: dict[int, numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]
-    figureHeightInches: float
-    figureWidthInches: float
-    plotFactorPoints: bool
-    positionHints: dict[str, float]
-    variablePositions: dict[int, numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]
-    def __init__(self, figureWidthInches: float = 5, figureHeightInches: float = 5, plotFactorPoints: bool = True, connectKeysToFactor: bool = True, binaryEdges: bool = True) -> None:
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
        ...
-
-
+    def likelihood(self, x: Pose3) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> Pose3:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorRot2(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
        ...
-class EdgeKey:
    @typing.overload
-    def __init__(self,
+    def __init__(self, key: int, origin: Rot2, noiseModel: noiseModel.Base) -> None:
        ...
    @typing.overload
-    def __init__(self, key: int) -> None:
+    def __init__(self, key: int, origin: Rot2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
        ...
    @typing.overload
-    def __init__(self, key:
+    def __init__(self, key: int, origin: Rot2, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: Rot2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
        ...
-    def
-        """
-        Retrieve high 32 bits.
-        """
-    def j(self) -> int:
-        """
-        Retrieve low 32 bits.
-        """
-    def key(self) -> int:
-        """
-        Cast to Key.
-        """
-    def print(self, s: str = '') -> None:
-        """
-        Prints the EdgeKey with an optional prefix string.
-        """
-class EssentialMatrix:
-    @staticmethod
-    def Dim() -> int:
+    def __setstate__(self, arg0: tuple) -> None:
        ...
-
-    @typing.overload
-    def FromPose3(_1P2_: Pose3) -> EssentialMatrix:
-        """
-        Named constructor converting a Pose3 with scale to EssentialMatrix (no scale)
-        """
-    @staticmethod
-    @typing.overload
-    def FromPose3(_1P2_: Pose3, H: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> EssentialMatrix:
-        """
-        Named constructor converting a Pose3 with scale to EssentialMatrix (no scale)
-        """
-    def __init__(self, aRb: Rot3, aTb: Unit3) -> None:
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
        ...
-    def
+    def deserialize(self, serialized: str) -> None:
        ...
-    def
+    def error(self, x: Rot2) -> float:
        ...
-    def
-        """
-        Direction.
-        """
-    def equals(self, other: EssentialMatrix, tol: float) -> bool:
-        """
-        assert equality up to a tolerance
-        """
-    def error(self, vA: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], vB: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float:
-        """
-        epipolar error, algebraic
-        """
-    def localCoordinates(self, other: EssentialMatrix) -> numpy.ndarray[tuple[typing.Literal[5], typing.Literal[1]], numpy.dtype[numpy.float64]]:
-        """
-        Compute the coordinates in the tangent space.
-        """
-    def matrix(self) -> numpy.ndarray[tuple[typing.Literal[3], typing.Literal[3]], numpy.dtype[numpy.float64]]:
-        """
-        Return 3*3 matrix representation.
-        """
-    def print(self, s: str = '') -> None:
-        """
-        print with optional string
-        """
-    def retract(self, xi: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]) -> EssentialMatrix:
-        """
-        Retract delta to manifold.
-        """
-    def rotation(self) -> Rot3:
-        """
-        Rotation.
-        """
-class EssentialMatrixConstraint(NoiseModelFactor):
-    def __init__(self, key1: int, key2: int, measuredE: EssentialMatrix, model: noiseModel.Base) -> None:
+    def evaluateError(self, x: Rot2) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
        ...
-    def
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
        ...
-    def
-        """
-        equals
-        """
-    def evaluateError(self, p1: Pose3, p2: Pose3) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+    def likelihood(self, x: Rot2) -> float:
        ...
-    def
-        """
-        return the measured
-        """
-    def print(self, s: str = '', keyFormatter: typing.Callable[[int], str] = ...) -> None:
-        """
-        implement functions needed for Testable
-
-        print
-        """
-class EssentialMatrixFactor(NoiseModelFactor):
-    def __init__(self, key: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base) -> None:
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
        ...
-    def
+    def origin(self) -> Rot2:
        ...
-    def
+    def serialize(self) -> str:
        ...
-
-
-        print
-        """
-class EssentialMatrixFactor2(NoiseModelFactor):
-    def __init__(self, key1: int, key2: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base) -> None:
+class ExtendedPriorFactorRot3(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: Rot3, noiseModel: noiseModel.Base) -> None:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: Rot3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
        ...
-
-
-
-
-
-
+    @typing.overload
+    def __init__(self, key: int, origin: Rot3, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    @typing.overload
+    def __init__(self, key: int, origin: Rot3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
        ...
-    def
+    def error(self, x: Rot3) -> float:
        ...
-    def evaluateError(self,
+    def evaluateError(self, x: Rot3) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
        ...
-    def
-        """
-        print
-        """
-class EssentialMatrixFactor4Cal3Bundler(NoiseModelFactor):
-    def __init__(self, keyE: int, keyK: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
        ...
-    def
+    def likelihood(self, x: Rot3) -> float:
        ...
-    def
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
        ...
-    def
+    def origin(self) -> Rot3:
        ...
-
-    def __init__(self, keyE: int, keyK: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
+    def serialize(self) -> str:
        ...
-
+class ExtendedPriorFactorSL4(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: SL4, noiseModel: noiseModel.Base) -> None:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: SL4, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
        ...
-
-    def __init__(self,
+    @typing.overload
+    def __init__(self, key: int, origin: SL4, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: SL4, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
        ...
-    def
+    def __setstate__(self, arg0: tuple) -> None:
        ...
-    def
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
        ...
-
-    def __init__(self, keyE: int, keyK: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
+    def deserialize(self, serialized: str) -> None:
        ...
-    def
+    def error(self, x: SL4) -> float:
        ...
-    def evaluateError(self,
+    def evaluateError(self, x: SL4) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
        ...
-    def
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
        ...
-
-    def __init__(self, keyE: int, keyK: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
+    def likelihood(self, x: SL4) -> float:
        ...
-    def
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
        ...
-    def
+    def origin(self) -> SL4:
        ...
-    def
+    def serialize(self) -> str:
        ...
-class
-    def
+class ExtendedPriorFactorSO3(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: SO3, noiseModel: noiseModel.Base) -> None:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: SO3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: SO3, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
        ...
-
-    def __init__(self,
+    @typing.overload
+    def __init__(self, key: int, origin: SO3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
        ...
-    def
+    def __setstate__(self, arg0: tuple) -> None:
        ...
-    def
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
        ...
-    def
+    def deserialize(self, serialized: str) -> None:
        ...
-
-    def __init__(self, keyE: int, keyKa: int, keyKb: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
+    def error(self, x: SO3) -> float:
        ...
-    def
+    def evaluateError(self, x: SO3) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
        ...
-    def
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
        ...
-    def
+    def likelihood(self, x: SO3) -> float:
        ...
-
-    def __init__(self, keyE: int, keyKa: int, keyKb: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
        ...
-    def
+    def origin(self) -> SO3:
        ...
-    def
+    def serialize(self) -> str:
        ...
-
+class ExtendedPriorFactorSO4(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
        ...
-
-    def __init__(self,
+    @typing.overload
+    def __init__(self, key: int, origin: SO4, noiseModel: noiseModel.Base) -> None:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: SO4, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: SO4, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
        ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: SO4, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
        ...
-
-    def __init__(self, keyE: int, keyKa: int, keyKb: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
+    def __setstate__(self, arg0: tuple) -> None:
        ...
-    def
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
        ...
-    def
+    def deserialize(self, serialized: str) -> None:
        ...
-    def
+    def error(self, x: SO4) -> float:
        ...
-
-    def __init__(self, keyE: int, keyKa: int, keyKb: int, pA: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], pB: numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], model: noiseModel.Base = None) -> None:
+    def evaluateError(self, x: SO4) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
        ...
-    def
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
        ...
-    def
+    def likelihood(self, x: SO4) -> float:
        ...
-    def
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
        ...
-
-    def __init__(self, edge1: EdgeKey, edge2: EdgeKey, triplets: list[tuple[numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]], calibration: Cal3Bundler, model: noiseModel.Base = None) -> None:
+    def origin(self) -> SO4:
        ...
-
-    def __init__(self, edge1: EdgeKey, edge2: EdgeKey, triplets: list[tuple[numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]], calibration: Cal3_S2, model: noiseModel.Base = None) -> None:
+    def serialize(self) -> str:
        ...
|
|
-class
-    def
+class ExtendedPriorFactorSOn(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
         ...
-class EssentialTransferFactorKCal3Bundler(NoiseModelFactor):
     @typing.overload
-    def __init__(self,
+    def __init__(self, key: int, origin: SOn, noiseModel: noiseModel.Base) -> None:
         ...
     @typing.overload
-    def __init__(self,
+    def __init__(self, key: int, origin: SOn, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
         ...
-class EssentialTransferFactorKCal3_S2(NoiseModelFactor):
     @typing.overload
-    def __init__(self,
+    def __init__(self, key: int, origin: SOn, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
         ...
     @typing.overload
-    def __init__(self,
+    def __init__(self, key: int, origin: SOn, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
         ...
-
-    @typing.overload
-    def __init__(self, edge1: EdgeKey, edge2: EdgeKey, triplets: list[tuple[numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]], model: noiseModel.Base = None) -> None:
+    def __setstate__(self, arg0: tuple) -> None:
         ...
-
-    def __init__(self, edge1: EdgeKey, edge2: EdgeKey, keyK: int, triplets: list[tuple[numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[typing.Literal[2], typing.Literal[1]], numpy.dtype[numpy.float64]]]], model: noiseModel.Base = None) -> None:
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
         ...
-
-    @typing.overload
-    def __init__(self) -> None:
+    def deserialize(self, serialized: str) -> None:
         ...
-
-    def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float) -> None:
+    def error(self, x: SOn) -> float:
         ...
-
-    def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float, a: float, b: float) -> None:
+    def evaluateError(self, x: SOn) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
         ...
-
-    @typing.overload
-    def __init__(self) -> None:
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
         ...
-
-    def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float) -> None:
+    def likelihood(self, x: SOn) -> float:
         ...
-
-    def __init__(self, key: int, z: float, model: noiseModel.Base, N: int, x: float, a: float, b: float) -> None:
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
         ...
-
-
-    def
+    def origin(self) -> SOn:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorSimilarity2(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
         ...
     @typing.overload
-    def __init__(self, key: int,
+    def __init__(self, key: int, origin: Similarity2, noiseModel: noiseModel.Base) -> None:
         ...
     @typing.overload
-    def __init__(self, key: int,
+    def __init__(self, key: int, origin: Similarity2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
         ...
-class EvaluationFactorFourierBasis(NoiseModelFactor):
     @typing.overload
-    def __init__(self) -> None:
+    def __init__(self, key: int, origin: Similarity2, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
         ...
     @typing.overload
-    def __init__(self, key: int,
+    def __init__(self, key: int, origin: Similarity2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
+        ...
+    def __setstate__(self, arg0: tuple) -> None:
+        ...
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
+        ...
+    def deserialize(self, serialized: str) -> None:
+        ...
+    def error(self, x: Similarity2) -> float:
+        ...
+    def evaluateError(self, x: Similarity2) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
+        ...
+    def likelihood(self, x: Similarity2) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> Similarity2:
+        ...
+    def serialize(self) -> str:
+        ...
+class ExtendedPriorFactorSimilarity3(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
         ...
     @typing.overload
-    def __init__(self, key: int,
+    def __init__(self, key: int, origin: Similarity3, noiseModel: noiseModel.Base) -> None:
         ...
-class Event:
     @typing.overload
-    def __init__(self) -> None:
+    def __init__(self, key: int, origin: Similarity3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
         ...
     @typing.overload
-    def __init__(self,
+    def __init__(self, key: int, origin: Similarity3, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
         ...
     @typing.overload
-    def __init__(self,
+    def __init__(self, key: int, origin: Similarity3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
         ...
-    def
+    def __setstate__(self, arg0: tuple) -> None:
         ...
-    def
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
         ...
-    def
+    def deserialize(self, serialized: str) -> None:
         ...
-    def
-        """
-        print with optional string
-        """
-    def time(self) -> float:
+    def error(self, x: Similarity3) -> float:
         ...
-
-    def Density(self) -> JacobianFactor:
+    def evaluateError(self, x: Similarity3) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
         ...
-    def
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
         ...
-    def
+    def likelihood(self, x: Similarity3) -> float:
         ...
-    def
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
         ...
-
-    def Density(self) -> JacobianFactor:
+    def origin(self) -> Similarity3:
         ...
-    def
+    def serialize(self) -> str:
         ...
-
+class ExtendedPriorFactorStereoPoint2(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
         ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: StereoPoint2, noiseModel: noiseModel.Base) -> None:
         ...
-
-    def
+    @typing.overload
+    def __init__(self, key: int, origin: StereoPoint2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
         ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: StereoPoint2, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
         ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: StereoPoint2, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
         ...
-    def
+    def __setstate__(self, arg0: tuple) -> None:
         ...
-
-    def Density(self) -> JacobianFactor:
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
         ...
-    def
+    def deserialize(self, serialized: str) -> None:
         ...
-    def
+    def error(self, x: StereoPoint2) -> float:
         ...
-    def
+    def evaluateError(self, x: StereoPoint2) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
         ...
-
-    def Density(self) -> JacobianFactor:
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
         ...
-    def
+    def likelihood(self, x: StereoPoint2) -> float:
         ...
-    def
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
         ...
-    def
+    def origin(self) -> StereoPoint2:
         ...
-
-    def Density(self) -> JacobianFactor:
+    def serialize(self) -> str:
         ...
-
+class ExtendedPriorFactorUnit3(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
         ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: Unit3, noiseModel: noiseModel.Base) -> None:
         ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: Unit3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
         ...
-
-    def
+    @typing.overload
+    def __init__(self, key: int, origin: Unit3, covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
         ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: Unit3, mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
         ...
-    def
+    def __setstate__(self, arg0: tuple) -> None:
         ...
-    def
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
         ...
-
-    def Density(self) -> JacobianFactor:
+    def deserialize(self, serialized: str) -> None:
         ...
-    def
+    def error(self, x: Unit3) -> float:
         ...
-    def
+    def evaluateError(self, x: Unit3) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
         ...
-    def
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
         ...
-
-    def Density(self) -> JacobianFactor:
+    def likelihood(self, x: Unit3) -> float:
         ...
-    def
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
         ...
-    def
+    def origin(self) -> Unit3:
         ...
-    def
+    def serialize(self) -> str:
         ...
-class
-    def
+class ExtendedPriorFactorVector(NoiseModelFactor):
+    def __getstate__(self) -> tuple:
         ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
         ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], noiseModel: noiseModel.Base) -> None:
         ...
-
+    @typing.overload
+    def __init__(self, key: int, origin: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
         ...
-
-    def
+    @typing.overload
+    def __init__(self, key: int, origin: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], mean: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], covariance: numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]) -> None:
         ...
-    def
+    def __setstate__(self, arg0: tuple) -> None:
         ...
-    def
+    def covariance(self) -> numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]] | None:
         ...
-    def
+    def deserialize(self, serialized: str) -> None:
         ...
-
-    def Density(self) -> JacobianFactor:
+    def error(self, x: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float:
         ...
-    def
+    def evaluateError(self, x: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
         ...
-    def
+    def gaussian(self) -> tuple[numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]], numpy.ndarray[tuple[M, N], numpy.dtype[numpy.float64]]] | None:
         ...
-    def
+    def likelihood(self, x: numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]) -> float:
+        ...
+    def mean(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]] | None:
+        ...
+    def origin(self) -> numpy.ndarray[tuple[M, typing.Literal[1]], numpy.dtype[numpy.float64]]:
+        ...
+    def serialize(self) -> str:
         ...
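Note: the remaining ExtendedPriorFactor* specializations added above all follow the same shape, with the Vector variant taking numpy arrays directly. A sketch of how such a factor might be queried and used in a graph; the constructor and gaussian() signatures come from the stub, while the unpacking of gaussian()'s result and the NonlinearFactorGraph/Values integration are assumptions based on the existing gtsam Python API.

import numpy as np
import gtsam

key = 1
origin = np.zeros(3)                      # linearization origin for the vector variable
cov = np.diag([0.04, 0.04, 0.09])         # covariance-only overload instead of a noise model

factor = gtsam.ExtendedPriorFactorVector(key, origin, cov)

# gaussian() is declared to return an optional (mean, covariance) pair
moments = factor.gaussian()
if moments is not None:
    mu, sigma = moments
    print(mu.shape, sigma.shape)

# The factor derives from NoiseModelFactor, so adding it to a graph should work as usual
graph = gtsam.NonlinearFactorGraph()
graph.add(factor)

values = gtsam.Values()
values.insert(key, np.zeros(3))
print(graph.error(values))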
 class Factor:
     def __repr__(self, s: str = 'Factor\n', keyFormatter: typing.Callable[[int], str] = ...) -> str: