@aws-sdk/client-lookoutequipment 3.110.0 → 3.118.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39) hide show
  1. package/CHANGELOG.md +27 -0
  2. package/README.md +3 -2
  3. package/dist-cjs/LookoutEquipment.js +15 -0
  4. package/dist-cjs/commands/ListInferenceEventsCommand.js +36 -0
  5. package/dist-cjs/commands/index.js +1 -0
  6. package/dist-cjs/models/models_0.js +19 -1
  7. package/dist-cjs/pagination/ListInferenceEventsPaginator.js +36 -0
  8. package/dist-cjs/pagination/index.js +1 -0
  9. package/dist-cjs/protocols/Aws_json1_0.js +104 -1
  10. package/dist-es/LookoutEquipment.js +15 -0
  11. package/dist-es/commands/ListInferenceEventsCommand.js +39 -0
  12. package/dist-es/commands/index.js +1 -0
  13. package/dist-es/models/models_0.js +12 -0
  14. package/dist-es/pagination/ListInferenceEventsPaginator.js +75 -0
  15. package/dist-es/pagination/index.js +1 -0
  16. package/dist-es/protocols/Aws_json1_0.js +116 -0
  17. package/dist-types/LookoutEquipment.d.ts +32 -19
  18. package/dist-types/LookoutEquipmentClient.d.ts +6 -4
  19. package/dist-types/commands/CreateModelCommand.d.ts +4 -4
  20. package/dist-types/commands/DeleteModelCommand.d.ts +3 -2
  21. package/dist-types/commands/DescribeDatasetCommand.d.ts +2 -1
  22. package/dist-types/commands/DescribeModelCommand.d.ts +3 -2
  23. package/dist-types/commands/ListInferenceEventsCommand.d.ts +36 -0
  24. package/dist-types/commands/ListSensorStatisticsCommand.d.ts +3 -3
  25. package/dist-types/commands/StartDataIngestionJobCommand.d.ts +2 -1
  26. package/dist-types/commands/TagResourceCommand.d.ts +4 -4
  27. package/dist-types/commands/index.d.ts +1 -0
  28. package/dist-types/models/models_0.d.ts +310 -349
  29. package/dist-types/pagination/ListInferenceEventsPaginator.d.ts +4 -0
  30. package/dist-types/pagination/index.d.ts +1 -0
  31. package/dist-types/protocols/Aws_json1_0.d.ts +3 -0
  32. package/dist-types/ts3.4/LookoutEquipment.d.ts +5 -0
  33. package/dist-types/ts3.4/LookoutEquipmentClient.d.ts +3 -2
  34. package/dist-types/ts3.4/commands/ListInferenceEventsCommand.d.ts +17 -0
  35. package/dist-types/ts3.4/commands/index.d.ts +1 -0
  36. package/dist-types/ts3.4/pagination/ListInferenceEventsPaginator.d.ts +4 -0
  37. package/dist-types/ts3.4/pagination/index.d.ts +1 -0
  38. package/dist-types/ts3.4/protocols/Aws_json1_0.d.ts +3 -0
  39. package/package.json +4 -4
@@ -72,7 +72,8 @@ export interface CreateDatasetRequest {
72
72
  */
73
73
  DatasetSchema?: DatasetSchema;
74
74
  /**
75
- * <p>Provides the identifier of the KMS key used to encrypt dataset data by Amazon Lookout for Equipment. </p>
75
+ * <p>Provides the identifier of the KMS key used to encrypt dataset data by Amazon Lookout
76
+ * for Equipment. </p>
76
77
  */
77
78
  ServerSideKmsKeyId?: string;
78
79
  /**
@@ -154,8 +155,8 @@ export declare class ThrottlingException extends __BaseException {
154
155
  constructor(opts: __ExceptionOptionType<ThrottlingException, __BaseException>);
155
156
  }
156
157
  /**
157
- * <p> The input fails to satisfy constraints specified by Amazon Lookout for Equipment or a related AWS
158
- * service that's being utilized. </p>
158
+ * <p> The input fails to satisfy constraints specified by Amazon Lookout for Equipment or a
159
+ * related AWS service that's being utilized. </p>
159
160
  */
160
161
  export declare class ValidationException extends __BaseException {
161
162
  readonly name: "ValidationException";
@@ -208,17 +209,18 @@ export declare namespace InferenceS3InputConfiguration {
208
209
  const filterSensitiveLog: (obj: InferenceS3InputConfiguration) => any;
209
210
  }
210
211
  /**
211
- * <p>Specifies configuration information for the input data for the inference, including Amazon S3
212
- * location of input data.. </p>
212
+ * <p>Specifies configuration information for the input data for the inference, including
213
+ * Amazon S3 location of input data. </p>
213
214
  */
214
215
  export interface InferenceInputConfiguration {
215
216
  /**
216
- * <p> Specifies configuration information for the input data for the inference, including Amazon S3
217
- * location of input data.</p>
217
+ * <p> Specifies configuration information for the input data for the inference, including
218
+ * Amazon S3 location of input data.</p>
218
219
  */
219
220
  S3InputConfiguration?: InferenceS3InputConfiguration;
220
221
  /**
221
- * <p>Indicates the difference between your time zone and Coordinated Universal Time (UTC).</p>
222
+ * <p>Indicates the difference between your time zone and Coordinated Universal Time
223
+ * (UTC).</p>
222
224
  */
223
225
  InputTimeZoneOffset?: string;
224
226
  /**
@@ -294,18 +296,19 @@ export interface CreateInferenceSchedulerRequest {
294
296
  /**
295
297
  * <p>A period of time (in minutes) by which inference on the data is delayed after the data
296
298
  * starts. For instance, if you select an offset delay time of five minutes, inference will
297
- * not begin on the data until the first data measurement after the five minute mark. For example, if
298
- * five minutes is selected, the inference scheduler will wake up at the configured frequency with the
299
- * additional five minute delay time to check the customer S3 bucket. The customer can upload data at
300
- * the same frequency and they don't need to stop and restart the scheduler when uploading new data. </p>
299
+ * not begin on the data until the first data measurement after the five minute mark. For
300
+ * example, if five minutes is selected, the inference scheduler will wake up at the
301
+ * configured frequency with the additional five minute delay time to check the customer S3
302
+ * bucket. The customer can upload data at the same frequency and they don't need to stop and
303
+ * restart the scheduler when uploading new data. </p>
301
304
  */
302
305
  DataDelayOffsetInMinutes?: number;
303
306
  /**
304
307
  * <p> How often data is uploaded to the source S3 bucket for the input data. The value chosen
305
308
  * is the length of time between data uploads. For instance, if you select 5 minutes, Amazon
306
- * Lookout for Equipment will upload the real-time data to the source bucket once every 5 minutes. This frequency
307
- * also determines how often Amazon Lookout for Equipment starts a scheduled inference on your data. In this
308
- * example, it starts once every 5 minutes. </p>
309
+ * Lookout for Equipment will upload the real-time data to the source bucket once every 5
310
+ * minutes. This frequency also determines how often Amazon Lookout for Equipment starts a
311
+ * scheduled inference on your data. In this example, it starts once every 5 minutes. </p>
309
312
  */
310
313
  DataUploadFrequency: DataUploadFrequency | string | undefined;
311
314
  /**
@@ -324,7 +327,8 @@ export interface CreateInferenceSchedulerRequest {
324
327
  */
325
328
  RoleArn: string | undefined;
326
329
  /**
327
- * <p>Provides the identifier of the KMS key used to encrypt inference scheduler data by Amazon Lookout for Equipment. </p>
330
+ * <p>Provides the identifier of the KMS key used to encrypt inference scheduler data by
331
+ * Amazon Lookout for Equipment. </p>
328
332
  */
329
333
  ServerSideKmsKeyId?: string;
330
334
  /**
@@ -397,28 +401,26 @@ export declare enum TargetSamplingRate {
397
401
  }
398
402
  /**
399
403
  * <p>The configuration is the <code>TargetSamplingRate</code>, which is the sampling rate of
400
- * the data after post processing by
401
- * Amazon Lookout for Equipment. For example, if you provide data that
402
- * has been collected at a 1 second level and you want the system to resample
403
- * the data at a 1 minute rate before training, the <code>TargetSamplingRate</code> is 1 minute.</p>
404
- * <p>When providing a value for the <code>TargetSamplingRate</code>, you must
405
- * attach the prefix "PT" to the rate you want. The value for a 1 second rate
406
- * is therefore <i>PT1S</i>, the value for a 15 minute rate
407
- * is <i>PT15M</i>, and the value for a 1 hour rate
408
- * is <i>PT1H</i>
404
+ * the data after post processing by Amazon Lookout for Equipment. For example, if you provide
405
+ * data that has been collected at a 1 second level and you want the system to resample the
406
+ * data at a 1 minute rate before training, the <code>TargetSamplingRate</code> is 1
407
+ * minute.</p>
408
+ * <p>When providing a value for the <code>TargetSamplingRate</code>, you must attach the
409
+ * prefix "PT" to the rate you want. The value for a 1 second rate is therefore
410
+ * <i>PT1S</i>, the value for a 15 minute rate is <i>PT15M</i>,
411
+ * and the value for a 1 hour rate is <i>PT1H</i>
409
412
  * </p>
410
413
  */
411
414
  export interface DataPreProcessingConfiguration {
412
415
  /**
413
- * <p>The sampling rate of the data after post processing by Amazon Lookout for Equipment.
414
- * For example, if you provide data that has been collected at a 1 second level and
415
- * you want the system to resample the data at a 1 minute rate before training,
416
- * the <code>TargetSamplingRate</code> is 1 minute.</p>
417
- * <p>When providing a value for the <code>TargetSamplingRate</code>, you must attach
418
- * the prefix "PT" to the rate you want. The value for a 1 second rate is
419
- * therefore <i>PT1S</i>, the value for a 15 minute
420
- * rate is <i>PT15M</i>, and the value for a 1 hour rate
421
- * is <i>PT1H</i>
416
+ * <p>The sampling rate of the data after post processing by Amazon Lookout for Equipment. For
417
+ * example, if you provide data that has been collected at a 1 second level and you want the
418
+ * system to resample the data at a 1 minute rate before training, the
419
+ * <code>TargetSamplingRate</code> is 1 minute.</p>
420
+ * <p>When providing a value for the <code>TargetSamplingRate</code>, you must attach the
421
+ * prefix "PT" to the rate you want. The value for a 1 second rate is therefore
422
+ * <i>PT1S</i>, the value for a 15 minute rate is <i>PT15M</i>,
423
+ * and the value for a 1 hour rate is <i>PT1H</i>
422
424
  * </p>
423
425
  */
424
426
  TargetSamplingRate?: TargetSamplingRate | string;
@@ -515,20 +517,20 @@ export interface CreateModelRequest {
515
517
  RoleArn?: string;
516
518
  /**
517
519
  * <p>The configuration is the <code>TargetSamplingRate</code>, which is the sampling rate of
518
- * the data after post processing by
519
- * Amazon Lookout for Equipment. For example, if you provide data that
520
- * has been collected at a 1 second level and you want the system to resample
521
- * the data at a 1 minute rate before training, the <code>TargetSamplingRate</code> is 1 minute.</p>
522
- * <p>When providing a value for the <code>TargetSamplingRate</code>, you must
523
- * attach the prefix "PT" to the rate you want. The value for a 1 second rate
524
- * is therefore <i>PT1S</i>, the value for a 15 minute rate
525
- * is <i>PT15M</i>, and the value for a 1 hour rate
526
- * is <i>PT1H</i>
520
+ * the data after post processing by Amazon Lookout for Equipment. For example, if you provide
521
+ * data that has been collected at a 1 second level and you want the system to resample the
522
+ * data at a 1 minute rate before training, the <code>TargetSamplingRate</code> is 1
523
+ * minute.</p>
524
+ * <p>When providing a value for the <code>TargetSamplingRate</code>, you must attach the
525
+ * prefix "PT" to the rate you want. The value for a 1 second rate is therefore
526
+ * <i>PT1S</i>, the value for a 15 minute rate is <i>PT15M</i>,
527
+ * and the value for a 1 hour rate is <i>PT1H</i>
527
528
  * </p>
528
529
  */
529
530
  DataPreProcessingConfiguration?: DataPreProcessingConfiguration;
530
531
  /**
531
- * <p>Provides the identifier of the KMS key used to encrypt model data by Amazon Lookout for Equipment. </p>
532
+ * <p>Provides the identifier of the KMS key used to encrypt model data by Amazon Lookout
533
+ * for Equipment. </p>
532
534
  */
533
535
  ServerSideKmsKeyId?: string;
534
536
  /**
@@ -536,7 +538,9 @@ export interface CreateModelRequest {
536
538
  */
537
539
  Tags?: Tag[];
538
540
  /**
539
- * <p>Indicates that the asset associated with this sensor has been shut off. As long as this condition is met, Lookout for Equipment will not use data from this asset for training, evaluation, or inference.</p>
541
+ * <p>Indicates that the asset associated with this sensor has been shut off. As long as this
542
+ * condition is met, Lookout for Equipment will not use data from this asset for training,
543
+ * evaluation, or inference.</p>
540
544
  */
541
545
  OffCondition?: string;
542
546
  }
@@ -616,17 +620,11 @@ export declare namespace DescribeDataIngestionJobRequest {
616
620
  const filterSensitiveLog: (obj: DescribeDataIngestionJobRequest) => any;
617
621
  }
618
622
  /**
619
- * <p>
620
- *
621
- * Entity that comprises information abount duplicate timestamps in the dataset.
622
- *
623
- * </p>
623
+ * <p> Entity that comprises information about duplicate timestamps in the dataset. </p>
624
624
  */
625
625
  export interface DuplicateTimestamps {
626
626
  /**
627
- * <p>
628
- * Indicates the total number of duplicate timestamps.
629
- * </p>
627
+ * <p> Indicates the total number of duplicate timestamps. </p>
630
628
  */
631
629
  TotalNumberOfDuplicateTimestamps: number | undefined;
632
630
  }
@@ -637,19 +635,12 @@ export declare namespace DuplicateTimestamps {
637
635
  const filterSensitiveLog: (obj: DuplicateTimestamps) => any;
638
636
  }
639
637
  /**
640
- * <p>
641
- *
642
- * Entity that comprises information on sensors that have sensor data completely missing.
643
- *
644
- * </p>
638
+ * <p> Entity that comprises information on sensors that have sensor data completely missing.
639
+ * </p>
645
640
  */
646
641
  export interface MissingCompleteSensorData {
647
642
  /**
648
- * <p>
649
- *
650
- * Indicates the number of sensors that have data missing completely.
651
- *
652
- * </p>
643
+ * <p> Indicates the number of sensors that have data missing completely. </p>
653
644
  */
654
645
  AffectedSensorCount: number | undefined;
655
646
  }
@@ -660,19 +651,11 @@ export declare namespace MissingCompleteSensorData {
660
651
  const filterSensitiveLog: (obj: MissingCompleteSensorData) => any;
661
652
  }
662
653
  /**
663
- * <p>
664
- *
665
- * Entity that comprises information on sensors that have shorter date range.
666
- *
667
- * </p>
654
+ * <p> Entity that comprises information on sensors that have shorter date range. </p>
668
655
  */
669
656
  export interface SensorsWithShortDateRange {
670
657
  /**
671
- * <p>
672
- *
673
- * Indicates the number of sensors that have less than 90 days of data.
674
- *
675
- * </p>
658
+ * <p> Indicates the number of sensors that have less than 90 days of data. </p>
676
659
  */
677
660
  AffectedSensorCount: number | undefined;
678
661
  }
@@ -683,27 +666,18 @@ export declare namespace SensorsWithShortDateRange {
683
666
  const filterSensitiveLog: (obj: SensorsWithShortDateRange) => any;
684
667
  }
685
668
  /**
686
- * <p>
687
- *
688
- * Entity that comprises aggregated information on sensors having insufficient data.
689
- *
690
- * </p>
669
+ * <p> Entity that comprises aggregated information on sensors having insufficient data.
670
+ * </p>
691
671
  */
692
672
  export interface InsufficientSensorData {
693
673
  /**
694
- * <p>
695
- *
696
- * Parameter that describes the total number of sensors that have data completely missing for it.
697
- *
698
- * </p>
674
+ * <p> Parameter that describes the total number of sensors that have data completely missing
675
+ * for it. </p>
699
676
  */
700
677
  MissingCompleteSensorData: MissingCompleteSensorData | undefined;
701
678
  /**
702
- * <p>
703
- *
704
- * Parameter that describes the total number of sensors that have a short date range of less than 90 days of data overall.
705
- *
706
- * </p>
679
+ * <p> Parameter that describes the total number of sensors that have a short date range of
680
+ * less than 90 days of data overall. </p>
707
681
  */
708
682
  SensorsWithShortDateRange: SensorsWithShortDateRange | undefined;
709
683
  }
@@ -714,27 +688,16 @@ export declare namespace InsufficientSensorData {
714
688
  const filterSensitiveLog: (obj: InsufficientSensorData) => any;
715
689
  }
716
690
  /**
717
- * <p>
718
- *
719
- * Entity that comprises aggregated information on sensors having insufficient data.
720
- *
721
- * </p>
691
+ * <p> Entity that comprises aggregated information on sensors having insufficient data.
692
+ * </p>
722
693
  */
723
694
  export interface InvalidSensorData {
724
695
  /**
725
- * <p>
726
- *
727
- * Indicates the number of sensors that have at least some invalid values.
728
- *
729
- * </p>
696
+ * <p> Indicates the number of sensors that have at least some invalid values. </p>
730
697
  */
731
698
  AffectedSensorCount: number | undefined;
732
699
  /**
733
- * <p>
734
- *
735
- * Indicates the total number of invalid values across all the sensors.
736
- *
737
- * </p>
700
+ * <p> Indicates the total number of invalid values across all the sensors. </p>
738
701
  */
739
702
  TotalNumberOfInvalidValues: number | undefined;
740
703
  }
@@ -745,27 +708,15 @@ export declare namespace InvalidSensorData {
745
708
  const filterSensitiveLog: (obj: InvalidSensorData) => any;
746
709
  }
747
710
  /**
748
- * <p>
749
- *
750
- * Entity that comprises aggregated information on sensors having missing data.
751
- *
752
- * </p>
711
+ * <p> Entity that comprises aggregated information on sensors having missing data. </p>
753
712
  */
754
713
  export interface MissingSensorData {
755
714
  /**
756
- * <p>
757
- *
758
- * Indicates the number of sensors that have atleast some data missing.
759
- *
760
- * </p>
715
+ * <p> Indicates the number of sensors that have at least some data missing. </p>
761
716
  */
762
717
  AffectedSensorCount: number | undefined;
763
718
  /**
764
- * <p>
765
- *
766
- * Indicates the total number of missing values across all the sensors.
767
- *
768
- * </p>
719
+ * <p> Indicates the total number of missing values across all the sensors. </p>
769
720
  */
770
721
  TotalNumberOfMissingValues: number | undefined;
771
722
  }
@@ -776,19 +727,11 @@ export declare namespace MissingSensorData {
776
727
  const filterSensitiveLog: (obj: MissingSensorData) => any;
777
728
  }
778
729
  /**
779
- * <p>
780
- *
781
- * Entity that comprises information abount unsupported timestamps in the dataset.
782
- *
783
- * </p>
730
+ * <p> Entity that comprises information about unsupported timestamps in the dataset. </p>
784
731
  */
785
732
  export interface UnsupportedTimestamps {
786
733
  /**
787
- * <p>
788
- *
789
- * Indicates the total number of unsupported timestamps across the ingested data.
790
- *
791
- * </p>
734
+ * <p> Indicates the total number of unsupported timestamps across the ingested data. </p>
792
735
  */
793
736
  TotalNumberOfUnsupportedTimestamps: number | undefined;
794
737
  }
@@ -799,51 +742,35 @@ export declare namespace UnsupportedTimestamps {
799
742
  const filterSensitiveLog: (obj: UnsupportedTimestamps) => any;
800
743
  }
801
744
  /**
802
- * <p>
803
- *
804
- * DataQualitySummary gives aggregated statistics over all the sensors about a completed ingestion job. It primarily gives more information about statistics over different incorrect data like MissingCompleteSensorData, MissingSensorData, UnsupportedDateFormats, InsufficientSensorData, DuplicateTimeStamps.
805
- *
806
- * </p>
745
+ * <p> DataQualitySummary gives aggregated statistics over all the sensors about a completed
746
+ * ingestion job. It primarily gives more information about statistics over different
747
+ * incorrect data like MissingCompleteSensorData, MissingSensorData, UnsupportedDateFormats,
748
+ * InsufficientSensorData, DuplicateTimeStamps. </p>
807
749
  */
808
750
  export interface DataQualitySummary {
809
751
  /**
810
- * <p>
811
- *
812
- * Parameter that gives information about insufficient data for sensors in the dataset. This includes information about those sensors that have complete data missing and those with a short date range.
813
- *
814
- * </p>
752
+ * <p> Parameter that gives information about insufficient data for sensors in the dataset.
753
+ * This includes information about those sensors that have complete data missing and those
754
+ * with a short date range. </p>
815
755
  */
816
756
  InsufficientSensorData: InsufficientSensorData | undefined;
817
757
  /**
818
- * <p>
819
- *
820
- * Parameter that gives information about data that is missing over all the sensors in the input data.
821
- *
822
- * </p>
758
+ * <p> Parameter that gives information about data that is missing over all the sensors in the
759
+ * input data. </p>
823
760
  */
824
761
  MissingSensorData: MissingSensorData | undefined;
825
762
  /**
826
- * <p>
827
- *
828
- * Parameter that gives information about data that is invalid over all the sensors in the input data.
829
- *
830
- * </p>
763
+ * <p> Parameter that gives information about data that is invalid over all the sensors in the
764
+ * input data. </p>
831
765
  */
832
766
  InvalidSensorData: InvalidSensorData | undefined;
833
767
  /**
834
- * <p>
835
- *
836
- * Parameter that gives information about unsupported timestamps in the input data.
837
- *
838
- * </p>
768
+ * <p> Parameter that gives information about unsupported timestamps in the input data.
769
+ * </p>
839
770
  */
840
771
  UnsupportedTimestamps: UnsupportedTimestamps | undefined;
841
772
  /**
842
- * <p>
843
- *
844
- * Parameter that gives information about duplicate timestamps in the input data.
845
- *
846
- * </p>
773
+ * <p> Parameter that gives information about duplicate timestamps in the input data. </p>
847
774
  */
848
775
  DuplicateTimestamps: DuplicateTimestamps | undefined;
849
776
  }
@@ -874,7 +801,8 @@ export declare namespace S3Object {
874
801
  const filterSensitiveLog: (obj: S3Object) => any;
875
802
  }
876
803
  /**
877
- * <p>Gives statistics about how many files have been ingested, and which files have not been ingested, for a particular ingestion job.</p>
804
+ * <p>Gives statistics about how many files have been ingested, and which files have not been
805
+ * ingested, for a particular ingestion job.</p>
878
806
  */
879
807
  export interface IngestedFilesSummary {
880
808
  /**
@@ -886,7 +814,8 @@ export interface IngestedFilesSummary {
886
814
  */
887
815
  IngestedNumberOfFiles: number | undefined;
888
816
  /**
889
- * <p>Indicates the number of files that were discarded. A file could be discarded because its format is invalid (for example, a jpg or pdf) or not readable.</p>
817
+ * <p>Indicates the number of files that were discarded. A file could be discarded because its
818
+ * format is invalid (for example, a jpg or pdf) or not readable.</p>
890
819
  */
891
820
  DiscardedFiles?: S3Object[];
892
821
  }
@@ -911,10 +840,9 @@ export interface IngestionS3InputConfiguration {
911
840
  */
912
841
  Prefix?: string;
913
842
  /**
914
- * <p>
915
- * Pattern for matching the Amazon S3 files which will be used for ingestion.
916
- * If no KeyPattern is provided, we will use the default hierarchy file structure, which is same as KeyPattern {prefix}/{component_name}/*
917
- * </p>
843
+ * <p> Pattern for matching the Amazon S3 files which will be used for ingestion. If
844
+ * no KeyPattern is provided, we will use the default hierarchy file structure, which is same
845
+ * as KeyPattern {prefix}/{component_name}/* </p>
918
846
  */
919
847
  KeyPattern?: string;
920
848
  }
@@ -979,13 +907,14 @@ export interface DescribeDataIngestionJobResponse {
979
907
  */
980
908
  FailedReason?: string;
981
909
  /**
982
- * <p>
983
- * Gives statistics about a completed ingestion job. These statistics primarily relate to quantifying incorrect data such as MissingCompleteSensorData, MissingSensorData, UnsupportedDateFormats, InsufficientSensorData, and DuplicateTimeStamps.
984
- * </p>
910
+ * <p> Gives statistics about a completed ingestion job. These statistics primarily relate to
911
+ * quantifying incorrect data such as MissingCompleteSensorData, MissingSensorData,
912
+ * UnsupportedDateFormats, InsufficientSensorData, and DuplicateTimeStamps. </p>
985
913
  */
986
914
  DataQualitySummary?: DataQualitySummary;
987
915
  /**
988
- * <p>Gives statistics about how many files have been ingested, and which files have not been ingested, for a particular ingestion job.</p>
916
+ * <p>Gives statistics about how many files have been ingested, and which files have not been
917
+ * ingested, for a particular ingestion job.</p>
989
918
  */
990
919
  IngestedFilesSummary?: IngestedFilesSummary;
991
920
  /**
@@ -1041,7 +970,7 @@ export interface DescribeDatasetResponse {
1041
970
  */
1042
971
  DatasetArn?: string;
1043
972
  /**
1044
- * <p>Specifies the time the dataset was created in Amazon Lookout for Equipment. </p>
973
+ * <p>Specifies the time the dataset was created in Lookout for Equipment. </p>
1045
974
  */
1046
975
  CreatedAt?: Date;
1047
976
  /**
@@ -1058,23 +987,25 @@ export interface DescribeDatasetResponse {
1058
987
  */
1059
988
  Schema?: __LazyJsonString | string;
1060
989
  /**
1061
- * <p>Provides the identifier of the KMS key used to encrypt dataset data by Amazon Lookout for Equipment. </p>
990
+ * <p>Provides the identifier of the KMS key used to encrypt dataset data by Amazon Lookout
991
+ * for Equipment. </p>
1062
992
  */
1063
993
  ServerSideKmsKeyId?: string;
1064
994
  /**
1065
- * <p>Specifies the S3 location configuration for the data input for the data ingestion job. </p>
995
+ * <p>Specifies the S3 location configuration for the data input for the data ingestion job.
996
+ * </p>
1066
997
  */
1067
998
  IngestionInputConfiguration?: IngestionInputConfiguration;
1068
999
  /**
1069
- * <p>
1070
- * Gives statistics associated with the given dataset for the latest successful associated ingestion job id. These statistics primarily relate to quantifying incorrect data such as MissingCompleteSensorData, MissingSensorData, UnsupportedDateFormats, InsufficientSensorData, and DuplicateTimeStamps.
1071
- * </p>
1000
+ * <p> Gives statistics associated with the given dataset for the latest successful associated
1001
+ * ingestion job id. These statistics primarily relate to quantifying incorrect data such as
1002
+ * MissingCompleteSensorData, MissingSensorData, UnsupportedDateFormats,
1003
+ * InsufficientSensorData, and DuplicateTimeStamps. </p>
1072
1004
  */
1073
1005
  DataQualitySummary?: DataQualitySummary;
1074
1006
  /**
1075
- * <p>
1076
- * IngestedFilesSummary associated with the given dataset for the latest successful associated ingestion job id.
1077
- * </p>
1007
+ * <p> IngestedFilesSummary associated with the given dataset for the latest successful
1008
+ * associated ingestion job id. </p>
1078
1009
  */
1079
1010
  IngestedFilesSummary?: IngestedFilesSummary;
1080
1011
  /**
@@ -1139,18 +1070,19 @@ export interface DescribeInferenceSchedulerResponse {
1139
1070
  /**
1140
1071
  * <p> A period of time (in minutes) by which inference on the data is delayed after the data
1141
1072
  * starts. For instance, if you select an offset delay time of five minutes, inference will
1142
- * not begin on the data until the first data measurement after the five minute mark. For example, if
1143
- * five minutes is selected, the inference scheduler will wake up at the configured frequency with the
1144
- * additional five minute delay time to check the customer S3 bucket. The customer can upload data at
1145
- * the same frequency and they don't need to stop and restart the scheduler when uploading new data.</p>
1073
+ * not begin on the data until the first data measurement after the five minute mark. For
1074
+ * example, if five minutes is selected, the inference scheduler will wake up at the
1075
+ * configured frequency with the additional five minute delay time to check the customer S3
1076
+ * bucket. The customer can upload data at the same frequency and they don't need to stop and
1077
+ * restart the scheduler when uploading new data.</p>
1146
1078
  */
1147
1079
  DataDelayOffsetInMinutes?: number;
1148
1080
  /**
1149
1081
  * <p>Specifies how often data is uploaded to the source S3 bucket for the input data. This
1150
1082
  * value is the length of time between data uploads. For instance, if you select 5 minutes,
1151
- * Amazon Lookout for Equipment will upload the real-time data to the source bucket once every 5 minutes. This
1152
- * frequency also determines how often Amazon Lookout for Equipment starts a scheduled inference on your data.
1153
- * In this example, it starts once every 5 minutes. </p>
1083
+ * Amazon Lookout for Equipment will upload the real-time data to the source bucket once every
1084
+ * 5 minutes. This frequency also determines how often Amazon Lookout for Equipment starts a
1085
+ * scheduled inference on your data. In this example, it starts once every 5 minutes. </p>
1154
1086
  */
1155
1087
  DataUploadFrequency?: DataUploadFrequency | string;
1156
1088
  /**
@@ -1167,8 +1099,8 @@ export interface DescribeInferenceSchedulerResponse {
1167
1099
  */
1168
1100
  DataInputConfiguration?: InferenceInputConfiguration;
1169
1101
  /**
1170
- * <p> Specifies information for the output results for the inference scheduler,
1171
- * including the output S3 location. </p>
1102
+ * <p> Specifies information for the output results for the inference scheduler, including
1103
+ * the output S3 location. </p>
1172
1104
  */
1173
1105
  DataOutputConfiguration?: InferenceOutputConfiguration;
1174
1106
  /**
@@ -1177,7 +1109,8 @@ export interface DescribeInferenceSchedulerResponse {
1177
1109
  */
1178
1110
  RoleArn?: string;
1179
1111
  /**
1180
- * <p>Provides the identifier of the KMS key used to encrypt inference scheduler data by Amazon Lookout for Equipment. </p>
1112
+ * <p>Provides the identifier of the KMS key used to encrypt inference scheduler data by
1113
+ * Amazon Lookout for Equipment. </p>
1181
1114
  */
1182
1115
  ServerSideKmsKeyId?: string;
1183
1116
  }
@@ -1254,15 +1187,14 @@ export interface DescribeModelResponse {
1254
1187
  RoleArn?: string;
1255
1188
  /**
1256
1189
  * <p>The configuration is the <code>TargetSamplingRate</code>, which is the sampling rate of
1257
- * the data after post processing by
1258
- * Amazon Lookout for Equipment. For example, if you provide data that
1259
- * has been collected at a 1 second level and you want the system to resample
1260
- * the data at a 1 minute rate before training, the <code>TargetSamplingRate</code> is 1 minute.</p>
1261
- * <p>When providing a value for the <code>TargetSamplingRate</code>, you must
1262
- * attach the prefix "PT" to the rate you want. The value for a 1 second rate
1263
- * is therefore <i>PT1S</i>, the value for a 15 minute rate
1264
- * is <i>PT15M</i>, and the value for a 1 hour rate
1265
- * is <i>PT1H</i>
1190
+ * the data after post processing by Amazon Lookout for Equipment. For example, if you provide
1191
+ * data that has been collected at a 1 second level and you want the system to resample the
1192
+ * data at a 1 minute rate before training, the <code>TargetSamplingRate</code> is 1
1193
+ * minute.</p>
1194
+ * <p>When providing a value for the <code>TargetSamplingRate</code>, you must attach the
1195
+ * prefix "PT" to the rate you want. The value for a 1 second rate is therefore
1196
+ * <i>PT1S</i>, the value for a 15 minute rate is <i>PT15M</i>,
1197
+ * and the value for a 1 hour rate is <i>PT1H</i>
1266
1198
  * </p>
1267
1199
  */
1268
1200
  DataPreProcessingConfiguration?: DataPreProcessingConfiguration;
@@ -1285,8 +1217,9 @@ export interface DescribeModelResponse {
1285
1217
  */
1286
1218
  FailedReason?: string;
1287
1219
  /**
1288
- * <p>The Model Metrics show an aggregated summary of the model's performance within the evaluation time
1289
- * range. This is the JSON content of the metrics created when evaluating the model. </p>
1220
+ * <p>The Model Metrics show an aggregated summary of the model's performance within the
1221
+ * evaluation time range. This is the JSON content of the metrics created when evaluating the
1222
+ * model. </p>
1290
1223
  */
1291
1224
  ModelMetrics?: __LazyJsonString | string;
1292
1225
  /**
@@ -1299,11 +1232,14 @@ export interface DescribeModelResponse {
1299
1232
  */
1300
1233
  CreatedAt?: Date;
1301
1234
  /**
1302
- * <p>Provides the identifier of the KMS key used to encrypt model data by Amazon Lookout for Equipment. </p>
1235
+ * <p>Provides the identifier of the KMS key used to encrypt model data by Amazon Lookout
1236
+ * for Equipment. </p>
1303
1237
  */
1304
1238
  ServerSideKmsKeyId?: string;
1305
1239
  /**
1306
- * <p>Indicates that the asset associated with this sensor has been shut off. As long as this condition is met, Lookout for Equipment will not use data from this asset for training, evaluation, or inference.</p>
1240
+ * <p>Indicates that the asset associated with this sensor has been shut off. As long as this
1241
+ * condition is met, Lookout for Equipment will not use data from this asset for training,
1242
+ * evaluation, or inference.</p>
1307
1243
  */
1308
1244
  OffCondition?: string;
1309
1245
  }
@@ -1356,8 +1292,8 @@ export interface DataIngestionJobSummary {
1356
1292
  */
1357
1293
  DatasetArn?: string;
1358
1294
  /**
1359
- * <p> Specifies information for the input data for the data inference job, including data Amazon S3
1360
- * location parameters. </p>
1295
+ * <p> Specifies information for the input data for the data inference job, including data
1296
+ * Amazon S3 location parameters. </p>
1361
1297
  */
1362
1298
  IngestionInputConfiguration?: IngestionInputConfiguration;
1363
1299
  /**
@@ -1456,6 +1392,94 @@ export declare namespace ListDatasetsResponse {
1456
1392
  */
1457
1393
  const filterSensitiveLog: (obj: ListDatasetsResponse) => any;
1458
1394
  }
1395
+ export interface ListInferenceEventsRequest {
1396
+ /**
1397
+ * <p>An opaque pagination token indicating where to continue the listing of inference
1398
+ * events.</p>
1399
+ */
1400
+ NextToken?: string;
1401
+ /**
1402
+ * <p>Specifies the maximum number of inference events to list. </p>
1403
+ */
1404
+ MaxResults?: number;
1405
+ /**
1406
+ * <p>The name of the inference scheduler for the inference events listed. </p>
1407
+ */
1408
+ InferenceSchedulerName: string | undefined;
1409
+ /**
1410
+ * <p> Lookout for Equipment will return all the inference events with start time equal to or greater than the start time given.</p>
1411
+ */
1412
+ IntervalStartTime: Date | undefined;
1413
+ /**
1414
+ * <p>Lookout for Equipment will return all the inference events with end time equal to or less than the end time given.</p>
1415
+ */
1416
+ IntervalEndTime: Date | undefined;
1417
+ }
1418
+ export declare namespace ListInferenceEventsRequest {
1419
+ /**
1420
+ * @internal
1421
+ */
1422
+ const filterSensitiveLog: (obj: ListInferenceEventsRequest) => any;
1423
+ }
1424
+ /**
1425
+ * <p>Contains information about the specific inference event, including start and end time,
1426
+ * diagnostics information, event duration and so on.</p>
1427
+ */
1428
+ export interface InferenceEventSummary {
1429
+ /**
1430
+ * <p> The Amazon Resource Name (ARN) of the inference scheduler being used for the inference
1431
+ * event. </p>
1432
+ */
1433
+ InferenceSchedulerArn?: string;
1434
+ /**
1435
+ * <p>The name of the inference scheduler being used for the inference events. </p>
1436
+ */
1437
+ InferenceSchedulerName?: string;
1438
+ /**
1439
+ * <p>Indicates the starting time of an inference event.
1440
+ * </p>
1441
+ */
1442
+ EventStartTime?: Date;
1443
+ /**
1444
+ * <p>Indicates the ending time of an inference event.
1445
+ * </p>
1446
+ */
1447
+ EventEndTime?: Date;
1448
+ /**
1449
+ * <p> An array which specifies the names and values of all sensors contributing to an inference event.</p>
1450
+ */
1451
+ Diagnostics?: string;
1452
+ /**
1453
+ * <p> Indicates the size of an inference event in seconds.
1454
+ * </p>
1455
+ */
1456
+ EventDurationInSeconds?: number;
1457
+ }
1458
+ export declare namespace InferenceEventSummary {
1459
+ /**
1460
+ * @internal
1461
+ */
1462
+ const filterSensitiveLog: (obj: InferenceEventSummary) => any;
1463
+ }
1464
+ export interface ListInferenceEventsResponse {
1465
+ /**
1466
+ * <p>An opaque pagination token indicating where to continue the listing of inference
1467
+ * executions. </p>
1468
+ */
1469
+ NextToken?: string;
1470
+ /**
1471
+ * <p>Provides an array of information about the individual inference events returned from
1472
+ * the <code>ListInferenceEvents</code> operation, including scheduler used, event start time,
1473
+ * event end time, diagnostics, and so on. </p>
1474
+ */
1475
+ InferenceEventSummaries?: InferenceEventSummary[];
1476
+ }
1477
+ export declare namespace ListInferenceEventsResponse {
1478
+ /**
1479
+ * @internal
1480
+ */
1481
+ const filterSensitiveLog: (obj: ListInferenceEventsResponse) => any;
1482
+ }
1459
1483
  export declare enum InferenceExecutionStatus {
1460
1484
  FAILED = "FAILED",
1461
1485
  IN_PROGRESS = "IN_PROGRESS",
@@ -1476,13 +1500,13 @@ export interface ListInferenceExecutionsRequest {
1476
1500
  */
1477
1501
  InferenceSchedulerName: string | undefined;
1478
1502
  /**
1479
- * <p>The time reference in the inferenced dataset after which Amazon Lookout for Equipment started the
1480
- * inference execution. </p>
1503
+ * <p>The time reference in the inferenced dataset after which Amazon Lookout for Equipment
1504
+ * started the inference execution. </p>
1481
1505
  */
1482
1506
  DataStartTimeAfter?: Date;
1483
1507
  /**
1484
- * <p>The time reference in the inferenced dataset before which Amazon Lookout for Equipment stopped the
1485
- * inference execution. </p>
1508
+ * <p>The time reference in the inferenced dataset before which Amazon Lookout for Equipment
1509
+ * stopped the inference execution. </p>
1486
1510
  */
1487
1511
  DataEndTimeBefore?: Date;
1488
1512
  /**
@@ -1540,7 +1564,8 @@ export interface InferenceExecutionSummary {
1540
1564
  DataInputConfiguration?: InferenceInputConfiguration;
1541
1565
  /**
1542
1566
  * <p> Specifies configuration information for the output results from for the inference
1543
- * execution, including the output Amazon S3 location. </p>
1567
+ * execution, including the output Amazon S3 location.
1568
+ * </p>
1544
1569
  */
1545
1570
  DataOutputConfiguration?: InferenceOutputConfiguration;
1546
1571
  /**
@@ -1635,19 +1660,19 @@ export interface InferenceSchedulerSummary {
1635
1660
  /**
1636
1661
  * <p>A period of time (in minutes) by which inference on the data is delayed after the data
1637
1662
  * starts. For instance, if an offset delay time of five minutes was selected, inference will
1638
- * not begin on the data until the first data measurement after the five minute mark. For example, if
1639
- * five minutes is selected, the inference scheduler will wake up at the configured frequency with the
1640
- * additional five minute delay time to check the customer S3 bucket. The customer can upload data at
1641
- * the same frequency and they don't need to stop and restart the scheduler when uploading new data.
1642
- * </p>
1663
+ * not begin on the data until the first data measurement after the five minute mark. For
1664
+ * example, if five minutes is selected, the inference scheduler will wake up at the
1665
+ * configured frequency with the additional five minute delay time to check the customer S3
1666
+ * bucket. The customer can upload data at the same frequency and they don't need to stop and
1667
+ * restart the scheduler when uploading new data. </p>
1643
1668
  */
1644
1669
  DataDelayOffsetInMinutes?: number;
1645
1670
  /**
1646
1671
  * <p>How often data is uploaded to the source S3 bucket for the input data. This value is the
1647
- * length of time between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment
1648
- * will upload the real-time data to the source bucket once every 5 minutes. This frequency also
1649
- * determines how often Amazon Lookout for Equipment starts a scheduled inference on your data. In this
1650
- * example, it starts once every 5 minutes. </p>
1672
+ * length of time between data uploads. For instance, if you select 5 minutes, Amazon Lookout
1673
+ * for Equipment will upload the real-time data to the source bucket once every 5 minutes.
1674
+ * This frequency also determines how often Amazon Lookout for Equipment starts a scheduled
1675
+ * inference on your data. In this example, it starts once every 5 minutes. </p>
1651
1676
  */
1652
1677
  DataUploadFrequency?: DataUploadFrequency | string;
1653
1678
  }
@@ -1760,27 +1785,22 @@ export declare namespace ListModelsResponse {
1760
1785
  }
1761
1786
  export interface ListSensorStatisticsRequest {
1762
1787
  /**
1763
- * <p>
1764
- * The name of the dataset associated with the list of Sensor Statistics.
1765
- * </p>
1788
+ * <p> The name of the dataset associated with the list of Sensor Statistics. </p>
1766
1789
  */
1767
1790
  DatasetName: string | undefined;
1768
1791
  /**
1769
- * <p>
1770
- * The ingestion job id associated with the list of Sensor Statistics. To get sensor statistics for a particular ingestion job id, both dataset name and ingestion job id must be submitted as inputs.
1771
- * </p>
1792
+ * <p> The ingestion job id associated with the list of Sensor Statistics. To get sensor
1793
+ * statistics for a particular ingestion job id, both dataset name and ingestion job id must
1794
+ * be submitted as inputs. </p>
1772
1795
  */
1773
1796
  IngestionJobId?: string;
1774
1797
  /**
1775
- * <p>
1776
- * Specifies the maximum number of sensors for which to retrieve statistics.
1777
- * </p>
1798
+ * <p> Specifies the maximum number of sensors for which to retrieve statistics. </p>
1778
1799
  */
1779
1800
  MaxResults?: number;
1780
1801
  /**
1781
- * <p>
1782
- * An opaque pagination token indicating where to continue the listing of sensor statistics.
1783
- * </p>
1802
+ * <p> An opaque pagination token indicating where to continue the listing of sensor
1803
+ * statistics. </p>
1784
1804
  */
1785
1805
  NextToken?: string;
1786
1806
  }
@@ -1795,21 +1815,16 @@ export declare enum StatisticalIssueStatus {
1795
1815
  POTENTIAL_ISSUE_DETECTED = "POTENTIAL_ISSUE_DETECTED"
1796
1816
  }
1797
1817
  /**
1798
- * <p>
1799
- * Entity that comprises information on categorical values in data.
1800
- * </p>
1818
+ * <p> Entity that comprises information on categorical values in data. </p>
1801
1819
  */
1802
1820
  export interface CategoricalValues {
1803
1821
  /**
1804
- * <p>
1805
- * Indicates whether there is a potential data issue related to categorical values.
1806
- * </p>
1822
+ * <p> Indicates whether there is a potential data issue related to categorical values.
1823
+ * </p>
1807
1824
  */
1808
1825
  Status: StatisticalIssueStatus | string | undefined;
1809
1826
  /**
1810
- * <p>
1811
- * Indicates the number of categories in the data.
1812
- * </p>
1827
+ * <p> Indicates the number of categories in the data. </p>
1813
1828
  */
1814
1829
  NumberOfCategory?: number;
1815
1830
  }
@@ -1820,27 +1835,15 @@ export declare namespace CategoricalValues {
1820
1835
  const filterSensitiveLog: (obj: CategoricalValues) => any;
1821
1836
  }
1822
1837
  /**
1823
- * <p>
1824
- *
1825
- * Entity that comprises information of count and percentage.
1826
- *
1827
- * </p>
1838
+ * <p> Entity that comprises information of count and percentage. </p>
1828
1839
  */
1829
1840
  export interface CountPercent {
1830
1841
  /**
1831
- * <p>
1832
- *
1833
- * Indicates the count of occurences of the given statistic.
1834
- *
1835
- * </p>
1842
+ * <p> Indicates the count of occurrences of the given statistic. </p>
1836
1843
  */
1837
1844
  Count: number | undefined;
1838
1845
  /**
1839
- * <p>
1840
- *
1841
- * Indicates the percentage of occurances of the given statistic.
1842
- *
1843
- * </p>
1846
+ * <p> Indicates the percentage of occurrences of the given statistic. </p>
1844
1847
  */
1845
1848
  Percentage: number | undefined;
1846
1849
  }
@@ -1851,27 +1854,21 @@ export declare namespace CountPercent {
1851
1854
  const filterSensitiveLog: (obj: CountPercent) => any;
1852
1855
  }
1853
1856
  /**
1854
- * <p>
1855
- * Entity that comprises information on large gaps between consecutive timestamps in data.
1856
- * </p>
1857
+ * <p> Entity that comprises information on large gaps between consecutive timestamps in data.
1858
+ * </p>
1857
1859
  */
1858
1860
  export interface LargeTimestampGaps {
1859
1861
  /**
1860
- * <p>
1861
- * Indicates whether there is a potential data issue related to large gaps in timestamps.
1862
- * </p>
1862
+ * <p> Indicates whether there is a potential data issue related to large gaps in timestamps.
1863
+ * </p>
1863
1864
  */
1864
1865
  Status: StatisticalIssueStatus | string | undefined;
1865
1866
  /**
1866
- * <p>
1867
- * Indicates the number of large timestamp gaps, if there are any.
1868
- * </p>
1867
+ * <p> Indicates the number of large timestamp gaps, if there are any. </p>
1869
1868
  */
1870
1869
  NumberOfLargeTimestampGaps?: number;
1871
1870
  /**
1872
- * <p>
1873
- * Indicates the size of the largest timestamp gap, in days.
1874
- * </p>
1871
+ * <p> Indicates the size of the largest timestamp gap, in days. </p>
1875
1872
  */
1876
1873
  MaxTimestampGapInDays?: number;
1877
1874
  }
@@ -1887,21 +1884,16 @@ export declare enum Monotonicity {
1887
1884
  STATIC = "STATIC"
1888
1885
  }
1889
1886
  /**
1890
- * <p>
1891
- * Entity that comprises information on monotonic values in the data.
1892
- * </p>
1887
+ * <p> Entity that comprises information on monotonic values in the data. </p>
1893
1888
  */
1894
1889
  export interface MonotonicValues {
1895
1890
  /**
1896
- * <p>
1897
- * Indicates whether there is a potential data issue related to having monotonic values.
1898
- * </p>
1891
+ * <p> Indicates whether there is a potential data issue related to having monotonic values.
1892
+ * </p>
1899
1893
  */
1900
1894
  Status: StatisticalIssueStatus | string | undefined;
1901
1895
  /**
1902
- * <p>
1903
- * Indicates the monotonicity of values. Can be INCREASING, DECREASING, or STATIC.
1904
- * </p>
1896
+ * <p> Indicates the monotonicity of values. Can be INCREASING, DECREASING, or STATIC. </p>
1905
1897
  */
1906
1898
  Monotonicity?: Monotonicity | string;
1907
1899
  }
@@ -1912,15 +1904,12 @@ export declare namespace MonotonicValues {
1912
1904
  const filterSensitiveLog: (obj: MonotonicValues) => any;
1913
1905
  }
1914
1906
  /**
1915
- * <p>
1916
- * Entity that comprises information on operating modes in data.
1917
- * </p>
1907
+ * <p> Entity that comprises information on operating modes in data. </p>
1918
1908
  */
1919
1909
  export interface MultipleOperatingModes {
1920
1910
  /**
1921
- * <p>
1922
- * Indicates whether there is a potential data issue related to having multiple operating modes.
1923
- * </p>
1911
+ * <p> Indicates whether there is a potential data issue related to having multiple operating
1912
+ * modes. </p>
1924
1913
  */
1925
1914
  Status: StatisticalIssueStatus | string | undefined;
1926
1915
  }
@@ -1931,101 +1920,72 @@ export declare namespace MultipleOperatingModes {
1931
1920
  const filterSensitiveLog: (obj: MultipleOperatingModes) => any;
1932
1921
  }
1933
1922
  /**
1934
- * <p>
1935
- *
1936
- * Summary of ingestion statistics like whether data exists, number of missing values, number of invalid values and so on related to the particular sensor.
1937
- *
1938
- * </p>
1923
+ * <p> Summary of ingestion statistics like whether data exists, number of missing values,
1924
+ * number of invalid values and so on related to the particular sensor. </p>
1939
1925
  */
1940
1926
  export interface SensorStatisticsSummary {
1941
1927
  /**
1942
- * <p>
1943
- *
1944
- * Name of the component to which the particular sensor belongs for which the statistics belong to.
1945
- *
1946
- * </p>
1928
+ * <p> Name of the component to which the particular sensor belongs for which the statistics
1929
+ * belong to. </p>
1947
1930
  */
1948
1931
  ComponentName?: string;
1949
1932
  /**
1950
- * <p>
1951
- *
1952
- * Name of the sensor that the statistics belong to.
1953
- *
1954
- * </p>
1933
+ * <p> Name of the sensor that the statistics belong to. </p>
1955
1934
  */
1956
1935
  SensorName?: string;
1957
1936
  /**
1958
- * <p>
1959
- *
1960
- * Parameter that indicates whether data exists for the sensor that the statistics belong to.
1961
- *
1962
- * </p>
1937
+ * <p> Parameter that indicates whether data exists for the sensor that the statistics belong
1938
+ * to. </p>
1963
1939
  */
1964
1940
  DataExists?: boolean;
1965
1941
  /**
1966
- * <p>
1967
- *
1968
- * Parameter that describes the total number of, and percentage of, values that are missing for the sensor that the statistics belong to.
1969
- *
1970
- * </p>
1942
+ * <p> Parameter that describes the total number of, and percentage of, values that are
1943
+ * missing for the sensor that the statistics belong to. </p>
1971
1944
  */
1972
1945
  MissingValues?: CountPercent;
1973
1946
  /**
1974
- * <p>
1975
- *
1976
- * Parameter that describes the total number of, and percentage of, values that are invalid for the sensor that the statistics belong to.
1977
- *
1978
- * </p>
1947
+ * <p> Parameter that describes the total number of, and percentage of, values that are
1948
+ * invalid for the sensor that the statistics belong to. </p>
1979
1949
  */
1980
1950
  InvalidValues?: CountPercent;
1981
1951
  /**
1982
- * <p>
1983
- *
1984
- * Parameter that describes the total number of invalid date entries associated with the sensor that the statistics belong to.
1985
- *
1986
- * </p>
1952
+ * <p> Parameter that describes the total number of invalid date entries associated with the
1953
+ * sensor that the statistics belong to. </p>
1987
1954
  */
1988
1955
  InvalidDateEntries?: CountPercent;
1989
1956
  /**
1990
- * <p>
1991
- * Parameter that describes the total number of duplicate timestamp records associated with the sensor that the statistics belong to.
1992
- * </p>
1957
+ * <p> Parameter that describes the total number of duplicate timestamp records associated
1958
+ * with the sensor that the statistics belong to. </p>
1993
1959
  */
1994
1960
  DuplicateTimestamps?: CountPercent;
1995
1961
  /**
1996
- * <p>
1997
- * Parameter that describes potential risk about whether data associated with the sensor is categorical.
1998
- * </p>
1962
+ * <p> Parameter that describes potential risk about whether data associated with the sensor
1963
+ * is categorical. </p>
1999
1964
  */
2000
1965
  CategoricalValues?: CategoricalValues;
2001
1966
  /**
2002
- * <p>
2003
- * Parameter that describes potential risk about whether data associated with the sensor has more than one operating mode.
2004
- * </p>
1967
+ * <p> Parameter that describes potential risk about whether data associated with the sensor
1968
+ * has more than one operating mode. </p>
2005
1969
  */
2006
1970
  MultipleOperatingModes?: MultipleOperatingModes;
2007
1971
  /**
2008
- * <p>
2009
- * Parameter that describes potential risk about whether data associated with the sensor contains one or more large gaps between consecutive timestamps.
2010
- * </p>
1972
+ * <p> Parameter that describes potential risk about whether data associated with the sensor
1973
+ * contains one or more large gaps between consecutive timestamps. </p>
2011
1974
  */
2012
1975
  LargeTimestampGaps?: LargeTimestampGaps;
2013
1976
  /**
2014
- * <p>
2015
- * Parameter that describes potential risk about whether data associated with the sensor is mostly monotonic.
2016
- * </p>
1977
+ * <p> Parameter that describes potential risk about whether data associated with the sensor
1978
+ * is mostly monotonic. </p>
2017
1979
  */
2018
1980
  MonotonicValues?: MonotonicValues;
2019
1981
  /**
2020
- * <p>
2021
- * Indicates the time reference to indicate the beginning of valid data associated with the sensor that the statistics belong to.
2022
- * </p>
1982
+ * <p> Indicates the time reference to indicate the beginning of valid data associated with
1983
+ * the sensor that the statistics belong to. </p>
2023
1984
  */
2024
1985
  DataStartTime?: Date;
2025
1986
  /**
2026
- * <p>
2027
- * Indicates the time reference to indicate the end of valid data associated with the sensor that the statistics belong to.
2028
- * </p>
1987
+ * <p> Indicates the time reference to indicate the end of valid data associated with the
1988
+ * sensor that the statistics belong to. </p>
2029
1989
  */
2030
1990
  DataEndTime?: Date;
2031
1991
  }
@@ -2037,15 +1997,14 @@ export declare namespace SensorStatisticsSummary {
2037
1997
  }
2038
1998
  export interface ListSensorStatisticsResponse {
2039
1999
  /**
2040
- * <p>
2041
- * Provides ingestion-based statistics regarding the specified sensor with respect to various validation types, such as whether data exists, the number and percentage of missing values, and the number and percentage of duplicate timestamps.
2042
- * </p>
2000
+ * <p> Provides ingestion-based statistics regarding the specified sensor with respect to
2001
+ * various validation types, such as whether data exists, the number and percentage of missing
2002
+ * values, and the number and percentage of duplicate timestamps. </p>
2043
2003
  */
2044
2004
  SensorStatisticsSummaries?: SensorStatisticsSummary[];
2045
2005
  /**
2046
- * <p>
2047
- * An opaque pagination token indicating where to continue the listing of sensor statistics.
2048
- * </p>
2006
+ * <p> An opaque pagination token indicating where to continue the listing of sensor
2007
+ * statistics. </p>
2049
2008
  */
2050
2009
  NextToken?: string;
2051
2010
  }
@@ -2264,18 +2223,19 @@ export interface UpdateInferenceSchedulerRequest {
2264
2223
  /**
2265
2224
  * <p> A period of time (in minutes) by which inference on the data is delayed after the data
2266
2225
  * starts. For instance, if you select an offset delay time of five minutes, inference will
2267
- * not begin on the data until the first data measurement after the five minute mark. For example, if
2268
- * five minutes is selected, the inference scheduler will wake up at the configured frequency with the
2269
- * additional five minute delay time to check the customer S3 bucket. The customer can upload data at
2270
- * the same frequency and they don't need to stop and restart the scheduler when uploading new data.</p>
2226
+ * not begin on the data until the first data measurement after the five minute mark. For
2227
+ * example, if five minutes is selected, the inference scheduler will wake up at the
2228
+ * configured frequency with the additional five minute delay time to check the customer S3
2229
+ * bucket. The customer can upload data at the same frequency and they don't need to stop and
2230
+ * restart the scheduler when uploading new data.</p>
2271
2231
  */
2272
2232
  DataDelayOffsetInMinutes?: number;
2273
2233
  /**
2274
2234
  * <p>How often data is uploaded to the source S3 bucket for the input data. The value chosen
2275
2235
  * is the length of time between data uploads. For instance, if you select 5 minutes, Amazon
2276
- * Lookout for Equipment will upload the real-time data to the source bucket once every 5 minutes. This frequency
2277
- * also determines how often Amazon Lookout for Equipment starts a scheduled inference on your data. In this
2278
- * example, it starts once every 5 minutes. </p>
2236
+ * Lookout for Equipment will upload the real-time data to the source bucket once every 5
2237
+ * minutes. This frequency also determines how often Amazon Lookout for Equipment starts a
2238
+ * scheduled inference on your data. In this example, it starts once every 5 minutes. </p>
2279
2239
  */
2280
2240
  DataUploadFrequency?: DataUploadFrequency | string;
2281
2241
  /**
@@ -2284,7 +2244,8 @@ export interface UpdateInferenceSchedulerRequest {
2284
2244
  */
2285
2245
  DataInputConfiguration?: InferenceInputConfiguration;
2286
2246
  /**
2287
- * <p> Specifies information for the output results from the inference scheduler, including the output S3 location. </p>
2247
+ * <p> Specifies information for the output results from the inference scheduler, including
2248
+ * the output S3 location. </p>
2288
2249
  */
2289
2250
  DataOutputConfiguration?: InferenceOutputConfiguration;
2290
2251
  /**