aws-sdk-supplychain 1.14.0 → 1.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -36,8 +36,6 @@ require 'aws-sdk-core/plugins/telemetry.rb'
36
36
  require 'aws-sdk-core/plugins/sign.rb'
37
37
  require 'aws-sdk-core/plugins/protocols/rest_json.rb'
38
38
 
39
- Aws::Plugins::GlobalConfiguration.add_identifier(:supplychain)
40
-
41
39
  module Aws::SupplyChain
42
40
  # An API client for SupplyChain. To construct a client, you need to configure a `:region` and `:credentials`.
43
41
  #
@@ -465,7 +463,15 @@ module Aws::SupplyChain
465
463
  # permissions for AWS Supply Chain to read the file.
466
464
  #
467
465
  # @option params [String] :client_token
468
- # An idempotency token.
466
+ # An idempotency token ensures the API request is completed no more
467
+ # than once. This way, retrying the request will not trigger the
468
+ # operation multiple times. A client token is a unique, case-sensitive
469
+ # string of 33 to 128 ASCII characters. To make an idempotent API
470
+ # request, specify a client token in the request. You should not reuse
471
+ # the same client token for other requests. If you retry a successful
472
+ # request with the same client token, the request will succeed with no
473
+ # further actions being taken, and you will receive the same API
474
+ # response as the original successful request.
469
475
  #
470
476
  # **A suitable default value is auto-generated.** You should normally
471
477
  # not need to pass this option.
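A minimal usage sketch of the client token contract described above, assuming it is the :client_token option of create_bill_of_materials_import_job (the operation this doc block belongs to); the instance ID and S3 URI below are placeholders:

    require "aws-sdk-supplychain"
    require "securerandom"

    client = Aws::SupplyChain::Client.new(region: "us-east-1")
    token  = SecureRandom.uuid # 36 ASCII characters, inside the 33-128 character range

    create_job = lambda do
      client.create_bill_of_materials_import_job(
        instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d", # placeholder
        s3uri: "s3://example-bucket/bom.csv",                # placeholder
        client_token: token
      )
    end

    begin
      resp = create_job.call
    rescue Seahorse::Client::NetworkingError
      # Same token on retry, so a request that already succeeded is not re-run.
      resp = create_job.call
    end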
@@ -509,6 +515,659 @@ module Aws::SupplyChain
509
515
  req.send_request(options)
510
516
  end
511
517
 
518
+ # Create a DataIntegrationFlow that maps one or more sources to a
519
+ # target using a SQL transformation query.
520
+ #
521
+ # @option params [required, String] :instance_id
522
+ # The Amazon Web Services Supply Chain instance identifier.
523
+ #
524
+ # @option params [required, String] :name
525
+ # Name of the DataIntegrationFlow.
526
+ #
527
+ # @option params [required, Array<Types::DataIntegrationFlowSource>] :sources
528
+ # The source configurations for DataIntegrationFlow.
529
+ #
530
+ # @option params [required, Types::DataIntegrationFlowTransformation] :transformation
531
+ # The transformation configurations for DataIntegrationFlow.
532
+ #
533
+ # @option params [required, Types::DataIntegrationFlowTarget] :target
534
+ # The target configurations for DataIntegrationFlow.
535
+ #
536
+ # @option params [Hash<String,String>] :tags
537
+ # The tags of the DataIntegrationFlow to be created.
538
+ #
539
+ # @return [Types::CreateDataIntegrationFlowResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
540
+ #
541
+ # * {Types::CreateDataIntegrationFlowResponse#instance_id #instance_id} => String
542
+ # * {Types::CreateDataIntegrationFlowResponse#name #name} => String
543
+ #
544
+ #
545
+ # @example Example: Successful CreateDataIntegrationFlow for s3 to dataset flow
546
+ #
547
+ # resp = client.create_data_integration_flow({
548
+ # name: "testStagingFlow",
549
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
550
+ # sources: [
551
+ # {
552
+ # s3_source: {
553
+ # bucket_name: "aws-supply-chain-data-b8c7bb28-a576-4334-b481-6d6e8e47371f",
554
+ # prefix: "example-prefix",
555
+ # },
556
+ # source_name: "testSourceName",
557
+ # source_type: "S3",
558
+ # },
559
+ # ],
560
+ # tags: {
561
+ # "tagKey1" => "tagValue1",
562
+ # },
563
+ # target: {
564
+ # dataset_target: {
565
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset",
566
+ # },
567
+ # target_type: "DATASET",
568
+ # },
569
+ # transformation: {
570
+ # sql_transformation: {
571
+ # query: "SELECT * FROM testSourceName",
572
+ # },
573
+ # transformation_type: "SQL",
574
+ # },
575
+ # })
576
+ #
577
+ # resp.to_h outputs the following:
578
+ # {
579
+ # name: "testStagingFlow",
580
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
581
+ # }
582
+ #
583
+ # @example Example: Successful CreateDataIntegrationFlow for dataset to dataset flow
584
+ #
585
+ # resp = client.create_data_integration_flow({
586
+ # name: "trading-partner",
587
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
588
+ # sources: [
589
+ # {
590
+ # dataset_source: {
591
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset1",
592
+ # },
593
+ # source_name: "testSourceName1",
594
+ # source_type: "DATASET",
595
+ # },
596
+ # {
597
+ # dataset_source: {
598
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset2",
599
+ # },
600
+ # source_name: "testSourceName2",
601
+ # source_type: "DATASET",
602
+ # },
603
+ # ],
604
+ # tags: {
605
+ # "tagKey1" => "tagValue1",
606
+ # },
607
+ # target: {
608
+ # dataset_target: {
609
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/asc/datasets/trading_partner",
610
+ # },
611
+ # target_type: "DATASET",
612
+ # },
613
+ # transformation: {
614
+ # sql_transformation: {
615
+ # query: "SELECT S1.id AS id, S1.poc_org_unit_description AS description, S1.company_id AS company_id, S1.tpartner_type AS tpartner_type, S1.geo_id AS geo_id, S1.eff_start_date AS eff_start_date, S1.eff_end_date AS eff_end_date FROM testSourceName1 AS S1 LEFT JOIN testSourceName2 as S2 ON S1.id=S2.id",
616
+ # },
617
+ # transformation_type: "SQL",
618
+ # },
619
+ # })
620
+ #
621
+ # resp.to_h outputs the following:
622
+ # {
623
+ # name: "trading-partner",
624
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
625
+ # }
626
+ #
627
+ # @example Request syntax with placeholder values
628
+ #
629
+ # resp = client.create_data_integration_flow({
630
+ # instance_id: "UUID", # required
631
+ # name: "DataIntegrationFlowName", # required
632
+ # sources: [ # required
633
+ # {
634
+ # source_type: "S3", # required, accepts S3, DATASET
635
+ # source_name: "DataIntegrationFlowSourceName", # required
636
+ # s3_source: {
637
+ # bucket_name: "S3BucketName", # required
638
+ # prefix: "DataIntegrationFlowS3Prefix", # required
639
+ # options: {
640
+ # file_type: "CSV", # accepts CSV, PARQUET, JSON
641
+ # },
642
+ # },
643
+ # dataset_source: {
644
+ # dataset_identifier: "DatasetIdentifier", # required
645
+ # options: {
646
+ # load_type: "INCREMENTAL", # accepts INCREMENTAL, REPLACE
647
+ # dedupe_records: false,
648
+ # },
649
+ # },
650
+ # },
651
+ # ],
652
+ # transformation: { # required
653
+ # transformation_type: "SQL", # required, accepts SQL, NONE
654
+ # sql_transformation: {
655
+ # query: "DataIntegrationFlowSQLQuery", # required
656
+ # },
657
+ # },
658
+ # target: { # required
659
+ # target_type: "S3", # required, accepts S3, DATASET
660
+ # s3_target: {
661
+ # bucket_name: "S3BucketName", # required
662
+ # prefix: "DataIntegrationFlowS3Prefix", # required
663
+ # options: {
664
+ # file_type: "CSV", # accepts CSV, PARQUET, JSON
665
+ # },
666
+ # },
667
+ # dataset_target: {
668
+ # dataset_identifier: "DatasetIdentifier", # required
669
+ # options: {
670
+ # load_type: "INCREMENTAL", # accepts INCREMENTAL, REPLACE
671
+ # dedupe_records: false,
672
+ # },
673
+ # },
674
+ # },
675
+ # tags: {
676
+ # "TagKey" => "TagValue",
677
+ # },
678
+ # })
679
+ #
680
+ # @example Response structure
681
+ #
682
+ # resp.instance_id #=> String
683
+ # resp.name #=> String
684
+ #
685
+ # @see http://docs.aws.amazon.com/goto/WebAPI/supplychain-2024-01-01/CreateDataIntegrationFlow AWS API Documentation
686
+ #
687
+ # @overload create_data_integration_flow(params = {})
688
+ # @param [Hash] params ({})
689
+ def create_data_integration_flow(params = {}, options = {})
690
+ req = build_request(:create_data_integration_flow, params)
691
+ req.send_request(options)
692
+ end
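The request syntax above also accepts an S3 target; a hedged sketch of a dataset-to-Amazon-S3 export flow, using only fields shown in the placeholder syntax (the flow name, bucket, and prefix below are hypothetical):

    client = Aws::SupplyChain::Client.new(region: "us-east-1")

    resp = client.create_data_integration_flow(
      instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
      name: "exportToS3Flow",
      sources: [
        {
          source_type: "DATASET",
          source_name: "exportSource",
          dataset_source: {
            dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset",
          },
        },
      ],
      transformation: {
        transformation_type: "SQL",
        sql_transformation: { query: "SELECT * FROM exportSource" },
      },
      target: {
        target_type: "S3",
        s3_target: {
          bucket_name: "example-export-bucket",
          prefix: "exports/flows",
          options: { file_type: "PARQUET" },
        },
      },
    )
    resp.name #=> "exportToS3Flow"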
693
+
694
+ # Create a data lake dataset.
695
+ #
696
+ # @option params [required, String] :instance_id
697
+ # The Amazon Web Services Supply Chain instance identifier.
698
+ #
699
+ # @option params [required, String] :namespace
700
+ # The name space of the dataset.
701
+ #
702
+ # * **asc** - For information on the Amazon Web Services Supply Chain
703
+ # supported datasets see
704
+ # [https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html][1].
705
+ #
706
+ # * **default** - For datasets with custom user-defined schemas.
707
+ #
708
+ #
709
+ #
710
+ # [1]: https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html
711
+ #
712
+ # @option params [required, String] :name
713
+ # The name of the dataset. For **asc** name space, the name must be one
714
+ # of the supported data entities under
715
+ # [https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html][1].
716
+ #
717
+ #
718
+ #
719
+ # [1]: https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html
720
+ #
721
+ # @option params [Types::DataLakeDatasetSchema] :schema
722
+ # The custom schema of the data lake dataset. It is only required when
723
+ # the name space is *default*.
724
+ #
725
+ # @option params [String] :description
726
+ # The description of the dataset.
727
+ #
728
+ # @option params [Hash<String,String>] :tags
729
+ # The tags of the dataset.
730
+ #
731
+ # @return [Types::CreateDataLakeDatasetResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
732
+ #
733
+ # * {Types::CreateDataLakeDatasetResponse#dataset #dataset} => Types::DataLakeDataset
734
+ #
735
+ #
736
+ # @example Example: Create an AWS Supply Chain inbound order dataset
737
+ #
738
+ # resp = client.create_data_lake_dataset({
739
+ # name: "inbound_order",
740
+ # description: "This is an AWS Supply Chain inbound order dataset",
741
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
742
+ # namespace: "asc",
743
+ # tags: {
744
+ # "tagKey1" => "tagValue1",
745
+ # "tagKey2" => "tagValue2",
746
+ # },
747
+ # })
748
+ #
749
+ # resp.to_h outputs the following:
750
+ # {
751
+ # dataset: {
752
+ # name: "inbound_order",
753
+ # arn: "arn:aws:scn:us-east-1:012345678910:instance/1877dd20-dee9-4639-8e99-cb67acf21fe5/namespaces/asc/datasets/inbound_order",
754
+ # created_time: Time.parse(1727116807.751),
755
+ # description: "This is an AWS Supply Chain inbound order dataset",
756
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
757
+ # last_modified_time: Time.parse(1727116807.751),
758
+ # namespace: "asc",
759
+ # schema: {
760
+ # name: "InboundOrder",
761
+ # fields: [
762
+ # {
763
+ # name: "id",
764
+ # type: "STRING",
765
+ # is_required: true,
766
+ # },
767
+ # {
768
+ # name: "tpartner_id",
769
+ # type: "STRING",
770
+ # is_required: true,
771
+ # },
772
+ # {
773
+ # name: "connection_id",
774
+ # type: "STRING",
775
+ # is_required: true,
776
+ # },
777
+ # {
778
+ # name: "order_type",
779
+ # type: "STRING",
780
+ # is_required: false,
781
+ # },
782
+ # {
783
+ # name: "order_status",
784
+ # type: "STRING",
785
+ # is_required: false,
786
+ # },
787
+ # {
788
+ # name: "inbound_order_url",
789
+ # type: "STRING",
790
+ # is_required: false,
791
+ # },
792
+ # {
793
+ # name: "order_creation_date",
794
+ # type: "TIMESTAMP",
795
+ # is_required: false,
796
+ # },
797
+ # {
798
+ # name: "company_id",
799
+ # type: "STRING",
800
+ # is_required: false,
801
+ # },
802
+ # {
803
+ # name: "to_site_id",
804
+ # type: "STRING",
805
+ # is_required: false,
806
+ # },
807
+ # {
808
+ # name: "order_currency_uom",
809
+ # type: "STRING",
810
+ # is_required: false,
811
+ # },
812
+ # {
813
+ # name: "vendor_currency_uom",
814
+ # type: "STRING",
815
+ # is_required: false,
816
+ # },
817
+ # {
818
+ # name: "exchange_rate",
819
+ # type: "DOUBLE",
820
+ # is_required: false,
821
+ # },
822
+ # {
823
+ # name: "exchange_rate_date",
824
+ # type: "TIMESTAMP",
825
+ # is_required: false,
826
+ # },
827
+ # {
828
+ # name: "incoterm",
829
+ # type: "STRING",
830
+ # is_required: false,
831
+ # },
832
+ # {
833
+ # name: "incoterm2",
834
+ # type: "STRING",
835
+ # is_required: false,
836
+ # },
837
+ # {
838
+ # name: "incoterm_location_1",
839
+ # type: "STRING",
840
+ # is_required: false,
841
+ # },
842
+ # {
843
+ # name: "incoterm_location_2",
844
+ # type: "STRING",
845
+ # is_required: false,
846
+ # },
847
+ # {
848
+ # name: "submitted_date",
849
+ # type: "TIMESTAMP",
850
+ # is_required: false,
851
+ # },
852
+ # {
853
+ # name: "agreement_start_date",
854
+ # type: "TIMESTAMP",
855
+ # is_required: false,
856
+ # },
857
+ # {
858
+ # name: "agreement_end_date",
859
+ # type: "TIMESTAMP",
860
+ # is_required: false,
861
+ # },
862
+ # {
863
+ # name: "shipping_instr_code",
864
+ # type: "STRING",
865
+ # is_required: false,
866
+ # },
867
+ # {
868
+ # name: "payment_terms_code",
869
+ # type: "STRING",
870
+ # is_required: false,
871
+ # },
872
+ # {
873
+ # name: "std_terms_agreement",
874
+ # type: "STRING",
875
+ # is_required: false,
876
+ # },
877
+ # {
878
+ # name: "std_terms_agreement_ver",
879
+ # type: "STRING",
880
+ # is_required: false,
881
+ # },
882
+ # {
883
+ # name: "agreement_number",
884
+ # type: "STRING",
885
+ # is_required: false,
886
+ # },
887
+ # {
888
+ # name: "source",
889
+ # type: "STRING",
890
+ # is_required: false,
891
+ # },
892
+ # {
893
+ # name: "source_update_dttm",
894
+ # type: "TIMESTAMP",
895
+ # is_required: false,
896
+ # },
897
+ # {
898
+ # name: "source_event_id",
899
+ # type: "STRING",
900
+ # is_required: false,
901
+ # },
902
+ # {
903
+ # name: "db_creation_dttm",
904
+ # type: "TIMESTAMP",
905
+ # is_required: false,
906
+ # },
907
+ # {
908
+ # name: "db_updation_dttm",
909
+ # type: "TIMESTAMP",
910
+ # is_required: false,
911
+ # },
912
+ # ],
913
+ # },
914
+ # },
915
+ # }
916
+ #
917
+ # @example Example: Create a custom dataset
918
+ #
919
+ # resp = client.create_data_lake_dataset({
920
+ # name: "my_dataset",
921
+ # description: "This is a custom dataset",
922
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
923
+ # namespace: "default",
924
+ # schema: {
925
+ # name: "MyDataset",
926
+ # fields: [
927
+ # {
928
+ # name: "id",
929
+ # type: "INT",
930
+ # is_required: true,
931
+ # },
932
+ # {
933
+ # name: "description",
934
+ # type: "STRING",
935
+ # is_required: true,
936
+ # },
937
+ # {
938
+ # name: "price",
939
+ # type: "DOUBLE",
940
+ # is_required: false,
941
+ # },
942
+ # {
943
+ # name: "creation_time",
944
+ # type: "TIMESTAMP",
945
+ # is_required: false,
946
+ # },
947
+ # ],
948
+ # },
949
+ # tags: {
950
+ # "tagKey1" => "tagValue1",
951
+ # "tagKey2" => "tagValue2",
952
+ # },
953
+ # })
954
+ #
955
+ # resp.to_h outputs the following:
956
+ # {
957
+ # dataset: {
958
+ # name: "my_dataset",
959
+ # arn: "arn:aws:scn:us-east-1:012345678910:instance/1877dd20-dee9-4639-8e99-cb67acf21fe5/namespaces/default/datasets/my_dataset",
960
+ # created_time: Time.parse(1727116807.751),
961
+ # description: "This is a custom dataset",
962
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
963
+ # last_modified_time: Time.parse(1727116807.751),
964
+ # namespace: "default",
965
+ # schema: {
966
+ # name: "MyDataset",
967
+ # fields: [
968
+ # {
969
+ # name: "id",
970
+ # type: "INT",
971
+ # is_required: true,
972
+ # },
973
+ # {
974
+ # name: "description",
975
+ # type: "STRING",
976
+ # is_required: true,
977
+ # },
978
+ # {
979
+ # name: "price",
980
+ # type: "DOUBLE",
981
+ # is_required: false,
982
+ # },
983
+ # {
984
+ # name: "creation_time",
985
+ # type: "TIMESTAMP",
986
+ # is_required: false,
987
+ # },
988
+ # ],
989
+ # },
990
+ # },
991
+ # }
992
+ #
993
+ # @example Request syntax with placeholder values
994
+ #
995
+ # resp = client.create_data_lake_dataset({
996
+ # instance_id: "UUID", # required
997
+ # namespace: "DataLakeDatasetNamespace", # required
998
+ # name: "DataLakeDatasetName", # required
999
+ # schema: {
1000
+ # name: "DataLakeDatasetSchemaName", # required
1001
+ # fields: [ # required
1002
+ # {
1003
+ # name: "DataLakeDatasetSchemaFieldName", # required
1004
+ # type: "INT", # required, accepts INT, DOUBLE, STRING, TIMESTAMP
1005
+ # is_required: false, # required
1006
+ # },
1007
+ # ],
1008
+ # },
1009
+ # description: "DataLakeDatasetDescription",
1010
+ # tags: {
1011
+ # "TagKey" => "TagValue",
1012
+ # },
1013
+ # })
1014
+ #
1015
+ # @example Response structure
1016
+ #
1017
+ # resp.dataset.instance_id #=> String
1018
+ # resp.dataset.namespace #=> String
1019
+ # resp.dataset.name #=> String
1020
+ # resp.dataset.arn #=> String
1021
+ # resp.dataset.schema.name #=> String
1022
+ # resp.dataset.schema.fields #=> Array
1023
+ # resp.dataset.schema.fields[0].name #=> String
1024
+ # resp.dataset.schema.fields[0].type #=> String, one of "INT", "DOUBLE", "STRING", "TIMESTAMP"
1025
+ # resp.dataset.schema.fields[0].is_required #=> Boolean
1026
+ # resp.dataset.description #=> String
1027
+ # resp.dataset.created_time #=> Time
1028
+ # resp.dataset.last_modified_time #=> Time
1029
+ #
1030
+ # @see http://docs.aws.amazon.com/goto/WebAPI/supplychain-2024-01-01/CreateDataLakeDataset AWS API Documentation
1031
+ #
1032
+ # @overload create_data_lake_dataset(params = {})
1033
+ # @param [Hash] params ({})
1034
+ def create_data_lake_dataset(params = {}, options = {})
1035
+ req = build_request(:create_data_lake_dataset, params)
1036
+ req.send_request(options)
1037
+ end
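A short sketch of the name space rule described above: the schema argument is supplied only when the name space is default. The instance ID is a placeholder and "outbound_order_line" is assumed to be one of the supported asc data entities:

    client = Aws::SupplyChain::Client.new(region: "us-east-1")
    instance_id = "1877dd20-dee9-4639-8e99-cb67acf21fe5" # placeholder

    # asc name space: the dataset name must be a supported entity; no schema is passed.
    client.create_data_lake_dataset(
      instance_id: instance_id,
      namespace: "asc",
      name: "outbound_order_line"
    )

    # default name space: any name, with an explicit custom schema.
    client.create_data_lake_dataset(
      instance_id: instance_id,
      namespace: "default",
      name: "my_custom_dataset",
      schema: {
        name: "MyCustomDataset",
        fields: [
          { name: "id", type: "STRING", is_required: true },
          { name: "quantity", type: "INT", is_required: false },
        ],
      }
    )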
1038
+
1039
+ # Delete the DataIntegrationFlow.
1040
+ #
1041
+ # @option params [required, String] :instance_id
1042
+ # The Amazon Web Services Supply Chain instance identifier.
1043
+ #
1044
+ # @option params [required, String] :name
1045
+ # The name of the DataIntegrationFlow to be deleted.
1046
+ #
1047
+ # @return [Types::DeleteDataIntegrationFlowResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
1048
+ #
1049
+ # * {Types::DeleteDataIntegrationFlowResponse#instance_id #instance_id} => String
1050
+ # * {Types::DeleteDataIntegrationFlowResponse#name #name} => String
1051
+ #
1052
+ #
1053
+ # @example Example: Successful DeleteDataIntegrationFlow
1054
+ #
1055
+ # resp = client.delete_data_integration_flow({
1056
+ # name: "testStagingFlow",
1057
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
1058
+ # })
1059
+ #
1060
+ # resp.to_h outputs the following:
1061
+ # {
1062
+ # name: "testStagingFlow",
1063
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
1064
+ # }
1065
+ #
1066
+ # @example Request syntax with placeholder values
1067
+ #
1068
+ # resp = client.delete_data_integration_flow({
1069
+ # instance_id: "UUID", # required
1070
+ # name: "DataIntegrationFlowName", # required
1071
+ # })
1072
+ #
1073
+ # @example Response structure
1074
+ #
1075
+ # resp.instance_id #=> String
1076
+ # resp.name #=> String
1077
+ #
1078
+ # @see http://docs.aws.amazon.com/goto/WebAPI/supplychain-2024-01-01/DeleteDataIntegrationFlow AWS API Documentation
1079
+ #
1080
+ # @overload delete_data_integration_flow(params = {})
1081
+ # @param [Hash] params ({})
1082
+ def delete_data_integration_flow(params = {}, options = {})
1083
+ req = build_request(:delete_data_integration_flow, params)
1084
+ req.send_request(options)
1085
+ end
1086
+
1087
+ # Delete a data lake dataset.
1088
+ #
1089
+ # @option params [required, String] :instance_id
1090
+ # The AWS Supply Chain instance identifier.
1091
+ #
1092
+ # @option params [required, String] :namespace
1093
+ # The namespace of the dataset. The available values are:
1094
+ #
1095
+ # * asc: for [ AWS Supply Chain supported datasets ][1].
1096
+ #
1097
+ # * default: for datasets with custom user-defined schemas.
1098
+ #
1099
+ #
1100
+ #
1101
+ # [1]: https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html
1102
+ #
1103
+ # @option params [required, String] :name
1104
+ # The name of the dataset. If the namespace is *asc*, the name must be
1105
+ # one of the supported [data entities][1].
1106
+ #
1107
+ #
1108
+ #
1109
+ # [1]: https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html
1110
+ #
1111
+ # @return [Types::DeleteDataLakeDatasetResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
1112
+ #
1113
+ # * {Types::DeleteDataLakeDatasetResponse#instance_id #instance_id} => String
1114
+ # * {Types::DeleteDataLakeDatasetResponse#namespace #namespace} => String
1115
+ # * {Types::DeleteDataLakeDatasetResponse#name #name} => String
1116
+ #
1117
+ #
1118
+ # @example Example: Delete an AWS Supply Chain inbound_order dataset
1119
+ #
1120
+ # resp = client.delete_data_lake_dataset({
1121
+ # name: "inbound_order",
1122
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
1123
+ # namespace: "asc",
1124
+ # })
1125
+ #
1126
+ # resp.to_h outputs the following:
1127
+ # {
1128
+ # name: "inbound_order",
1129
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
1130
+ # namespace: "asc",
1131
+ # }
1132
+ #
1133
+ # @example Example: Delete a custom dataset
1134
+ #
1135
+ # resp = client.delete_data_lake_dataset({
1136
+ # name: "my_dataset",
1137
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
1138
+ # namespace: "default",
1139
+ # })
1140
+ #
1141
+ # resp.to_h outputs the following:
1142
+ # {
1143
+ # name: "my_dataset",
1144
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
1145
+ # namespace: "default",
1146
+ # }
1147
+ #
1148
+ # @example Request syntax with placeholder values
1149
+ #
1150
+ # resp = client.delete_data_lake_dataset({
1151
+ # instance_id: "UUID", # required
1152
+ # namespace: "DataLakeDatasetNamespace", # required
1153
+ # name: "DataLakeDatasetName", # required
1154
+ # })
1155
+ #
1156
+ # @example Response structure
1157
+ #
1158
+ # resp.instance_id #=> String
1159
+ # resp.namespace #=> String
1160
+ # resp.name #=> String
1161
+ #
1162
+ # @see http://docs.aws.amazon.com/goto/WebAPI/supplychain-2024-01-01/DeleteDataLakeDataset AWS API Documentation
1163
+ #
1164
+ # @overload delete_data_lake_dataset(params = {})
1165
+ # @param [Hash] params ({})
1166
+ def delete_data_lake_dataset(params = {}, options = {})
1167
+ req = build_request(:delete_data_lake_dataset, params)
1168
+ req.send_request(options)
1169
+ end
1170
+
512
1171
  # Get status and details of a BillOfMaterialsImportJob.
513
1172
  #
514
1173
  # @option params [required, String] :instance_id
@@ -581,6 +1240,884 @@ module Aws::SupplyChain
581
1240
  req.send_request(options)
582
1241
  end
583
1242
 
1243
+ # View the DataIntegrationFlow details.
1244
+ #
1245
+ # @option params [required, String] :instance_id
1246
+ # The Amazon Web Services Supply Chain instance identifier.
1247
+ #
1248
+ # @option params [required, String] :name
1249
+ # The name of the DataIntegrationFlow created.
1250
+ #
1251
+ # @return [Types::GetDataIntegrationFlowResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
1252
+ #
1253
+ # * {Types::GetDataIntegrationFlowResponse#flow #flow} => Types::DataIntegrationFlow
1254
+ #
1255
+ #
1256
+ # @example Example: Successful GetDataIntegrationFlow
1257
+ #
1258
+ # resp = client.get_data_integration_flow({
1259
+ # name: "testStagingFlow",
1260
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
1261
+ # })
1262
+ #
1263
+ # resp.to_h outputs the following:
1264
+ # {
1265
+ # flow: {
1266
+ # name: "testStagingFlow",
1267
+ # created_time: Time.parse(1724956400.44),
1268
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
1269
+ # last_modified_time: Time.parse(1724956400.44),
1270
+ # sources: [
1271
+ # {
1272
+ # s3_source: {
1273
+ # bucket_name: "aws-supply-chain-data-b8c7bb28-a576-4334-b481-6d6e8e47371f",
1274
+ # prefix: "example-prefix",
1275
+ # },
1276
+ # source_name: "testSourceName",
1277
+ # source_type: "S3",
1278
+ # },
1279
+ # ],
1280
+ # target: {
1281
+ # dataset_target: {
1282
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset",
1283
+ # },
1284
+ # target_type: "DATASET",
1285
+ # },
1286
+ # transformation: {
1287
+ # sql_transformation: {
1288
+ # query: "SELECT * FROM testSourceName",
1289
+ # },
1290
+ # transformation_type: "SQL",
1291
+ # },
1292
+ # },
1293
+ # }
1294
+ #
1295
+ # @example Request syntax with placeholder values
1296
+ #
1297
+ # resp = client.get_data_integration_flow({
1298
+ # instance_id: "UUID", # required
1299
+ # name: "DataIntegrationFlowName", # required
1300
+ # })
1301
+ #
1302
+ # @example Response structure
1303
+ #
1304
+ # resp.flow.instance_id #=> String
1305
+ # resp.flow.name #=> String
1306
+ # resp.flow.sources #=> Array
1307
+ # resp.flow.sources[0].source_type #=> String, one of "S3", "DATASET"
1308
+ # resp.flow.sources[0].source_name #=> String
1309
+ # resp.flow.sources[0].s3_source.bucket_name #=> String
1310
+ # resp.flow.sources[0].s3_source.prefix #=> String
1311
+ # resp.flow.sources[0].s3_source.options.file_type #=> String, one of "CSV", "PARQUET", "JSON"
1312
+ # resp.flow.sources[0].dataset_source.dataset_identifier #=> String
1313
+ # resp.flow.sources[0].dataset_source.options.load_type #=> String, one of "INCREMENTAL", "REPLACE"
1314
+ # resp.flow.sources[0].dataset_source.options.dedupe_records #=> Boolean
1315
+ # resp.flow.transformation.transformation_type #=> String, one of "SQL", "NONE"
1316
+ # resp.flow.transformation.sql_transformation.query #=> String
1317
+ # resp.flow.target.target_type #=> String, one of "S3", "DATASET"
1318
+ # resp.flow.target.s3_target.bucket_name #=> String
1319
+ # resp.flow.target.s3_target.prefix #=> String
1320
+ # resp.flow.target.s3_target.options.file_type #=> String, one of "CSV", "PARQUET", "JSON"
1321
+ # resp.flow.target.dataset_target.dataset_identifier #=> String
1322
+ # resp.flow.target.dataset_target.options.load_type #=> String, one of "INCREMENTAL", "REPLACE"
1323
+ # resp.flow.target.dataset_target.options.dedupe_records #=> Boolean
1324
+ # resp.flow.created_time #=> Time
1325
+ # resp.flow.last_modified_time #=> Time
1326
+ #
1327
+ # @see http://docs.aws.amazon.com/goto/WebAPI/supplychain-2024-01-01/GetDataIntegrationFlow AWS API Documentation
1328
+ #
1329
+ # @overload get_data_integration_flow(params = {})
1330
+ # @param [Hash] params ({})
1331
+ def get_data_integration_flow(params = {}, options = {})
1332
+ req = build_request(:get_data_integration_flow, params)
1333
+ req.send_request(options)
1334
+ end
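Because a source carries either s3_source or dataset_source depending on source_type (see the response structure above), here is a small sketch that inspects both shapes; the flow name and instance ID reuse the example above:

    client = Aws::SupplyChain::Client.new(region: "us-east-1")
    resp = client.get_data_integration_flow(
      instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
      name: "testStagingFlow"
    )

    resp.flow.sources.each do |source|
      case source.source_type
      when "S3"
        puts "s3://#{source.s3_source.bucket_name}/#{source.s3_source.prefix}"
      when "DATASET"
        puts source.dataset_source.dataset_identifier
      end
    end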
1335
+
1336
+ # Get a data lake dataset.
1337
+ #
1338
+ # @option params [required, String] :instance_id
1339
+ # The Amazon Web Services Supply Chain instance identifier.
1340
+ #
1341
+ # @option params [required, String] :namespace
1342
+ # The name space of the dataset. The available values are:
1343
+ #
1344
+ # * **asc** - For information on the Amazon Web Services Supply Chain
1345
+ # supported datasets see
1346
+ # [https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html][1].
1347
+ #
1348
+ # * **default** - For datasets with custom user-defined schemas.
1349
+ #
1350
+ #
1351
+ #
1352
+ # [1]: https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html
1353
+ #
1354
+ # @option params [required, String] :name
1355
+ # The name of the dataset. For **asc** name space, the name must be one
1356
+ # of the supported data entities under
1357
+ # [https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html][1].
1358
+ #
1359
+ #
1360
+ #
1361
+ # [1]: https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html
1362
+ #
1363
+ # @return [Types::GetDataLakeDatasetResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
1364
+ #
1365
+ # * {Types::GetDataLakeDatasetResponse#dataset #dataset} => Types::DataLakeDataset
1366
+ #
1367
+ #
1368
+ # @example Example: Get properties of an existing AWS Supply Chain inbound order dataset
1369
+ #
1370
+ # resp = client.get_data_lake_dataset({
1371
+ # name: "inbound_order",
1372
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
1373
+ # namespace: "asc",
1374
+ # })
1375
+ #
1376
+ # resp.to_h outputs the following:
1377
+ # {
1378
+ # dataset: {
1379
+ # name: "inbound_order",
1380
+ # arn: "arn:aws:scn:us-east-1:012345678910:instance/1877dd20-dee9-4639-8e99-cb67acf21fe5/namespaces/asc/datasets/inbound_order",
1381
+ # created_time: Time.parse(1727116807.751),
1382
+ # description: "This is an AWS Supply Chain inbound order dataset",
1383
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
1384
+ # last_modified_time: Time.parse(1727116807.751),
1385
+ # namespace: "asc",
1386
+ # schema: {
1387
+ # name: "InboundOrder",
1388
+ # fields: [
1389
+ # {
1390
+ # name: "id",
1391
+ # type: "STRING",
1392
+ # is_required: true,
1393
+ # },
1394
+ # {
1395
+ # name: "tpartner_id",
1396
+ # type: "STRING",
1397
+ # is_required: true,
1398
+ # },
1399
+ # {
1400
+ # name: "connection_id",
1401
+ # type: "STRING",
1402
+ # is_required: true,
1403
+ # },
1404
+ # {
1405
+ # name: "order_type",
1406
+ # type: "STRING",
1407
+ # is_required: false,
1408
+ # },
1409
+ # {
1410
+ # name: "order_status",
1411
+ # type: "STRING",
1412
+ # is_required: false,
1413
+ # },
1414
+ # {
1415
+ # name: "inbound_order_url",
1416
+ # type: "STRING",
1417
+ # is_required: false,
1418
+ # },
1419
+ # {
1420
+ # name: "order_creation_date",
1421
+ # type: "TIMESTAMP",
1422
+ # is_required: false,
1423
+ # },
1424
+ # {
1425
+ # name: "company_id",
1426
+ # type: "STRING",
1427
+ # is_required: false,
1428
+ # },
1429
+ # {
1430
+ # name: "to_site_id",
1431
+ # type: "STRING",
1432
+ # is_required: false,
1433
+ # },
1434
+ # {
1435
+ # name: "order_currency_uom",
1436
+ # type: "STRING",
1437
+ # is_required: false,
1438
+ # },
1439
+ # {
1440
+ # name: "vendor_currency_uom",
1441
+ # type: "STRING",
1442
+ # is_required: false,
1443
+ # },
1444
+ # {
1445
+ # name: "exchange_rate",
1446
+ # type: "DOUBLE",
1447
+ # is_required: false,
1448
+ # },
1449
+ # {
1450
+ # name: "exchange_rate_date",
1451
+ # type: "TIMESTAMP",
1452
+ # is_required: false,
1453
+ # },
1454
+ # {
1455
+ # name: "incoterm",
1456
+ # type: "STRING",
1457
+ # is_required: false,
1458
+ # },
1459
+ # {
1460
+ # name: "incoterm2",
1461
+ # type: "STRING",
1462
+ # is_required: false,
1463
+ # },
1464
+ # {
1465
+ # name: "incoterm_location_1",
1466
+ # type: "STRING",
1467
+ # is_required: false,
1468
+ # },
1469
+ # {
1470
+ # name: "incoterm_location_2",
1471
+ # type: "STRING",
1472
+ # is_required: false,
1473
+ # },
1474
+ # {
1475
+ # name: "submitted_date",
1476
+ # type: "TIMESTAMP",
1477
+ # is_required: false,
1478
+ # },
1479
+ # {
1480
+ # name: "agreement_start_date",
1481
+ # type: "TIMESTAMP",
1482
+ # is_required: false,
1483
+ # },
1484
+ # {
1485
+ # name: "agreement_end_date",
1486
+ # type: "TIMESTAMP",
1487
+ # is_required: false,
1488
+ # },
1489
+ # {
1490
+ # name: "shipping_instr_code",
1491
+ # type: "STRING",
1492
+ # is_required: false,
1493
+ # },
1494
+ # {
1495
+ # name: "payment_terms_code",
1496
+ # type: "STRING",
1497
+ # is_required: false,
1498
+ # },
1499
+ # {
1500
+ # name: "std_terms_agreement",
1501
+ # type: "STRING",
1502
+ # is_required: false,
1503
+ # },
1504
+ # {
1505
+ # name: "std_terms_agreement_ver",
1506
+ # type: "STRING",
1507
+ # is_required: false,
1508
+ # },
1509
+ # {
1510
+ # name: "agreement_number",
1511
+ # type: "STRING",
1512
+ # is_required: false,
1513
+ # },
1514
+ # {
1515
+ # name: "source",
1516
+ # type: "STRING",
1517
+ # is_required: false,
1518
+ # },
1519
+ # {
1520
+ # name: "source_update_dttm",
1521
+ # type: "TIMESTAMP",
1522
+ # is_required: false,
1523
+ # },
1524
+ # {
1525
+ # name: "source_event_id",
1526
+ # type: "STRING",
1527
+ # is_required: false,
1528
+ # },
1529
+ # {
1530
+ # name: "db_creation_dttm",
1531
+ # type: "TIMESTAMP",
1532
+ # is_required: false,
1533
+ # },
1534
+ # {
1535
+ # name: "db_updation_dttm",
1536
+ # type: "TIMESTAMP",
1537
+ # is_required: false,
1538
+ # },
1539
+ # ],
1540
+ # },
1541
+ # },
1542
+ # }
1543
+ #
1544
+ # @example Example: Get properties of an existing custom dataset
1545
+ #
1546
+ # resp = client.get_data_lake_dataset({
1547
+ # name: "my_dataset",
1548
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
1549
+ # namespace: "default",
1550
+ # })
1551
+ #
1552
+ # resp.to_h outputs the following:
1553
+ # {
1554
+ # dataset: {
1555
+ # name: "my_dataset",
1556
+ # arn: "arn:aws:scn:us-east-1:012345678910:instance/1877dd20-dee9-4639-8e99-cb67acf21fe5/namespaces/default/datasets/my_dataset",
1557
+ # created_time: Time.parse(1727116807.751),
1558
+ # description: "This is a custom dataset",
1559
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
1560
+ # last_modified_time: Time.parse(1727116807.751),
1561
+ # namespace: "default",
1562
+ # schema: {
1563
+ # name: "MyDataset",
1564
+ # fields: [
1565
+ # {
1566
+ # name: "id",
1567
+ # type: "INT",
1568
+ # is_required: true,
1569
+ # },
1570
+ # {
1571
+ # name: "description",
1572
+ # type: "STRING",
1573
+ # is_required: true,
1574
+ # },
1575
+ # {
1576
+ # name: "price",
1577
+ # type: "DOUBLE",
1578
+ # is_required: false,
1579
+ # },
1580
+ # {
1581
+ # name: "creation_time",
1582
+ # type: "TIMESTAMP",
1583
+ # is_required: false,
1584
+ # },
1585
+ # ],
1586
+ # },
1587
+ # },
1588
+ # }
1589
+ #
1590
+ # @example Request syntax with placeholder values
1591
+ #
1592
+ # resp = client.get_data_lake_dataset({
1593
+ # instance_id: "UUID", # required
1594
+ # namespace: "DataLakeDatasetNamespace", # required
1595
+ # name: "DataLakeDatasetName", # required
1596
+ # })
1597
+ #
1598
+ # @example Response structure
1599
+ #
1600
+ # resp.dataset.instance_id #=> String
1601
+ # resp.dataset.namespace #=> String
1602
+ # resp.dataset.name #=> String
1603
+ # resp.dataset.arn #=> String
1604
+ # resp.dataset.schema.name #=> String
1605
+ # resp.dataset.schema.fields #=> Array
1606
+ # resp.dataset.schema.fields[0].name #=> String
1607
+ # resp.dataset.schema.fields[0].type #=> String, one of "INT", "DOUBLE", "STRING", "TIMESTAMP"
1608
+ # resp.dataset.schema.fields[0].is_required #=> Boolean
1609
+ # resp.dataset.description #=> String
1610
+ # resp.dataset.created_time #=> Time
1611
+ # resp.dataset.last_modified_time #=> Time
1612
+ #
1613
+ # @see http://docs.aws.amazon.com/goto/WebAPI/supplychain-2024-01-01/GetDataLakeDataset AWS API Documentation
1614
+ #
1615
+ # @overload get_data_lake_dataset(params = {})
1616
+ # @param [Hash] params ({})
1617
+ def get_data_lake_dataset(params = {}, options = {})
1618
+ req = build_request(:get_data_lake_dataset, params)
1619
+ req.send_request(options)
1620
+ end
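A brief sketch of reading the returned schema, splitting required from optional fields; identifiers reuse the custom-dataset example above:

    client = Aws::SupplyChain::Client.new(region: "us-east-1")
    resp = client.get_data_lake_dataset(
      instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
      namespace: "default",
      name: "my_dataset"
    )

    # Partition schema fields on the is_required flag shown in the response structure.
    required, optional = resp.dataset.schema.fields.partition(&:is_required)
    puts "required: #{required.map(&:name).join(', ')}"
    puts "optional: #{optional.map(&:name).join(', ')}"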
1621
+
1622
+ # Lists all the DataIntegrationFlows in a paginated way.
1623
+ #
1624
+ # @option params [required, String] :instance_id
1625
+ # The Amazon Web Services Supply Chain instance identifier.
1626
+ #
1627
+ # @option params [String] :next_token
1628
+ # The pagination token to fetch the next page of the
1629
+ # DataIntegrationFlows.
1630
+ #
1631
+ # @option params [Integer] :max_results
1632
+ # Specify the maximum number of DataIntegrationFlows to fetch in one
1633
+ # paginated request.
1634
+ #
1635
+ # @return [Types::ListDataIntegrationFlowsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
1636
+ #
1637
+ # * {Types::ListDataIntegrationFlowsResponse#flows #flows} => Array&lt;Types::DataIntegrationFlow&gt;
1638
+ # * {Types::ListDataIntegrationFlowsResponse#next_token #next_token} => String
1639
+ #
1640
+ # The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
1641
+ #
1642
+ #
1643
+ # @example Example: Successful ListDataIntegrationFlow
1644
+ #
1645
+ # resp = client.list_data_integration_flows({
1646
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
1647
+ # })
1648
+ #
1649
+ # resp.to_h outputs the following:
1650
+ # {
1651
+ # flows: [
1652
+ # {
1653
+ # name: "testStagingFlow",
1654
+ # created_time: Time.parse(1724956400.44),
1655
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
1656
+ # last_modified_time: Time.parse(1724956400.44),
1657
+ # sources: [
1658
+ # {
1659
+ # s3_source: {
1660
+ # bucket_name: "aws-supply-chain-data-b8c7bb28-a576-4334-b481-6d6e8e47371f",
1661
+ # prefix: "example-prefix",
1662
+ # },
1663
+ # source_name: "testSourceName",
1664
+ # source_type: "S3",
1665
+ # },
1666
+ # ],
1667
+ # target: {
1668
+ # dataset_target: {
1669
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset",
1670
+ # },
1671
+ # target_type: "DATASET",
1672
+ # },
1673
+ # transformation: {
1674
+ # sql_transformation: {
1675
+ # query: "SELECT * FROM testSourceName",
1676
+ # },
1677
+ # transformation_type: "SQL",
1678
+ # },
1679
+ # },
1680
+ # {
1681
+ # name: "trading-partner",
1682
+ # created_time: Time.parse(17235763506.88),
1683
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
1684
+ # last_modified_time: Time.parse(17235763506.88),
1685
+ # sources: [
1686
+ # {
1687
+ # dataset_source: {
1688
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset1",
1689
+ # },
1690
+ # source_name: "testSourceName1",
1691
+ # source_type: "DATASET",
1692
+ # },
1693
+ # {
1694
+ # dataset_source: {
1695
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset2",
1696
+ # },
1697
+ # source_name: "testSourceName2",
1698
+ # source_type: "DATASET",
1699
+ # },
1700
+ # ],
1701
+ # target: {
1702
+ # dataset_target: {
1703
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/asc/datasets/trading_partner",
1704
+ # },
1705
+ # target_type: "DATASET",
1706
+ # },
1707
+ # transformation: {
1708
+ # sql_transformation: {
1709
+ # query: "SELECT S1.id AS id, S1.poc_org_unit_description AS description, S1.company_id AS company_id, S1.tpartner_type AS tpartner_type, S1.geo_id AS geo_id, S1.eff_start_date AS eff_start_date, S1.eff_end_date AS eff_end_date FROM testSourceName1 AS S1 LEFT JOIN testSourceName2 as S2 ON S1.id=S2.id",
1710
+ # },
1711
+ # transformation_type: "SQL",
1712
+ # },
1713
+ # },
1714
+ # ],
1715
+ # }
1716
+ #
1717
+ # @example Request syntax with placeholder values
1718
+ #
1719
+ # resp = client.list_data_integration_flows({
1720
+ # instance_id: "UUID", # required
1721
+ # next_token: "DataIntegrationFlowNextToken",
1722
+ # max_results: 1,
1723
+ # })
1724
+ #
1725
+ # @example Response structure
1726
+ #
1727
+ # resp.flows #=> Array
1728
+ # resp.flows[0].instance_id #=> String
1729
+ # resp.flows[0].name #=> String
1730
+ # resp.flows[0].sources #=> Array
1731
+ # resp.flows[0].sources[0].source_type #=> String, one of "S3", "DATASET"
1732
+ # resp.flows[0].sources[0].source_name #=> String
1733
+ # resp.flows[0].sources[0].s3_source.bucket_name #=> String
1734
+ # resp.flows[0].sources[0].s3_source.prefix #=> String
1735
+ # resp.flows[0].sources[0].s3_source.options.file_type #=> String, one of "CSV", "PARQUET", "JSON"
1736
+ # resp.flows[0].sources[0].dataset_source.dataset_identifier #=> String
1737
+ # resp.flows[0].sources[0].dataset_source.options.load_type #=> String, one of "INCREMENTAL", "REPLACE"
1738
+ # resp.flows[0].sources[0].dataset_source.options.dedupe_records #=> Boolean
1739
+ # resp.flows[0].transformation.transformation_type #=> String, one of "SQL", "NONE"
1740
+ # resp.flows[0].transformation.sql_transformation.query #=> String
1741
+ # resp.flows[0].target.target_type #=> String, one of "S3", "DATASET"
1742
+ # resp.flows[0].target.s3_target.bucket_name #=> String
1743
+ # resp.flows[0].target.s3_target.prefix #=> String
1744
+ # resp.flows[0].target.s3_target.options.file_type #=> String, one of "CSV", "PARQUET", "JSON"
1745
+ # resp.flows[0].target.dataset_target.dataset_identifier #=> String
1746
+ # resp.flows[0].target.dataset_target.options.load_type #=> String, one of "INCREMENTAL", "REPLACE"
1747
+ # resp.flows[0].target.dataset_target.options.dedupe_records #=> Boolean
1748
+ # resp.flows[0].created_time #=> Time
1749
+ # resp.flows[0].last_modified_time #=> Time
1750
+ # resp.next_token #=> String
1751
+ #
1752
+ # @see http://docs.aws.amazon.com/goto/WebAPI/supplychain-2024-01-01/ListDataIntegrationFlows AWS API Documentation
1753
+ #
1754
+ # @overload list_data_integration_flows(params = {})
1755
+ # @param [Hash] params ({})
1756
+ def list_data_integration_flows(params = {}, options = {})
1757
+ req = build_request(:list_data_integration_flows, params)
1758
+ req.send_request(options)
1759
+ end
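As noted above, the returned response is pageable; a minimal sketch that lets Aws::PageableResponse walk the pages instead of handling next_token by hand (instance ID as in the example above):

    client = Aws::SupplyChain::Client.new(region: "us-east-1")

    # Each iteration yields one page; the SDK fetches the next page on demand.
    client.list_data_integration_flows(
      instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
      max_results: 10
    ).each do |page|
      page.flows.each { |flow| puts flow.name }
    end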
1760
+
1761
+ # List the data lake datasets for a specific instance and name space.
1762
+ #
1763
+ # @option params [required, String] :instance_id
1764
+ # The Amazon Web Services Supply Chain instance identifier.
1765
+ #
1766
+ # @option params [required, String] :namespace
1767
+ # The namespace of the dataset. The available values are:
1768
+ #
1769
+ # * asc: for [AWS Supply Chain supported datasets][1].
1770
+ #
1771
+ # * default: for datasets with custom user-defined schemas.
1772
+ #
1773
+ #
1774
+ #
1775
+ # [1]: https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html
1776
+ #
1777
+ # @option params [String] :next_token
1778
+ # The pagination token to fetch the next page of datasets.
1779
+ #
1780
+ # @option params [Integer] :max_results
1781
+ # The max number of datasets to fetch in this paginated request.
1782
+ #
1783
+ # @return [Types::ListDataLakeDatasetsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
1784
+ #
1785
+ # * {Types::ListDataLakeDatasetsResponse#datasets #datasets} => Array&lt;Types::DataLakeDataset&gt;
1786
+ # * {Types::ListDataLakeDatasetsResponse#next_token #next_token} => String
1787
+ #
1788
+ # The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
1789
+ #
1790
+ #
1791
+ # @example Example: List AWS Supply Chain datasets
1792
+ #
1793
+ # resp = client.list_data_lake_datasets({
1794
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
1795
+ # namespace: "asc",
1796
+ # })
1797
+ #
1798
+ # resp.to_h outputs the following:
1799
+ # {
1800
+ # datasets: [
1801
+ # {
1802
+ # name: "inbound_order",
1803
+ # arn: "arn:aws:scn:us-east-1:012345678910:instance/1877dd20-dee9-4639-8e99-cb67acf21fe5/namespaces/asc/datasets/inbound_order",
1804
+ # created_time: Time.parse(1727116807.751),
1805
+ # description: "This is an AWS Supply Chain inbound order dataset",
1806
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
1807
+ # last_modified_time: Time.parse(1727116807.751),
1808
+ # namespace: "asc",
1809
+ # schema: {
1810
+ # name: "InboundOrder",
1811
+ # fields: [
1812
+ # {
1813
+ # name: "id",
1814
+ # type: "STRING",
1815
+ # is_required: true,
1816
+ # },
1817
+ # {
1818
+ # name: "tpartner_id",
1819
+ # type: "STRING",
1820
+ # is_required: true,
1821
+ # },
1822
+ # {
1823
+ # name: "connection_id",
1824
+ # type: "STRING",
1825
+ # is_required: true,
1826
+ # },
1827
+ # {
1828
+ # name: "order_type",
1829
+ # type: "STRING",
1830
+ # is_required: false,
1831
+ # },
1832
+ # {
1833
+ # name: "order_status",
1834
+ # type: "STRING",
1835
+ # is_required: false,
1836
+ # },
1837
+ # {
1838
+ # name: "inbound_order_url",
1839
+ # type: "STRING",
1840
+ # is_required: false,
1841
+ # },
1842
+ # {
1843
+ # name: "order_creation_date",
1844
+ # type: "TIMESTAMP",
1845
+ # is_required: false,
1846
+ # },
1847
+ # {
1848
+ # name: "company_id",
1849
+ # type: "STRING",
1850
+ # is_required: false,
1851
+ # },
1852
+ # {
1853
+ # name: "to_site_id",
1854
+ # type: "STRING",
1855
+ # is_required: false,
1856
+ # },
1857
+ # {
1858
+ # name: "order_currency_uom",
1859
+ # type: "STRING",
1860
+ # is_required: false,
1861
+ # },
1862
+ # {
1863
+ # name: "vendor_currency_uom",
1864
+ # type: "STRING",
1865
+ # is_required: false,
1866
+ # },
1867
+ # {
1868
+ # name: "exchange_rate",
1869
+ # type: "DOUBLE",
1870
+ # is_required: false,
1871
+ # },
1872
+ # {
1873
+ # name: "exchange_rate_date",
1874
+ # type: "TIMESTAMP",
1875
+ # is_required: false,
1876
+ # },
1877
+ # {
1878
+ # name: "incoterm",
1879
+ # type: "STRING",
1880
+ # is_required: false,
1881
+ # },
1882
+ # {
1883
+ # name: "incoterm2",
1884
+ # type: "STRING",
1885
+ # is_required: false,
1886
+ # },
1887
+ # {
1888
+ # name: "incoterm_location_1",
1889
+ # type: "STRING",
1890
+ # is_required: false,
1891
+ # },
1892
+ # {
1893
+ # name: "incoterm_location_2",
1894
+ # type: "STRING",
1895
+ # is_required: false,
1896
+ # },
1897
+ # {
1898
+ # name: "submitted_date",
1899
+ # type: "TIMESTAMP",
1900
+ # is_required: false,
1901
+ # },
1902
+ # {
1903
+ # name: "agreement_start_date",
1904
+ # type: "TIMESTAMP",
1905
+ # is_required: false,
1906
+ # },
1907
+ # {
1908
+ # name: "agreement_end_date",
1909
+ # type: "TIMESTAMP",
1910
+ # is_required: false,
1911
+ # },
1912
+ # {
1913
+ # name: "shipping_instr_code",
1914
+ # type: "STRING",
1915
+ # is_required: false,
1916
+ # },
1917
+ # {
1918
+ # name: "payment_terms_code",
1919
+ # type: "STRING",
1920
+ # is_required: false,
1921
+ # },
1922
+ # {
1923
+ # name: "std_terms_agreement",
1924
+ # type: "STRING",
1925
+ # is_required: false,
1926
+ # },
1927
+ # {
1928
+ # name: "std_terms_agreement_ver",
1929
+ # type: "STRING",
1930
+ # is_required: false,
1931
+ # },
1932
+ # {
1933
+ # name: "agreement_number",
1934
+ # type: "STRING",
1935
+ # is_required: false,
1936
+ # },
1937
+ # {
1938
+ # name: "source",
1939
+ # type: "STRING",
1940
+ # is_required: false,
1941
+ # },
1942
+ # {
1943
+ # name: "source_update_dttm",
1944
+ # type: "TIMESTAMP",
1945
+ # is_required: false,
1946
+ # },
1947
+ # {
1948
+ # name: "source_event_id",
1949
+ # type: "STRING",
1950
+ # is_required: false,
1951
+ # },
1952
+ # {
1953
+ # name: "db_creation_dttm",
1954
+ # type: "TIMESTAMP",
1955
+ # is_required: false,
1956
+ # },
1957
+ # {
1958
+ # name: "db_updation_dttm",
1959
+ # type: "TIMESTAMP",
1960
+ # is_required: false,
1961
+ # },
1962
+ # ],
1963
+ # },
1964
+ # },
1965
+ # ],
1966
+ # }
1967
+ #
1968
+ # @example Example: List custom datasets using pagination
1969
+ #
1970
+ # resp = client.list_data_lake_datasets({
1971
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
1972
+ # max_results: 2,
1973
+ # namespace: "default",
1974
+ # next_token: "next_token_returned_from_previous_list_request",
1975
+ # })
1976
+ #
1977
+ # resp.to_h outputs the following:
1978
+ # {
1979
+ # datasets: [
1980
+ # {
1981
+ # name: "my_dataset",
1982
+ # arn: "arn:aws:scn:us-east-1:012345678910:instance/1877dd20-dee9-4639-8e99-cb67acf21fe5/namespaces/default/datasets/my_dataset",
1983
+ # created_time: Time.parse(1727116807.751),
1984
+ # description: "This is a custom dataset",
1985
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
1986
+ # last_modified_time: Time.parse(1727116807.751),
1987
+ # namespace: "default",
1988
+ # schema: {
1989
+ # name: "MyDataset",
1990
+ # fields: [
1991
+ # {
1992
+ # name: "id",
1993
+ # type: "INT",
1994
+ # is_required: true,
1995
+ # },
1996
+ # {
1997
+ # name: "description",
1998
+ # type: "STRING",
1999
+ # is_required: true,
2000
+ # },
2001
+ # {
2002
+ # name: "price",
2003
+ # type: "DOUBLE",
2004
+ # is_required: false,
2005
+ # },
2006
+ # {
2007
+ # name: "creation_time",
2008
+ # type: "TIMESTAMP",
2009
+ # is_required: false,
2010
+ # },
2011
+ # ],
2012
+ # },
2013
+ # },
2014
+ # {
2015
+ # name: "my_dataset_2",
2016
+ # arn: "arn:aws:scn:us-east-1:012345678910:instance/1877dd20-dee9-4639-8e99-cb67acf21fe5/namespaces/default/datasets/my_dataset_2",
2017
+ # created_time: Time.parse(1727116907.751),
2018
+ # description: "This is a custom dataset 2",
2019
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
2020
+ # last_modified_time: Time.parse(1727116907.751),
2021
+ # namespace: "default",
2022
+ # schema: {
2023
+ # name: "MyDataset2",
2024
+ # fields: [
2025
+ # {
2026
+ # name: "id",
2027
+ # type: "INT",
2028
+ # is_required: true,
2029
+ # },
2030
+ # {
2031
+ # name: "description",
2032
+ # type: "STRING",
2033
+ # is_required: true,
2034
+ # },
2035
+ # ],
2036
+ # },
2037
+ # },
2038
+ # ],
2039
+ # next_token: "next_token_for_next_list_request",
2040
+ # }
2041
+ #
2042
+ # @example Request syntax with placeholder values
2043
+ #
2044
+ # resp = client.list_data_lake_datasets({
2045
+ # instance_id: "UUID", # required
2046
+ # namespace: "DataLakeDatasetNamespace", # required
2047
+ # next_token: "DataLakeDatasetNextToken",
2048
+ # max_results: 1,
2049
+ # })
2050
+ #
2051
+ # @example Response structure
2052
+ #
2053
+ # resp.datasets #=> Array
2054
+ # resp.datasets[0].instance_id #=> String
2055
+ # resp.datasets[0].namespace #=> String
2056
+ # resp.datasets[0].name #=> String
2057
+ # resp.datasets[0].arn #=> String
2058
+ # resp.datasets[0].schema.name #=> String
2059
+ # resp.datasets[0].schema.fields #=> Array
2060
+ # resp.datasets[0].schema.fields[0].name #=> String
2061
+ # resp.datasets[0].schema.fields[0].type #=> String, one of "INT", "DOUBLE", "STRING", "TIMESTAMP"
2062
+ # resp.datasets[0].schema.fields[0].is_required #=> Boolean
2063
+ # resp.datasets[0].description #=> String
2064
+ # resp.datasets[0].created_time #=> Time
2065
+ # resp.datasets[0].last_modified_time #=> Time
2066
+ # resp.next_token #=> String
2067
+ #
2068
+ # @see http://docs.aws.amazon.com/goto/WebAPI/supplychain-2024-01-01/ListDataLakeDatasets AWS API Documentation
2069
+ #
2070
+ # @overload list_data_lake_datasets(params = {})
2071
+ # @param [Hash] params ({})
2072
+ def list_data_lake_datasets(params = {}, options = {})
2073
+ req = build_request(:list_data_lake_datasets, params)
2074
+ req.send_request(options)
2075
+ end
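The same listing can also be paginated by hand with next_token, mirroring the "List custom datasets using pagination" example above; the instance ID and page size are placeholders:

    client = Aws::SupplyChain::Client.new(region: "us-east-1")

    names = []
    params = {
      instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
      namespace: "default",
      max_results: 2,
    }
    loop do
      page = client.list_data_lake_datasets(params)
      names.concat(page.datasets.map(&:name))
      break unless page.next_token
      # Carry the returned token into the next request.
      params[:next_token] = page.next_token
    end
    names #=> all dataset names in the "default" name space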
2076
+
2077
+ # List all the tags for an Amazon Web Services Supply Chain resource.
2078
+ #
2079
+ # @option params [required, String] :resource_arn
2080
+ # The Amazon Web Services Supply chain resource ARN that needs tags to
2081
+ # be listed.
2082
+ #
2083
+ # @return [Types::ListTagsForResourceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
2084
+ #
2085
+ # * {Types::ListTagsForResourceResponse#tags #tags} => Hash&lt;String,String&gt;
2086
+ #
2087
+ #
2088
+ # @example Example: Successful ListTagsForResource
2089
+ #
2090
+ # resp = client.list_tags_for_resource({
2091
+ # resource_arn: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/data-integration-flows/my_flow1",
2092
+ # })
2093
+ #
2094
+ # resp.to_h outputs the following:
2095
+ # {
2096
+ # tags: {
2097
+ # "tagKey1" => "tagValue1",
2098
+ # },
2099
+ # }
2100
+ #
2101
+ # @example Request syntax with placeholder values
2102
+ #
2103
+ # resp = client.list_tags_for_resource({
2104
+ # resource_arn: "AscResourceArn", # required
2105
+ # })
2106
+ #
2107
+ # @example Response structure
2108
+ #
2109
+ # resp.tags #=> Hash
2110
+ # resp.tags["TagKey"] #=> String
2111
+ #
2112
+ # @see http://docs.aws.amazon.com/goto/WebAPI/supplychain-2024-01-01/ListTagsForResource AWS API Documentation
2113
+ #
2114
+ # @overload list_tags_for_resource(params = {})
2115
+ # @param [Hash] params ({})
2116
+ def list_tags_for_resource(params = {}, options = {})
2117
+ req = build_request(:list_tags_for_resource, params)
2118
+ req.send_request(options)
2119
+ end
2120
+
584
2121
  # Send the transactional data payload for the event with real-time data
585
2122
  # for analysis or monitoring. The real-time data events are stored in an
586
2123
  # Amazon Web Services service before being processed and stored in data
@@ -869,6 +2406,624 @@ module Aws::SupplyChain
869
2406
  req.send_request(options)
870
2407
  end
871
2408
 
2409
+ # Create tags for an Amazon Web Services Supply chain resource.
2410
+ #
2411
+ # @option params [required, String] :resource_arn
2412
+ # The Amazon Web Services Supply chain resource ARN that needs to be
2413
+ # tagged.
2414
+ #
2415
+ # @option params [required, Hash<String,String>] :tags
2416
+ # The tags of the Amazon Web Services Supply chain resource to be
2417
+ # created.
2418
+ #
2419
+ # @return [Struct] Returns an empty {Seahorse::Client::Response response}.
2420
+ #
2421
+ #
2422
+ # @example Example: Successful TagResource
2423
+ #
2424
+ # resp = client.tag_resource({
2425
+ # resource_arn: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/data-integration-flows/my_flow1",
2426
+ # tags: {
2427
+ # "tagKey1" => "tagValue1",
2428
+ # },
2429
+ # })
2430
+ #
2431
+ # resp.to_h outputs the following:
2432
+ # {
2433
+ # }
2434
+ #
2435
+ # @example Request syntax with placeholder values
2436
+ #
2437
+ # resp = client.tag_resource({
2438
+ # resource_arn: "AscResourceArn", # required
2439
+ # tags: { # required
2440
+ # "TagKey" => "TagValue",
2441
+ # },
2442
+ # })
2443
+ #
2444
+ # @see http://docs.aws.amazon.com/goto/WebAPI/supplychain-2024-01-01/TagResource AWS API Documentation
2445
+ #
2446
+ # @overload tag_resource(params = {})
2447
+ # @param [Hash] params ({})
2448
+ def tag_resource(params = {}, options = {})
2449
+ req = build_request(:tag_resource, params)
2450
+ req.send_request(options)
2451
+ end
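ListTagsForResource and TagResource combine naturally; a sketch that adds a tag only when it is not already present (the ARN matches the examples above, while the "costCenter" key and value are hypothetical):

    client = Aws::SupplyChain::Client.new(region: "us-east-1")
    arn = "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/data-integration-flows/my_flow1"

    # resp.tags is a plain Hash per the response structure above.
    existing = client.list_tags_for_resource(resource_arn: arn).tags
    unless existing.key?("costCenter")
      client.tag_resource(resource_arn: arn, tags: { "costCenter" => "12345" })
    end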
2452
+
2453
+ # Delete tags for an Amazon Web Services Supply chain resource.
2454
+ #
2455
+ # @option params [required, String] :resource_arn
2456
+ # The Amazon Web Services Supply chain resource ARN that needs to be
2457
+ # untagged.
2458
+ #
2459
+ # @option params [required, Array<String>] :tag_keys
2460
+ # The list of tag keys to be deleted for an Amazon Web Services Supply
2461
+ # Chain resource.
2462
+ #
2463
+ # @return [Struct] Returns an empty {Seahorse::Client::Response response}.
2464
+ #
2465
+ #
2466
+ # @example Example: Successful UntagResource
2467
+ #
2468
+ # resp = client.untag_resource({
2469
+ # resource_arn: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/data-integration-flows/my_flow1",
2470
+ # tag_keys: [
2471
+ # "tagKey1",
2472
+ # ],
2473
+ # })
2474
+ #
2475
+ # resp.to_h outputs the following:
2476
+ # {
2477
+ # }
2478
+ #
2479
+ # @example Request syntax with placeholder values
2480
+ #
2481
+ # resp = client.untag_resource({
2482
+ # resource_arn: "AscResourceArn", # required
2483
+ # tag_keys: ["TagKey"], # required
2484
+ # })
2485
+ #
2486
+ # @see http://docs.aws.amazon.com/goto/WebAPI/supplychain-2024-01-01/UntagResource AWS API Documentation
2487
+ #
2488
+ # @overload untag_resource(params = {})
2489
+ # @param [Hash] params ({})
2490
+ def untag_resource(params = {}, options = {})
2491
+ req = build_request(:untag_resource, params)
2492
+ req.send_request(options)
2493
+ end
2494
+
2495
+ # Update the DataIntegrationFlow.
2496
+ #
2497
+ # @option params [required, String] :instance_id
2498
+ # The Amazon Web Services Supply Chain instance identifier.
2499
+ #
2500
+ # @option params [required, String] :name
2501
+ # The name of the DataIntegrationFlow to be updated.
2502
+ #
2503
+ # @option params [Array<Types::DataIntegrationFlowSource>] :sources
2504
+ # The new source configurations for the DataIntegrationFlow.
2505
+ #
2506
+ # @option params [Types::DataIntegrationFlowTransformation] :transformation
2507
+ # The new transformation configurations for the DataIntegrationFlow.
2508
+ #
2509
+ # @option params [Types::DataIntegrationFlowTarget] :target
2510
+ # The new target configurations for the DataIntegrationFlow.
2511
+ #
2512
+ # @return [Types::UpdateDataIntegrationFlowResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
2513
+ #
2514
+ # * {Types::UpdateDataIntegrationFlowResponse#flow #flow} => Types::DataIntegrationFlow
2515
+ #
2516
+ #
2517
+ # @example Example: Successful UpdateDataIntegrationFlow for s3 to dataset flow to update SQL transformation
2518
+ #
2519
+ # resp = client.update_data_integration_flow({
2520
+ # name: "testStagingFlow",
2521
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
2522
+ # sources: [
2523
+ # {
2524
+ # s3_source: {
2525
+ # bucket_name: "aws-supply-chain-data-b8c7bb28-a576-4334-b481-6d6e8e47371f",
2526
+ # prefix: "example-prefix",
2527
+ # },
2528
+ # source_name: "testSourceName",
2529
+ # source_type: "S3",
2530
+ # },
2531
+ # ],
2532
+ # target: {
2533
+ # dataset_target: {
2534
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset",
2535
+ # },
2536
+ # target_type: "DATASET",
2537
+ # },
2538
+ # transformation: {
2539
+ # sql_transformation: {
2540
+ # query: "SELECT connection_id, bukrs AS id, txtmd AS description FROM testSourceName WHERE langu = 'E'",
2541
+ # },
2542
+ # transformation_type: "SQL",
2543
+ # },
2544
+ # })
2545
+ #
2546
+ # resp.to_h outputs the following:
2547
+ # {
2548
+ # flow: {
2549
+ # name: "testStagingFlow",
2550
+ # created_time: Time.at(1724956400.44),
2551
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
2552
+ # last_modified_time: Time.at(1732456405.77),
2553
+ # sources: [
2554
+ # {
2555
+ # s3_source: {
2556
+ # bucket_name: "aws-supply-chain-data-b8c7bb28-a576-4334-b481-6d6e8e47371f",
2557
+ # prefix: "example-prefix",
2558
+ # },
2559
+ # source_name: "testSourceName",
2560
+ # source_type: "S3",
2561
+ # },
2562
+ # ],
2563
+ # target: {
2564
+ # dataset_target: {
2565
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset",
2566
+ # },
2567
+ # target_type: "DATASET",
2568
+ # },
2569
+ # transformation: {
2570
+ # sql_transformation: {
2571
+ # query: "SELECT connection_id, bukrs AS id, txtmd AS description FROM testSourceName WHERE langu = 'E'",
2572
+ # },
2573
+ # transformation_type: "SQL",
2574
+ # },
2575
+ # },
2576
+ # }
2577
+ #
2578
+ # @example Example: Successful UpdateDataIntegrationFlow for dataset to dataset flow to update sources
2579
+ #
2580
+ # resp = client.update_data_integration_flow({
2581
+ # name: "trading-partner",
2582
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
2583
+ # sources: [
2584
+ # {
2585
+ # dataset_source: {
2586
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset1",
2587
+ # },
2588
+ # source_name: "testSourceName1",
2589
+ # source_type: "DATASET",
2590
+ # },
2591
+ # {
2592
+ # dataset_source: {
2593
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset2_updated",
2594
+ # },
2595
+ # source_name: "testSourceName2",
2596
+ # source_type: "DATASET",
2597
+ # },
2598
+ # ],
2599
+ # target: {
2600
+ # dataset_target: {
2601
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/asc/datasets/trading_partner",
2602
+ # },
2603
+ # target_type: "DATASET",
2604
+ # },
2605
+ # transformation: {
2606
+ # sql_transformation: {
2607
+ # query: "SELECT S1.id AS id, S1.poc_org_unit_description AS description, S1.company_id AS company_id, S1.tpartner_type AS tpartner_type, S1.geo_id AS geo_id, S1.eff_start_date AS eff_start_date, S1.eff_end_date AS eff_end_date FROM testSourceName1 AS S1 LEFT JOIN testSourceName2 as S2 ON S1.id=S2.id",
2608
+ # },
2609
+ # transformation_type: "SQL",
2610
+ # },
2611
+ # })
2612
+ #
2613
+ # resp.to_h outputs the following:
2614
+ # {
2615
+ # flow: {
2616
+ # name: "trading-partner",
2617
+ # created_time: Time.at(1724956400.44),
2618
+ # instance_id: "8850c54e-e187-4fa7-89d4-6370f165174d",
2619
+ # last_modified_time: Time.at(1732456405.77),
2620
+ # sources: [
2621
+ # {
2622
+ # dataset_source: {
2623
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset1",
2624
+ # },
2625
+ # source_name: "testSourceName1",
2626
+ # source_type: "DATASET",
2627
+ # },
2628
+ # {
2629
+ # dataset_source: {
2630
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/default/datasets/my_staging_dataset2_updated",
2631
+ # },
2632
+ # source_name: "testSourceName2",
2633
+ # source_type: "DATASET",
2634
+ # },
2635
+ # ],
2636
+ # target: {
2637
+ # dataset_target: {
2638
+ # dataset_identifier: "arn:aws:scn:us-east-1:123456789012:instance/8850c54e-e187-4fa7-89d4-6370f165174d/namespaces/asc/datasets/trading_partner",
2639
+ # },
2640
+ # target_type: "DATASET",
2641
+ # },
2642
+ # transformation: {
2643
+ # sql_transformation: {
2644
+ # query: "SELECT S1.id AS id, S1.poc_org_unit_description AS description, S1.company_id AS company_id, S1.tpartner_type AS tpartner_type, S1.geo_id AS geo_id, S1.eff_start_date AS eff_start_date, S1.eff_end_date AS eff_end_date FROM testSourceName1 AS S1 LEFT JOIN testSourceName2 as S2 ON S1.id=S2.id",
2645
+ # },
2646
+ # transformation_type: "SQL",
2647
+ # },
2648
+ # },
2649
+ # }
2650
+ #
2651
+ # @example Request syntax with placeholder values
2652
+ #
2653
+ # resp = client.update_data_integration_flow({
2654
+ # instance_id: "UUID", # required
2655
+ # name: "DataIntegrationFlowName", # required
2656
+ # sources: [
2657
+ # {
2658
+ # source_type: "S3", # required, accepts S3, DATASET
2659
+ # source_name: "DataIntegrationFlowSourceName", # required
2660
+ # s3_source: {
2661
+ # bucket_name: "S3BucketName", # required
2662
+ # prefix: "DataIntegrationFlowS3Prefix", # required
2663
+ # options: {
2664
+ # file_type: "CSV", # accepts CSV, PARQUET, JSON
2665
+ # },
2666
+ # },
2667
+ # dataset_source: {
2668
+ # dataset_identifier: "DatasetIdentifier", # required
2669
+ # options: {
2670
+ # load_type: "INCREMENTAL", # accepts INCREMENTAL, REPLACE
2671
+ # dedupe_records: false,
2672
+ # },
2673
+ # },
2674
+ # },
2675
+ # ],
2676
+ # transformation: {
2677
+ # transformation_type: "SQL", # required, accepts SQL, NONE
2678
+ # sql_transformation: {
2679
+ # query: "DataIntegrationFlowSQLQuery", # required
2680
+ # },
2681
+ # },
2682
+ # target: {
2683
+ # target_type: "S3", # required, accepts S3, DATASET
2684
+ # s3_target: {
2685
+ # bucket_name: "S3BucketName", # required
2686
+ # prefix: "DataIntegrationFlowS3Prefix", # required
2687
+ # options: {
2688
+ # file_type: "CSV", # accepts CSV, PARQUET, JSON
2689
+ # },
2690
+ # },
2691
+ # dataset_target: {
2692
+ # dataset_identifier: "DatasetIdentifier", # required
2693
+ # options: {
2694
+ # load_type: "INCREMENTAL", # accepts INCREMENTAL, REPLACE
2695
+ # dedupe_records: false,
2696
+ # },
2697
+ # },
2698
+ # },
2699
+ # })
2700
+ #
2701
+ # @example Response structure
2702
+ #
2703
+ # resp.flow.instance_id #=> String
2704
+ # resp.flow.name #=> String
2705
+ # resp.flow.sources #=> Array
2706
+ # resp.flow.sources[0].source_type #=> String, one of "S3", "DATASET"
2707
+ # resp.flow.sources[0].source_name #=> String
2708
+ # resp.flow.sources[0].s3_source.bucket_name #=> String
2709
+ # resp.flow.sources[0].s3_source.prefix #=> String
2710
+ # resp.flow.sources[0].s3_source.options.file_type #=> String, one of "CSV", "PARQUET", "JSON"
2711
+ # resp.flow.sources[0].dataset_source.dataset_identifier #=> String
2712
+ # resp.flow.sources[0].dataset_source.options.load_type #=> String, one of "INCREMENTAL", "REPLACE"
2713
+ # resp.flow.sources[0].dataset_source.options.dedupe_records #=> Boolean
2714
+ # resp.flow.transformation.transformation_type #=> String, one of "SQL", "NONE"
2715
+ # resp.flow.transformation.sql_transformation.query #=> String
2716
+ # resp.flow.target.target_type #=> String, one of "S3", "DATASET"
2717
+ # resp.flow.target.s3_target.bucket_name #=> String
2718
+ # resp.flow.target.s3_target.prefix #=> String
2719
+ # resp.flow.target.s3_target.options.file_type #=> String, one of "CSV", "PARQUET", "JSON"
2720
+ # resp.flow.target.dataset_target.dataset_identifier #=> String
2721
+ # resp.flow.target.dataset_target.options.load_type #=> String, one of "INCREMENTAL", "REPLACE"
2722
+ # resp.flow.target.dataset_target.options.dedupe_records #=> Boolean
2723
+ # resp.flow.created_time #=> Time
2724
+ # resp.flow.last_modified_time #=> Time
2725
+ #
2726
+ # @see http://docs.aws.amazon.com/goto/WebAPI/supplychain-2024-01-01/UpdateDataIntegrationFlow AWS API Documentation
2727
+ #
2728
+ # @overload update_data_integration_flow(params = {})
2729
+ # @param [Hash] params ({})
2730
+ def update_data_integration_flow(params = {}, options = {})
2731
+ req = build_request(:update_data_integration_flow, params)
2732
+ req.send_request(options)
2733
+ end
2734
+
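# --- Editorial sketch (not part of the packaged diff above) ---
# A trimmed, hedged variant of the UpdateDataIntegrationFlow examples above:
# update only the SQL transformation of an existing flow. The instance ID, flow
# name, and query are hypothetical. Per the request syntax above, only
# :instance_id and :name are marked required; whether the service accepts such
# a partial update or expects the full flow definition is not stated here, so
# adjust the request accordingly.
require 'aws-sdk-supplychain'

client = Aws::SupplyChain::Client.new(region: "us-east-1") # hypothetical region

resp = client.update_data_integration_flow(
  instance_id: "11111111-2222-3333-4444-555555555555", # hypothetical instance
  name: "example_flow",                                # hypothetical flow name
  transformation: {
    transformation_type: "SQL",
    sql_transformation: {
      query: "SELECT id, description FROM example_source" # hypothetical query
    }
  }
)

# The response carries the updated flow definition.
puts resp.flow.name
puts resp.flow.transformation.sql_transformation.query
puts resp.flow.last_modified_time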
2735
+ # Update a data lake dataset.
2736
+ #
2737
+ # @option params [required, String] :instance_id
2738
+ # The Amazon Web Services Supply Chain instance identifier.
2739
+ #
2740
+ # @option params [required, String] :namespace
2741
+ # The namespace of the dataset. The available values are:
2742
+ #
2743
+ # * **asc** - For information on the Amazon Web Services Supply Chain
2744
+ # supported datasets, see
2745
+ # [https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html][1].
2746
+ #
2747
+ # * **default** - For datasets with custom user-defined schemas.
2748
+ #
2749
+ #
2750
+ #
2751
+ # [1]: https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html
2752
+ #
2753
+ # @option params [required, String] :name
2754
+ # The name of the dataset. For the **asc** namespace, the name must be one
2755
+ # of the supported data entities under
2756
+ # [https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html][1].
2757
+ #
2758
+ #
2759
+ #
2760
+ # [1]: https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html
2761
+ #
2762
+ # @option params [String] :description
2763
+ # The updated description of the data lake dataset.
2764
+ #
2765
+ # @return [Types::UpdateDataLakeDatasetResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
2766
+ #
2767
+ # * {Types::UpdateDataLakeDatasetResponse#dataset #dataset} => Types::DataLakeDataset
2768
+ #
2769
+ #
2770
+ # @example Example: Update description of an existing AWS Supply Chain inbound order dataset
2771
+ #
2772
+ # resp = client.update_data_lake_dataset({
2773
+ # name: "inbound_order",
2774
+ # description: "This is an updated AWS Supply Chain inbound order dataset",
2775
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
2776
+ # namespace: "asc",
2777
+ # })
2778
+ #
2779
+ # resp.to_h outputs the following:
2780
+ # {
2781
+ # dataset: {
2782
+ # name: "inbound_order",
2783
+ # arn: "arn:aws:scn:us-east-1:012345678910:instance/1877dd20-dee9-4639-8e99-cb67acf21fe5/namespaces/asc/datasets/inbound_order",
2784
+ # created_time: Time.at(1727116807.751),
2785
+ # description: "This is an updated AWS Supply Chain inbound order dataset",
2786
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
2787
+ # last_modified_time: Time.at(1727117453.568),
2788
+ # namespace: "asc",
2789
+ # schema: {
2790
+ # name: "InboundOrder",
2791
+ # fields: [
2792
+ # {
2793
+ # name: "id",
2794
+ # type: "STRING",
2795
+ # is_required: true,
2796
+ # },
2797
+ # {
2798
+ # name: "tpartner_id",
2799
+ # type: "STRING",
2800
+ # is_required: true,
2801
+ # },
2802
+ # {
2803
+ # name: "connection_id",
2804
+ # type: "STRING",
2805
+ # is_required: true,
2806
+ # },
2807
+ # {
2808
+ # name: "order_type",
2809
+ # type: "STRING",
2810
+ # is_required: false,
2811
+ # },
2812
+ # {
2813
+ # name: "order_status",
2814
+ # type: "STRING",
2815
+ # is_required: false,
2816
+ # },
2817
+ # {
2818
+ # name: "inbound_order_url",
2819
+ # type: "STRING",
2820
+ # is_required: false,
2821
+ # },
2822
+ # {
2823
+ # name: "order_creation_date",
2824
+ # type: "TIMESTAMP",
2825
+ # is_required: false,
2826
+ # },
2827
+ # {
2828
+ # name: "company_id",
2829
+ # type: "STRING",
2830
+ # is_required: false,
2831
+ # },
2832
+ # {
2833
+ # name: "to_site_id",
2834
+ # type: "STRING",
2835
+ # is_required: false,
2836
+ # },
2837
+ # {
2838
+ # name: "order_currency_uom",
2839
+ # type: "STRING",
2840
+ # is_required: false,
2841
+ # },
2842
+ # {
2843
+ # name: "vendor_currency_uom",
2844
+ # type: "STRING",
2845
+ # is_required: false,
2846
+ # },
2847
+ # {
2848
+ # name: "exchange_rate",
2849
+ # type: "DOUBLE",
2850
+ # is_required: false,
2851
+ # },
2852
+ # {
2853
+ # name: "exchange_rate_date",
2854
+ # type: "TIMESTAMP",
2855
+ # is_required: false,
2856
+ # },
2857
+ # {
2858
+ # name: "incoterm",
2859
+ # type: "STRING",
2860
+ # is_required: false,
2861
+ # },
2862
+ # {
2863
+ # name: "incoterm2",
2864
+ # type: "STRING",
2865
+ # is_required: false,
2866
+ # },
2867
+ # {
2868
+ # name: "incoterm_location_1",
2869
+ # type: "STRING",
2870
+ # is_required: false,
2871
+ # },
2872
+ # {
2873
+ # name: "incoterm_location_2",
2874
+ # type: "STRING",
2875
+ # is_required: false,
2876
+ # },
2877
+ # {
2878
+ # name: "submitted_date",
2879
+ # type: "TIMESTAMP",
2880
+ # is_required: false,
2881
+ # },
2882
+ # {
2883
+ # name: "agreement_start_date",
2884
+ # type: "TIMESTAMP",
2885
+ # is_required: false,
2886
+ # },
2887
+ # {
2888
+ # name: "agreement_end_date",
2889
+ # type: "TIMESTAMP",
2890
+ # is_required: false,
2891
+ # },
2892
+ # {
2893
+ # name: "shipping_instr_code",
2894
+ # type: "STRING",
2895
+ # is_required: false,
2896
+ # },
2897
+ # {
2898
+ # name: "payment_terms_code",
2899
+ # type: "STRING",
2900
+ # is_required: false,
2901
+ # },
2902
+ # {
2903
+ # name: "std_terms_agreement",
2904
+ # type: "STRING",
2905
+ # is_required: false,
2906
+ # },
2907
+ # {
2908
+ # name: "std_terms_agreement_ver",
2909
+ # type: "STRING",
2910
+ # is_required: false,
2911
+ # },
2912
+ # {
2913
+ # name: "agreement_number",
2914
+ # type: "STRING",
2915
+ # is_required: false,
2916
+ # },
2917
+ # {
2918
+ # name: "source",
2919
+ # type: "STRING",
2920
+ # is_required: false,
2921
+ # },
2922
+ # {
2923
+ # name: "source_update_dttm",
2924
+ # type: "TIMESTAMP",
2925
+ # is_required: false,
2926
+ # },
2927
+ # {
2928
+ # name: "source_event_id",
2929
+ # type: "STRING",
2930
+ # is_required: false,
2931
+ # },
2932
+ # {
2933
+ # name: "db_creation_dttm",
2934
+ # type: "TIMESTAMP",
2935
+ # is_required: false,
2936
+ # },
2937
+ # {
2938
+ # name: "db_updation_dttm",
2939
+ # type: "TIMESTAMP",
2940
+ # is_required: false,
2941
+ # },
2942
+ # ],
2943
+ # },
2944
+ # },
2945
+ # }
2946
+ #
2947
+ # @example Example: Update description of an existing custom dataset
2948
+ #
2949
+ # resp = client.update_data_lake_dataset({
2950
+ # name: "my_dataset",
2951
+ # description: "This is an updated custom dataset",
2952
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
2953
+ # namespace: "default",
2954
+ # })
2955
+ #
2956
+ # resp.to_h outputs the following:
2957
+ # {
2958
+ # dataset: {
2959
+ # name: "my_dataset",
2960
+ # arn: "arn:aws:scn:us-east-1:012345678910:instance/1877dd20-dee9-4639-8e99-cb67acf21fe5/namespaces/default/datasets/my_dataset",
2961
+ # created_time: Time.at(1727116807.751),
2962
+ # description: "This is an updated custom dataset",
2963
+ # instance_id: "1877dd20-dee9-4639-8e99-cb67acf21fe5",
2964
+ # last_modified_time: Time.at(1727117453.568),
2965
+ # namespace: "default",
2966
+ # schema: {
2967
+ # name: "MyDataset",
2968
+ # fields: [
2969
+ # {
2970
+ # name: "id",
2971
+ # type: "INT",
2972
+ # is_required: true,
2973
+ # },
2974
+ # {
2975
+ # name: "description",
2976
+ # type: "STRING",
2977
+ # is_required: true,
2978
+ # },
2979
+ # {
2980
+ # name: "price",
2981
+ # type: "DOUBLE",
2982
+ # is_required: false,
2983
+ # },
2984
+ # {
2985
+ # name: "creation_time",
2986
+ # type: "TIMESTAMP",
2987
+ # is_required: false,
2988
+ # },
2989
+ # ],
2990
+ # },
2991
+ # },
2992
+ # }
2993
+ #
2994
+ # @example Request syntax with placeholder values
2995
+ #
2996
+ # resp = client.update_data_lake_dataset({
2997
+ # instance_id: "UUID", # required
2998
+ # namespace: "DataLakeDatasetNamespace", # required
2999
+ # name: "DataLakeDatasetName", # required
3000
+ # description: "DataLakeDatasetDescription",
3001
+ # })
3002
+ #
3003
+ # @example Response structure
3004
+ #
3005
+ # resp.dataset.instance_id #=> String
3006
+ # resp.dataset.namespace #=> String
3007
+ # resp.dataset.name #=> String
3008
+ # resp.dataset.arn #=> String
3009
+ # resp.dataset.schema.name #=> String
3010
+ # resp.dataset.schema.fields #=> Array
3011
+ # resp.dataset.schema.fields[0].name #=> String
3012
+ # resp.dataset.schema.fields[0].type #=> String, one of "INT", "DOUBLE", "STRING", "TIMESTAMP"
3013
+ # resp.dataset.schema.fields[0].is_required #=> Boolean
3014
+ # resp.dataset.description #=> String
3015
+ # resp.dataset.created_time #=> Time
3016
+ # resp.dataset.last_modified_time #=> Time
3017
+ #
3018
+ # @see http://docs.aws.amazon.com/goto/WebAPI/supplychain-2024-01-01/UpdateDataLakeDataset AWS API Documentation
3019
+ #
3020
+ # @overload update_data_lake_dataset(params = {})
3021
+ # @param [Hash] params ({})
3022
+ def update_data_lake_dataset(params = {}, options = {})
3023
+ req = build_request(:update_data_lake_dataset, params)
3024
+ req.send_request(options)
3025
+ end
3026
+
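# --- Editorial sketch (not part of the packaged diff above) ---
# A hedged example of UpdateDataLakeDataset as documented above: change only
# the description of a custom ("default" namespace) dataset and print a few
# fields from the returned dataset. The instance ID and dataset name are
# hypothetical placeholders.
require 'aws-sdk-supplychain'

client = Aws::SupplyChain::Client.new(region: "us-east-1") # hypothetical region

resp = client.update_data_lake_dataset(
  instance_id: "11111111-2222-3333-4444-555555555555", # hypothetical instance
  namespace: "default",
  name: "example_dataset",                             # hypothetical dataset
  description: "Refreshed description for the example dataset"
)

dataset = resp.dataset
puts dataset.arn
puts dataset.description
puts dataset.last_modified_time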
872
3027
  # @!endgroup
873
3028
 
874
3029
  # @param params ({})
@@ -887,7 +3042,7 @@ module Aws::SupplyChain
887
3042
  tracer: tracer
888
3043
  )
889
3044
  context[:gem_name] = 'aws-sdk-supplychain'
890
- context[:gem_version] = '1.14.0'
3045
+ context[:gem_version] = '1.16.0'
891
3046
  Seahorse::Client::Request.new(handlers, context)
892
3047
  end
893
3048