teradataml 20.0.0.3__py3-none-any.whl → 20.0.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of teradataml might be problematic.

Files changed (84)
  1. teradataml/LICENSE-3RD-PARTY.pdf +0 -0
  2. teradataml/README.md +119 -0
  3. teradataml/_version.py +1 -1
  4. teradataml/analytics/analytic_function_executor.py +18 -6
  5. teradataml/analytics/byom/__init__.py +1 -1
  6. teradataml/analytics/sqle/__init__.py +4 -1
  7. teradataml/analytics/valib.py +18 -4
  8. teradataml/automl/__init__.py +51 -6
  9. teradataml/automl/data_preparation.py +56 -33
  10. teradataml/automl/data_transformation.py +58 -33
  11. teradataml/automl/feature_engineering.py +12 -5
  12. teradataml/automl/model_training.py +34 -13
  13. teradataml/common/__init__.py +1 -2
  14. teradataml/common/constants.py +64 -40
  15. teradataml/common/messagecodes.py +13 -3
  16. teradataml/common/messages.py +4 -1
  17. teradataml/common/sqlbundle.py +40 -10
  18. teradataml/common/utils.py +113 -39
  19. teradataml/common/warnings.py +11 -0
  20. teradataml/context/context.py +141 -17
  21. teradataml/data/amazon_reviews_25.csv +26 -0
  22. teradataml/data/byom_example.json +11 -0
  23. teradataml/data/docs/byom/docs/DataRobotPredict.py +2 -2
  24. teradataml/data/docs/byom/docs/DataikuPredict.py +40 -1
  25. teradataml/data/docs/byom/docs/H2OPredict.py +2 -2
  26. teradataml/data/docs/byom/docs/ONNXEmbeddings.py +242 -0
  27. teradataml/data/docs/byom/docs/ONNXPredict.py +2 -2
  28. teradataml/data/docs/byom/docs/PMMLPredict.py +2 -2
  29. teradataml/data/docs/sqle/docs_17_20/Shap.py +28 -6
  30. teradataml/data/docs/uaf/docs_17_20/DWT2D.py +4 -1
  31. teradataml/data/hnsw_alter_data.csv +5 -0
  32. teradataml/data/hnsw_data.csv +10 -0
  33. teradataml/data/jsons/byom/h2opredict.json +1 -1
  34. teradataml/data/jsons/byom/onnxembeddings.json +266 -0
  35. teradataml/data/jsons/sqle/17.20/TD_Shap.json +0 -1
  36. teradataml/data/jsons/sqle/20.00/TD_HNSW.json +296 -0
  37. teradataml/data/jsons/sqle/20.00/TD_HNSWPredict.json +206 -0
  38. teradataml/data/jsons/sqle/20.00/TD_HNSWSummary.json +32 -0
  39. teradataml/data/jsons/sqle/20.00/TD_KMeans.json +2 -2
  40. teradataml/data/jsons/sqle/20.00/TD_SMOTE.json +1 -1
  41. teradataml/data/jsons/sqle/20.00/TD_VectorDistance.json +5 -5
  42. teradataml/data/teradataml_example.json +8 -0
  43. teradataml/data/vectordistance_example.json +1 -1
  44. teradataml/dataframe/copy_to.py +8 -3
  45. teradataml/dataframe/data_transfer.py +11 -1
  46. teradataml/dataframe/dataframe.py +517 -121
  47. teradataml/dataframe/dataframe_utils.py +152 -20
  48. teradataml/dataframe/functions.py +26 -11
  49. teradataml/dataframe/setop.py +11 -6
  50. teradataml/dataframe/sql.py +2 -2
  51. teradataml/dbutils/dbutils.py +525 -129
  52. teradataml/hyperparameter_tuner/optimizer.py +12 -1
  53. teradataml/opensource/{sklearn/_sklearn_wrapper.py → _base.py} +317 -1011
  54. teradataml/opensource/_class.py +141 -17
  55. teradataml/opensource/{constants.py → _constants.py} +7 -3
  56. teradataml/opensource/_lightgbm.py +52 -53
  57. teradataml/opensource/_sklearn.py +1008 -0
  58. teradataml/opensource/_wrapper_utils.py +5 -5
  59. teradataml/options/__init__.py +47 -15
  60. teradataml/options/configure.py +103 -25
  61. teradataml/options/display.py +13 -2
  62. teradataml/plot/axis.py +47 -8
  63. teradataml/plot/figure.py +33 -0
  64. teradataml/plot/plot.py +63 -13
  65. teradataml/scriptmgmt/UserEnv.py +2 -2
  66. teradataml/scriptmgmt/lls_utils.py +63 -26
  67. teradataml/store/__init__.py +1 -2
  68. teradataml/store/feature_store/feature_store.py +102 -7
  69. teradataml/table_operators/Apply.py +32 -18
  70. teradataml/table_operators/Script.py +3 -1
  71. teradataml/table_operators/TableOperator.py +3 -1
  72. teradataml/utils/dtypes.py +47 -0
  73. teradataml/utils/internal_buffer.py +18 -0
  74. teradataml/utils/validators.py +68 -9
  75. {teradataml-20.0.0.3.dist-info → teradataml-20.0.0.4.dist-info}/METADATA +123 -2
  76. {teradataml-20.0.0.3.dist-info → teradataml-20.0.0.4.dist-info}/RECORD +79 -75
  77. teradataml/data/SQL_Fundamentals.pdf +0 -0
  78. teradataml/libaed_0_1.dylib +0 -0
  79. teradataml/libaed_0_1.so +0 -0
  80. teradataml/opensource/sklearn/__init__.py +0 -0
  81. teradataml/store/vector_store/__init__.py +0 -1586
  82. {teradataml-20.0.0.3.dist-info → teradataml-20.0.0.4.dist-info}/WHEEL +0 -0
  83. {teradataml-20.0.0.3.dist-info → teradataml-20.0.0.4.dist-info}/top_level.txt +0 -0
  84. {teradataml-20.0.0.3.dist-info → teradataml-20.0.0.4.dist-info}/zip-safe +0 -0
@@ -0,0 +1,26 @@
1
+ "rev_id","aid","rev_name","helpful","rev_text","rating","prodsummary","unixrevtime","revtime"
2
+ A10000012B7CGYKOMPQ4L,"000100039X",Adam,"[0, 0]",Spiritually and mentally inspiring! A book that allows you to question your morals and will help you discover who you really are!,5.00,Wonderful!,1355616000,"12 16, 2012"
3
+ A2KU9IU07LOJS1,"000100039X",Amazon Customer,"[0, 0]",This book has been a classic for many years. It has so much wisdom in it that it can be read numerous times and new things will come out each time. My favorite chapter is the one on children.,5.00,Great classic that everyone should read,1384905600,"11 20, 2013"
4
+ A19N3FCQCLJYUA,"000100039X",Amazon Customer,"[1, 1]","I discovered The Prophet fifty years ago in college and have read it dozens of times since then. Now, in addition to my beat up hard copy, I have a portable e-copy. If you were looking for an example of an intelligently designed (pun intended) book of spiritual guidance, this would be it. It doesn""t care how you picture, name or define "God" or whether you give him a gender and a personality. It just cuts to the heart of how to live and how to relate to others. If Jesus and the Buddha teamed up to write a book, it might come out like this.When I first read it, I found some passages difficult to grasp. Looking back, I now think that it was not because they were hard to understand, but because I did not want to hear gently spoken, intelligently phrased ideas that contradicted my own. Now, if I could have only one book, this would probably be it.",5.00,A book everyone "should" read,1358899200,"01 23, 2013"
5
+ A5E9TSD20U9PR,"000100039X",April,"[0, 0]","For those who don""t know Gibran, get to know his work. The Prophet is a must read. His outlook on life is truly an inspiring guide on how to approach major life decisions. This is a relationship with a man""s work you won""t regret having.",5.00,Just beautiful.,1377475200,"08 26, 2013"
6
+ A1BM81XB4QHOA3,"000100039X","""Ahoro Blethends """"Seriously""""""","[0, 0]","This book provides a reflection that you can apply to your own life.And, a way for you to try and assess whether you are truly doing the right thing and making the most of your short time on this plane.",5.00,Must Read for Life Afficianados,1390003200,"01 18, 2014"
7
+ A26GKZPS079GFF,"000100039X",Areej,"[2, 3]","I would have to say that this is the best book I""ve ever read.. I could feel every word deep in my heart everytime, of the many times I""ve read it! I would never get enough of it! its a treasure..",5.00,Touches my heart.. again and.. again...,982972800,"02 24, 2001"
8
+ A1MOSTXNIO5MPJ,"000100039X",Alan Krug,"[0, 0]","I first read THE PROPHET in college back in the 60""s. The book had a revival as did anything metaphysical in the turbulent 60""s. It had a profound effect on me and became a book I always took with me. After graduation I joined the Peace Corps and during stressful training in country (Liberia) at times of illness and the night before I left, this book gave me great comfort. I read it before I married, just before and again after my children were born and again after two near fatal illnesses. I am always amazed that there is a chapter that reaches out to you, grabs you and offers both comfort and hope for the future.Gibran offers timeless insights and love with each word. I think that we as a nation should read AND learn the lessons here. It is definitely a time for thought and reflection this book could guide us through.",5.00,Timeless for every good and bad time in your life.,1317081600,"09 27, 2011"
9
+ A1TT4CY55WLHAR,"000100039X",anonymous,"[0, 0]","I have the 1972 version, bought in 1974. The 1972 version originally had a dust jacket but my dust jacket is long gone. This particular rendition has had many re-printings, for a reason: it""s very popular. The textured paper, old-style typography, and leather cover are better than a plain-old paperback. The size is diminutive, which is perfect for this book.",5.00,"textured paper, old-style typography, and leather cover",1342396800,"07 16, 2012"
10
+ A3FFNE1DR5SI1W,"000100039X",A. Morelli,"[1, 1]","Can""t say enough about Kahlil Gibran""s work among this piece. Everybody in the whole world should read this! There is almost too much to take in, really appreciate and put towards our daily lives. In my opinion, it is just one of the most beautiful literature pieces ever written. Would recommend to as a gift for anyone spiritual/poetic/philosophy/educational piece or just something to enjoy here and there. 5 stars for sure!",5.00,phenomenal piece of literature!,1340755200,"06 27, 2012"
11
+ A1340OFLZBW5NG,"000100039X",Amazon Customer,"[0, 0]",I LOVE this book... his writing seems to just flow from page to page. I get something different from this book each time I read it..,5.00,Perhaps the greatest book that I have ever read,1231977600,"01 15, 2009"
12
+ A29TRDMK51GKZR,"000100039X",Alpine Plume,"[0, 0]","Deep, moving dramatic verses of the heart and soul.Truths of ancient wisdom from a true and romantic poet.Relevant for all eternity.",5.00,Such Beauty,1383436800,"11 3, 2013"
13
+ A3FI0744PG1WYG,"000100039X","""Always Reading """"tkm""""""","[0, 0]","This is a timeless classic. Over the years I""ve given it as a gift more times than I can count, and will continue to do so. Addresses real life issues in a beautiful way and makes us reexamine our own attitude about how we see what happens in our lives. So easy to read over and over.",5.00,The Prophet,1390953600,"01 29, 2014"
14
+ A2XQ5LZHTD4AFT,"000100039X",Alaturka,"[7, 9]","A timeless classic. It is a very demanding and assuming title, but Gibran backs it up with some excellent style and content. If he had the means to publish it a century or two earlier, he could have inspired a new religion.From the mouth of an old man about to sail away to a far away destination, we hear the wisdom of life and all important aspects of it. It is a messege. A guide book. A Sufi sermon. Much is put in perspective without any hint of a dogma. There is much that hints at his birth place, Lebanon where many of the old prophets walked the Earth and where this book project first germinated most likely.Probably becuase it was written in English originally, the writing flows, it is pleasant to read, and the charcoal drawings of the author decorating the pages is a plus. I loved the cover.",5.00,A Modern Rumi,1033948800,"10 7, 2002"
15
+ A2LBBQHYLEHM7P,"000100039X","""Amazon Customer """"Full Frontal Nerdity""""""","[0, 0]","An amazing work. Realizing extensive use of Biblical imagery and sentence structure, "The Prophet" by Khalil Gibran is a literary classic. Influencing the Free Love movement of the 1960""s, Gibran""s master work explores themes of love, longing and loss.",5.00,A Modern Classic,1379808000,"09 22, 2013"
16
+ AENNW2G826191,"000100039X",Ashish A,"[1, 4]","Its a thin book, very readable and has interesting 1-2 page thoughts on various entities like anger, children, religion, speech, silence and its COOL.........reading. Ofcourse if one needs to imbibe the thoughts of the author, it has to be consumed slowly and perhaps revisited but leaves you pretty heady and clear about certain things.",3.00,Good Read,963446400,"07 13, 2000"
17
+ A2X4HE21JTAL98,"000100039X",Antiquarian,"[3, 5]","Anything I""ve read by Gibran is, in my mind, flawless. This, the most famous of his works, is no exception. It is simple, yet deep; honest and profound; moving and inspirational. Gibran""s work is one of a kind, and can be far more encouraging and moving than any self-help program or therapy or anything like that. The poetic style, the aphorisms, the parables, the almost biblical feel, are all just what over-worked, over-stressed, modern and spiritually starved worldly people need.",5.00,Flawless,1132099200,"11 16, 2005"
18
+ A3V1MKC2BVWY48,"000100039X",Alex Dawson,"[0, 0]","Reading this made my mind feel like a still pool of water, cool and quiet in a mossy grotto. It""s direct and simple wisdom has a depth of complexity that takes a quiet day to sink in, leaving you at peace. It is best to set time aside for it, relax, absorb, and let it softly clear your mind.",5.00,This book will bring you peace,1390780800,"01 27, 2014"
19
+ A1KQ80Y692CDOI,"000100039X",Atown,"[2, 9]","I read this about a year ago and can""t recall a great deal of the book. From what I do recall it was like a poem all the way through. While the writing was beautiful, I found it ambiguous and befuddled with meaning that I could not identify with. When Gibran speaks of God, I cannot identify because I have since abandoned those philosophies. It is thus difficult to revisit them in this book. I have the feeling a may have missed something great about this book. Indeed, I pulled wisdom from parts, but rather than go back and read it again, for now, perhaps I will move on to another of the many books out there that are enlightening and worth reading. Someday, I would like to read this again and dig deeper.",2.00,Eloquent,1206057600,"03 21, 2008"
20
+ AUTNO7VDY4H4A,"000100039X",Austin guy,"[0, 0]","Loved this book since first I read it, years gone by. Purchased this copy for a friend who has not ever read Gibran.",5.00,"A great book, buying it for a friend.",1371427200,"06 17, 2013"
21
+ A2WVHIRDMLM82E,"000100039X",Amazon Customer,"[0, 0]","This book has so much you can take out of it to use in your real life. Amazing, and one of my favorite reads of all time.",5.00,Amazing,1394928000,"03 16, 2014"
22
+ A2I35JB67U20C0,"000100039X",Amazon Customer,"[0, 0]","When I was in college in the 70""s this book had a revival and I did not read it then. Recently a friend (who is 90) and I were talking about work and she said: "work is love made visible," and told me it was from the Prophet. I though that was so beautiful I got the book and was not disappointed.You see, if you have ideas and you do not realize them, then they are nothing, and if your ideas do not come from love and joy, then they are bitter and what they produce will be bitter, but if they flow from love and joy, then their realization will be love and joy, thus work is love made visible.He says it WAY better that I do and says much more in just a few paragraphs. But as you can see from the example, what he says is not religious, but positive ways of looking at things. Since I rediscoverd the book I have given away many copies and everyone I have given it to sincerly thanked me.",5.00,Everyone should have this book,983318400,"02 28, 2001"
23
+ A12387207U8U24,"000100039X",Alex,"[0, 0]","As you read, Gibran""s poetry brings spiritual and visual beauty to life within you. Gibran is justly famous for rich metaphors that brilliantly highlight the pursuit of Truth and Goodness amidst all the darkness and light of human nature.",5.00,Graet Work,1206662400,"03 28, 2008"
24
+ A2S166WSCFIFP5,"000100039X","""adead_poet@hotmail.com """"adead_poet@hotmail.com""""""","[0, 2]","This is one my must have books. It is a masterpiece of spirituality. I""ll be the first to admit, its literary quality isn""t much. It is rather simplistically written, but the message behind it is so powerful that you have to read it. It will take you to enlightenment.",5.00,close to god,1071100800,"12 11, 2003"
25
+ A27ZH1AQORJ1L,"000100039X","""anybody else or """"amanuet""""""","[3, 3]","This book is everything that is simple, delicate, true, and beautiful.I have read few books so touching and enlightening; "The Prophet" is a true masterpiece that has that feeling of ancient wisdom in it. The wisdom of the text is gentle, yet insistent, it lets you understand things you""ve always known.My feelings defy description.",5.00,Enchanting,1066003200,"10 13, 2003"
26
+ ARDQ9KNB8K22N,"000100039X",Anwar,"[1, 1]","Cool book, I really like the quality of the production. Black clothbound with gold embossing and nice paper, looks to be cotton rag. I am proud to include it in my collection. Intrigueing story full of the sort of passion that does not cloud the mind or divert truth but instead is revealing and living. Very readable, the stories are short and highly economic so that one is likely to read for only a minute or two until something insightful is revealed. It is full of timeless truths which are of lasting value to the reader and inform life. The language and delivery is familiar yet of a quality that is penetrating in a similar way as it might be listening to a surmon by Jesus. I give it 5 stars. I considered giving only 4 stars because the format is fairly predictable but I have decided that this quality is actually a strength as one can pick up the book at any point and continue until the end...and then start over for that matter. Excellent for busy people or commuters!",5.00,"""""""The Prophet"""" is cool""",1329264000,"02 15, 2012"
@@ -14,5 +14,16 @@
14
14
  "petal_length" : "float",
15
15
  "petal_width" : "float",
16
16
  "species": "integer"
17
+ },
18
+ "amazon_reviews_25": {
19
+ "rev_id": "VARCHAR(64000)",
20
+ "aid": "VARCHAR(64000)",
21
+ "rev_name": "VARCHAR(64000)",
22
+ "helpful": "VARCHAR(64000)",
23
+ "rev_text": "VARCHAR(64000)",
24
+ "rating": "DECIMAL(10,2)",
25
+ "prodsummary": "VARCHAR(64000)",
26
+ "unixrevtime": "BIGINT",
27
+ "revtime": "VARCHAR(64000)"
17
28
  }
18
29
  }
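
The new amazon_reviews_25 entry above maps the example CSV added in this release to Vantage column types. As a minimal sketch (not part of the diff), the table can be loaded and inspected as follows; the "byom" bundle name is taken from the ONNXEmbeddings example later in this diff.

from teradataml import create_context, load_example_data, DataFrame

# Connect to Vantage first (placeholder credentials).
# create_context(host="<host>", username="<user>", password="<password>")

# Load the example table and wrap it in a teradataml DataFrame.
load_example_data("byom", "amazon_reviews_25")
reviews = DataFrame("amazon_reviews_25")

# Column types should line up with the JSON definition above
# (VARCHAR review fields, DECIMAL(10,2) rating, BIGINT unixrevtime).
print(reviews.dtypes)
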
@@ -23,7 +23,7 @@ def DataRobotPredict(modeldata=None, newdata=None, accumulate=None, model_output
23
23
  Required Argument.
24
24
  Specifies the name(s) of input teradataml DataFrame column(s) to
25
25
  copy to the output.
26
- Types: str OR list of Strings (str)
26
+ Types: str OR list of Strings (str) OR Feature OR list of Features
27
27
 
28
28
  model_output_fields:
29
29
  Optional Argument.
@@ -108,7 +108,7 @@ def DataRobotPredict(modeldata=None, newdata=None, accumulate=None, model_output
108
108
  for each argument that accepts teradataml DataFrame as
109
109
  input and can be accessed as:
110
110
  * "<input_data_arg_name>_partition_column" accepts str or
111
- list of str (Strings)
111
+ list of str (Strings) or PartitionKind
112
112
  * "<input_data_arg_name>_hash_column" accepts str or list
113
113
  of str (Strings)
114
114
  * "<input_data_arg_name>_order_column" accepts str or list
@@ -24,7 +24,7 @@ def DataikuPredict(modeldata=None, newdata=None, accumulate=None, model_output_f
24
24
  Specifies the name(s) of input teradataml DataFrame column(s) to
25
25
  copy to the output. By default, the function copies all input
26
26
  teradataml DataFrame columns to the output.
27
- Types: str OR list of Strings (str)
27
+ Types: str OR list of Strings (str) OR Feature OR list of Features
28
28
 
29
29
  model_output_fields:
30
30
  Optional Argument.
@@ -86,6 +86,45 @@ def DataikuPredict(modeldata=None, newdata=None, accumulate=None, model_output_f
86
86
  Default Value: False
87
87
  Types: bool
88
88
 
89
+ **generic_arguments:
90
+ Specifies the generic keyword arguments SQLE functions accept. Below
91
+ are the generic keyword arguments:
92
+ persist:
93
+ Optional Argument.
94
+ Specifies whether to persist the results of the
95
+ function in a table or not. When set to True,
96
+ results are persisted in a table; otherwise,
97
+ results are garbage collected at the end of the
98
+ session.
99
+ Default Value: False
100
+ Types: bool
101
+
102
+ volatile:
103
+ Optional Argument.
104
+ Specifies whether to put the results of the
105
+ function in a volatile table or not. When set to
106
+ True, results are stored in a volatile table,
107
+ otherwise not.
108
+ Default Value: False
109
+ Types: bool
110
+
111
+ Function allows the user to partition, hash, order or local
112
+ order the input data. These generic arguments are available
113
+ for each argument that accepts teradataml DataFrame as
114
+ input and can be accessed as:
115
+ * "<input_data_arg_name>_partition_column" accepts str or
116
+ list of str (Strings) or PartitionKind
117
+ * "<input_data_arg_name>_hash_column" accepts str or list
118
+ of str (Strings)
119
+ * "<input_data_arg_name>_order_column" accepts str or list
120
+ of str (Strings)
121
+ * "local_order_<input_data_arg_name>" accepts boolean
122
+ Note:
123
+ These generic arguments are supported by teradataml if
124
+ the underlying SQL Engine function supports them, else an
125
+ exception is raised.
126
+
127
+
89
128
  RETURNS:
90
129
  Instance of DataikuPredict.
91
130
  Output teradataml DataFrame can be accessed using attribute
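
The generic keyword arguments documented above are passed straight through the DataikuPredict() call, one set per DataFrame argument. A minimal sketch under assumptions not in the diff (a "dataiku_model" entry in a "byom_models" table, a "scoring_input" table with an "id" column, and configure.byom_install_location already set):

from teradataml import DataFrame, DataikuPredict, retrieve_byom

modeldata = retrieve_byom("dataiku_model", table_name="byom_models")  # hypothetical model id/table
newdata = DataFrame("scoring_input")                                  # hypothetical input table

result = DataikuPredict(modeldata=modeldata,
                        newdata=newdata,
                        accumulate="id",
                        # generic arguments described above:
                        persist=True,                    # keep the result in a permanent table
                        newdata_partition_column="id",   # "<input_data_arg_name>_partition_column"
                        newdata_order_column="id")       # "<input_data_arg_name>_order_column"
print(result.result)
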
@@ -38,7 +38,7 @@ def H2OPredict(modeldata=None, newdata=None, accumulate=None, model_output_field
38
38
  Required Argument.
39
39
  Specifies the name(s) of input teradataml DataFrame column(s)
40
40
  to copy to the output DataFrame.
41
- Types: str OR list of Strings (str)
41
+ Types: str OR list of Strings (str) OR Feature OR list of Features
42
42
 
43
43
  model_output_fields:
44
44
  Optional Argument.
@@ -147,7 +147,7 @@ def H2OPredict(modeldata=None, newdata=None, accumulate=None, model_output_field
147
147
  for each argument that accepts teradataml DataFrame as
148
148
  input and can be accessed as:
149
149
  * "<input_data_arg_name>_partition_column" accepts str or
150
- list of str (Strings)
150
+ list of str (Strings) or PartitionKind
151
151
  * "<input_data_arg_name>_hash_column" accepts str or list
152
152
  of str (Strings)
153
153
  * "<input_data_arg_name>_order_column" accepts str or list
@@ -0,0 +1,242 @@
1
+ def ONNXEmbeddings(newdata=None, modeldata=None, tokenizerdata=None, accumulate=None, model_output_tensor=None,
2
+ encode_max_length=512, show_model_properties=False, output_column_prefix="emb_",
3
+ output_format="VARBYTE(3072)", overwrite_cached_models="false", is_debug=False,
4
+ enable_memory_check=False, **generic_arguments):
5
+ """
6
+ DESCRIPTION:
7
+ The ONNXEmbeddings() function is used to calculate embedding values in
8
+ Vantage with a HuggingFace model that has been created outside Vantage
9
+ and exported to Vantage using ONNX format.
10
+
11
+ PARAMETERS:
12
+ newdata:
13
+ Required Argument.
14
+ Specifies the input teradataml DataFrame that contains
15
+ the data to be scored.
16
+ Types: teradataml DataFrame
17
+
18
+ modeldata:
19
+ Required Argument.
20
+ Specifies the model teradataml DataFrame to be used for
21
+ scoring.
22
+ Note:
23
+ * Use `retrieve_byom()` to get the teradataml DataFrame that contains the model.
24
+ Types: teradataml DataFrame
25
+
26
+ tokenizerdata:
27
+ Required Argument.
28
+ Specifies the tokenizer teradataml DataFrame
29
+ which contains the tokenizer json file.
30
+ Types: teradataml DataFrame
31
+
32
+ accumulate:
33
+ Required Argument.
34
+ Specifies the name(s) of input teradataml DataFrame column(s) to
35
+ copy to the output. By default, the function copies all input
36
+ teradataml DataFrame columns to the output.
37
+ Types: str OR list of Strings (str) OR Feature OR list of Features
38
+
39
+ model_output_tensor:
40
+ Required Argument.
41
+ Specifies the column of the model's possible output fields
42
+ that the user wants to calculate and output.
43
+ Types: str
44
+
45
+ encode_max_length:
46
+ Optional Argument.
47
+ Specifies the maximum length of the tokenizer output token
48
+ encodings (only applies to models with symbolic dimensions).
49
+ Default Value: 512
50
+ Types: int
51
+
52
+ show_model_properties:
53
+ Optional Argument.
54
+ Specifies whether to show the model properties, including the default or
55
+ expanded "model_input_fields_map" based on the input model.
56
+ Default Value: False
57
+ Types: bool
58
+
59
+ output_column_prefix:
60
+ Optional Argument.
61
+ Specifies the column prefix for each of the output columns
62
+ when using float32 "output_format".
63
+ Default Value: "emb_"
64
+ Types: str
65
+
66
+ output_format:
67
+ Optional Argument.
68
+ Specifies the output format for the model embeddings output.
69
+ Default Value: "VARBYTE(3072)"
70
+ Types: str
71
+
72
+ overwrite_cached_models:
73
+ Optional Argument.
74
+ Specifies the model name that needs to be removed from the cache.
75
+ When a model loaded into the memory of the node fits in the cache,
76
+ it stays in the cache until being evicted to make space for another
77
+ model that needs to be loaded. Therefore, a model can remain in the
78
+ cache even after the completion of function execution. Other functions
79
+ that use the same model can use it, saving the cost of reloading it
80
+ into memory. User should overwrite a cached model only when it is updated,
81
+ to make sure that the Predict function uses the updated model instead
82
+ of the cached model.
83
+ Note:
84
+ Do not use the "overwrite_cached_models" argument except when user
85
+ is trying to replace a previously cached model. Using the argument
86
+ in other cases, including in concurrent queries or multiple times
87
+ within a short period of time, leads to an OOM error.
88
+ Default Value: "false"
89
+ Permitted Values: true, t, yes, y, 1, false, f, no, n, 0, *,
90
+ current_cached_model
91
+ Types: str
92
+
93
+ is_debug:
94
+ Optional Argument.
95
+ Specifies whether debug statements are added to a trace table or not.
96
+ When set to True, debug statements are added to a trace table that must
97
+ be created beforehand.
98
+ Notes:
99
+ * Only available with BYOM version 3.00.00.02 and later.
100
+ * To save logs for debugging, user can create an error log by using
101
+ the is_debug=True parameter in the predict functions.
102
+ A database trace table is used to collect this information which
103
+ does impact performance of the function, so using small data input
104
+ sizes is recommended.
105
+ * To generate this log, user must do the following:
106
+ 1. Create a global trace table with columns vproc_ID BYTE(2),
107
+ Sequence INTEGER, Trace_Output VARCHAR(31000)
108
+ 2. Turn on session function tracing:
109
+ SET SESSION FUNCTION TRACE USING '' FOR TABLE <trace_table_name_created_in_step_1>;
110
+ 3. Execute function with "is_debug" set to True.
111
+ 4. Debug information is logged to the table created in step 1.
112
+ 5. To turn off the logging, either disconnect from the session or
113
+ run following SQL:
114
+ SET SESSION FUNCTION TRACE OFF;
115
+ The trace table is temporary and the information is deleted if user
116
+ logs off from the session. If long term persistence is necessary,
117
+ user can copy the table to a permanent table before leaving the
118
+ session.
119
+ Default Value: False
120
+ Types: bool
121
+
122
+ enable_memory_check:
123
+ Optional Argument.
124
+ Specifies whether to check that there is enough native memory for large models.
125
+ Default Value: True
126
+ Types: bool
127
+
128
+ **generic_arguments:
129
+ Specifies the generic keyword arguments SQLE functions accept. Below
130
+ are the generic keyword arguments:
131
+ persist:
132
+ Optional Argument.
133
+ Specifies whether to persist the results of the
134
+ function in a table or not. When set to True,
135
+ results are persisted in a table; otherwise,
136
+ results are garbage collected at the end of the
137
+ session.
138
+ Default Value: False
139
+ Types: bool
140
+
141
+ volatile:
142
+ Optional Argument.
143
+ Specifies whether to put the results of the
144
+ function in a volatile table or not. When set to
145
+ True, results are stored in a volatile table,
146
+ otherwise not.
147
+ Default Value: False
148
+ Types: bool
149
+
150
+ Function allows the user to partition, hash, order or local
151
+ order the input data. These generic arguments are available
152
+ for each argument that accepts teradataml DataFrame as
153
+ input and can be accessed as:
154
+ * "<input_data_arg_name>_partition_column" accepts str or
155
+ list of str (Strings) or PartitionKind
156
+ * "<input_data_arg_name>_hash_column" accepts str or list
157
+ of str (Strings)
158
+ * "<input_data_arg_name>_order_column" accepts str or list
159
+ of str (Strings)
160
+ * "local_order_<input_data_arg_name>" accepts boolean
161
+ Note:
162
+ These generic arguments are supported by teradataml if
163
+ the underlying SQL Engine function supports them, else an
164
+ exception is raised.
165
+
166
+ RETURNS:
167
+ Instance of ONNXEmbeddings.
168
+ Output teradataml DataFrame can be accessed using attribute
169
+ references, such as ONNXEmbeddings.<attribute_name>.
170
+ Output teradataml DataFrame attribute name is:
171
+ result
172
+
173
+
174
+ RAISES:
175
+ TeradataMlException, TypeError, ValueError
176
+
177
+
178
+ EXAMPLES:
179
+ # Notes:
180
+ # 1. Get the connection to Vantage to execute the function.
181
+ # 2. One must import the required functions mentioned in
182
+ # the example from teradataml.
183
+ # 3. Function will raise error if not supported on the Vantage
184
+ # user is connected to.
185
+ # 4. To execute BYOM functions, set 'configure.byom_install_location' to the
186
+ # database name where BYOM functions are installed.
187
+
188
+ # Import required libraries / functions.
189
+ import os, teradataml
190
+ from teradataml import get_connection, DataFrame
191
+ from teradataml import save_byom, retrieve_byom, load_example_data
192
+ from teradataml import configure, display_analytic_functions, execute_sql
193
+
194
+ # Load example data.
195
+ load_example_data("byom", "amazon_reviews_25")
196
+
197
+ # Create teradataml DataFrame objects.
198
+ amazon_reviews_25 = DataFrame.from_table("amazon_reviews_25")
199
+
200
+ # Assign the rev_text column to a new column named txt.
201
+ amazon_reviews_25 = amazon_reviews_25.assign(txt=amazon_reviews_25.rev_text)
202
+
203
+ # Set install location of BYOM functions.
204
+ configure.byom_install_location = "td_mldb"
205
+
206
+ # Check the list of available analytic functions.
207
+ display_analytic_functions(type="BYOM")
208
+
209
+ # Retrieve model.
210
+ modeldata = retrieve_byom("bge-small-en-v1.5", table_name="onnx_models")
211
+ tokenizerdata = retrieve_byom("bge-small-en-v1.5", table_name="embeddings_tokenizers")
212
+
213
+ # Derive tokenizer_id and tokenizer columns from model_id and model in embeddings_tokenizers.
214
+ tokenizerdata_a1 = tokenizerdata.assign(tokenizer_id=tokenizerdata.model_id)
215
+ tokenizerdata_a2 = tokenizerdata_a1.assign(tokenizer=tokenizerdata_a1.model)
216
+
217
+ # Example 1: Calculate embedding values in Vantage with a bge-small-en-v1.5
218
+ # model that has been created outside Vantage by removing all
219
+ # the cached models.
220
+ ONNXEmbeddings_out_1 = ONNXEmbeddings(modeldata=modeldata,
221
+ tokenizerdata=tokenizerdata_a2.select(['tokenizer_id', 'tokenizer']),
222
+ newdata=amazon_reviews_25.select(["rev_id", "txt"]),
223
+ accumulate='rev_id',
224
+ model_output_tensor='sentence_embedding'
225
+ )
226
+
227
+ # Print the results.
228
+ print(ONNXEmbeddings_out_1.result)
229
+
230
+ # Example 2: Showcasing the model properties of bge-small-en-v1.5 model that has been
231
+ # created outside Vantage.
232
+ ONNXEmbeddings_out_2 = ONNXEmbeddings(modeldata=modeldata,
233
+ tokenizerdata=tokenizerdata_a2.select(['tokenizer_id', 'tokenizer']),
234
+ newdata=amazon_reviews_25.select(["rev_id", "txt"]),
235
+ accumulate='rev_id',
236
+ model_output_tensor='sentence_embedding',
237
+ show_model_properties=True
238
+ )
239
+
240
+ # Print the results.
241
+ print(ONNXEmbeddings_out_2.result)
242
+ """
@@ -40,7 +40,7 @@ def ONNXPredict(newdata=None, modeldata=None, accumulate=None, model_output_fiel
40
40
  Required Argument.
41
41
  Specifies the name(s) of input teradataml DataFrame column(s) to
42
42
  copy to the output.
43
- Types: str OR list of Strings (str)
43
+ Types: str OR list of Strings (str) OR Feature OR list of Features
44
44
 
45
45
  model_output_fields:
46
46
  Optional Argument.
@@ -146,7 +146,7 @@ def ONNXPredict(newdata=None, modeldata=None, accumulate=None, model_output_fiel
146
146
  for each argument that accepts teradataml DataFrame as
147
147
  input and can be accessed as:
148
148
  * "<input_data_arg_name>_partition_column" accepts str or
149
- list of str (Strings)
149
+ list of str (Strings) or PartitionKind
150
150
  * "<input_data_arg_name>_hash_column" accepts str or list
151
151
  of str (Strings)
152
152
  * "<input_data_arg_name>_order_column" accepts str or list
@@ -52,7 +52,7 @@ def PMMLPredict(newdata=None, modeldata=None, accumulate=None,
52
52
  Required Argument.
53
53
  Specifies the name(s) of input teradataml DataFrame column(s)
54
54
  to copy to the output DataFrame.
55
- Types: str OR list of Strings (str)
55
+ Types: str OR list of Strings (str) OR Feature OR list of Features
56
56
 
57
57
  model_output_fields:
58
58
  Optional Argument.
@@ -143,7 +143,7 @@ def PMMLPredict(newdata=None, modeldata=None, accumulate=None,
143
143
  for each argument that accepts teradataml DataFrame as
144
144
  input and can be accessed as:
145
145
  * "<input_data_arg_name>_partition_column" accepts str or
146
- list of str (Strings)
146
+ list of str (Strings) or PartitionKind
147
147
  * "<input_data_arg_name>_hash_column" accepts str or list
148
148
  of str (Strings)
149
149
  * "<input_data_arg_name>_order_column" accepts str or list
@@ -1,4 +1,4 @@
1
- def Shap(data = None, object = None, id_column=None, training_function = "TD_GLM",
1
+ def Shap(data = None, object = None, id_column=None, training_function = None,
2
2
  model_type = "Regression", input_columns = None, detailed = False,
3
3
  accumulate = None, num_parallel_trees = 1000, num_boost_rounds = 10,
4
4
  **generic_arguments):
@@ -29,7 +29,6 @@ def Shap(data = None, object = None, id_column=None, training_function = "TD_GLM
29
29
  training_function:
30
30
  Required Argument.
31
31
  Specifies the model type name.
32
- Default Value: "TD_GLM"
33
32
  Permitted Values: TD_GLM, TD_DECISIONFOREST, TD_XGBOOST
34
33
  Types: str
35
34
 
@@ -50,6 +49,9 @@ def Shap(data = None, object = None, id_column=None, training_function = "TD_GLM
50
49
  Optional Argument.
51
50
  Specifies whether to output detailed shap information about the
52
51
  forest trees.
52
+ Note:
53
+ * It is only supported for "TD_XGBOOST" and "TD_DECISIONFOREST"
54
+ training functions.
53
55
  Default Value: False
54
56
  Types: bool
55
57
 
@@ -151,10 +153,10 @@ def Shap(data = None, object = None, id_column=None, training_function = "TD_GLM
151
153
 
152
154
  # Example 1: Shap for classification model.
153
155
  XGBoost_out = XGBoost(data=iris_input,
154
- input_columns=['sepal_length', 'sepal_width', 'petal_length', 'petal_width'],
155
- response_column = 'species',
156
- model_type='Classification',
157
- iter_num=25)
156
+ input_columns=['sepal_length', 'sepal_width', 'petal_length', 'petal_width'],
157
+ response_column = 'species',
158
+ model_type='Classification',
159
+ iter_num=25)
158
160
 
159
161
  Shap_out = Shap(data=iris_input,
160
162
  object=XGBoost_out.result,
@@ -200,4 +202,24 @@ def Shap(data = None, object = None, id_column=None, training_function = "TD_GLM
200
202
 
201
203
  # Print the result DataFrame.
202
204
  print(Shap_out2.output_data)
205
+
206
+ # Example 3: Shap for GLM model.
207
+ from teradataml import GLM
208
+ GLM_out = GLM(data=transform_obj.result,
209
+ input_columns=['MedInc', 'HouseAge', 'AveRooms',
210
+ 'AveBedrms', 'Population', 'AveOccup',
211
+ 'Latitude', 'Longitude'],
212
+ response_column="MedHouseVal",
213
+ family="GAUSSIAN")
214
+
215
+ Shap_out3 = Shap(data=transform_obj.result,
216
+ object=GLM_out.result,
217
+ id_column='id',
218
+ training_function="TD_GLM",
219
+ model_type="Regression",
220
+ input_columns=['MedInc', 'HouseAge', 'AveRooms','AveBedrms', 'Population', 'AveOccup','Latitude', 'Longitude'],
221
+ detailed=False)
222
+
223
+ # Print the result DataFrame.
224
+ print(Shap_out3.output_data)
203
225
  """
@@ -202,7 +202,10 @@ def DWT2D(data1=None, data1_filter_expr=None, data2=None,
202
202
  data2_filter_expr=data2.id==1,
203
203
  input_fmt_input_mode="MANY2ONE")
204
204
 
205
- # Example 1: Perform discrete wavelet transform (DWT) for two-dimensional data
205
+ # Print the result DataFrame.
206
+ print(uaf_out.result)
207
+
208
+ # Example 2: Perform discrete wavelet transform (DWT) for two-dimensional data
206
209
  # using only one matrix as input and wavelet as 'haar'.
207
210
  uaf_out = DWT2D(data1=data1_matrix_df,
208
211
  wavelet='haar')
@@ -0,0 +1,5 @@
1
+ id,array_col
2
+ 10,"1,1"
3
+ 11,"2,2"
4
+ 12,"3,3"
5
+ 13,"4,4"
@@ -0,0 +1,10 @@
1
+ id,array_col
2
+ 1,"18,18"
3
+ 2,"19,19"
4
+ 3,"20,20"
5
+ 4,"55,55"
6
+ 5,"56,56"
7
+ 6,"57,57"
8
+ 7,"88,88"
9
+ 8,"89,89"
10
+ 9,"90,90"
@@ -142,7 +142,7 @@
142
142
  "rDescription": " Specifies the model type as 'DAI' or 'OpenSource' for H2O model prediction. ",
143
143
  "description": " Specifies the model type as 'DAI' or 'OpenSource' for H2O model prediction. ",
144
144
  "datatype": "STRING",
145
- "allowsLists": true,
145
+ "allowsLists": false,
146
146
  "rName": "model.type",
147
147
  "useInR": true,
148
148
  "rOrderNum": 6