azure_openai_client 0.0.1 → 0.0.2

Files changed (107)
  1. checksums.yaml +4 -4
  2. data/Gemfile +10 -0
  3. data/Gemfile.lock +76 -0
  4. data/README.md +50 -87
  5. data/Rakefile +10 -0
  6. data/azure_openai_client.gemspec +38 -0
  7. data/docs/ChatCompletionsCreate200Response.md +28 -0
  8. data/docs/ChatCompletionsCreate200ResponseChoicesInner.md +22 -0
  9. data/docs/ChatCompletionsCreate200ResponseChoicesInnerMessage.md +20 -0
  10. data/docs/ChatCompletionsCreate200ResponseUsage.md +22 -0
  11. data/docs/ChatCompletionsCreateRequest.md +38 -0
  12. data/docs/ChatCompletionsCreateRequestMessagesInner.md +22 -0
  13. data/docs/ChatCompletionsCreateRequestStop.md +49 -0
  14. data/docs/CompletionsCreate200Response.md +28 -0
  15. data/docs/CompletionsCreate200ResponseChoicesInner.md +24 -0
  16. data/docs/CompletionsCreate200ResponseChoicesInnerLogprobs.md +24 -0
  17. data/docs/CompletionsCreate200ResponseUsage.md +22 -0
  18. data/docs/CompletionsCreateRequest.md +52 -0
  19. data/docs/CompletionsCreateRequestPrompt.md +49 -0
  20. data/docs/CompletionsCreateRequestStop.md +49 -0
  21. data/docs/DefaultApi.md +238 -0
  22. data/docs/EmbeddingsCreate200Response.md +24 -0
  23. data/docs/EmbeddingsCreate200ResponseDataInner.md +22 -0
  24. data/docs/EmbeddingsCreate200ResponseUsage.md +20 -0
  25. data/docs/EmbeddingsCreateRequest.md +24 -0
  26. data/docs/EmbeddingsCreateRequestInput.md +49 -0
  27. data/docs/ErrorResponse.md +18 -0
  28. data/docs/ErrorResponseError.md +24 -0
  29. data/git_push.sh +57 -0
  30. data/inference.json +816 -0
  31. data/lib/azure_openai_client/api/default_api.rb +120 -93
  32. data/lib/azure_openai_client/api_client.rb +77 -75
  33. data/lib/azure_openai_client/api_error.rb +5 -5
  34. data/lib/azure_openai_client/configuration.rb +114 -22
  35. data/lib/azure_openai_client/models/{inline_response_200_2.rb → chat_completions_create200_response.rb} +34 -26
  36. data/lib/azure_openai_client/models/{inline_response_200_2_choices.rb → chat_completions_create200_response_choices_inner.rb} +28 -20
  37. data/lib/azure_openai_client/models/{inline_response_200_2_message.rb → chat_completions_create200_response_choices_inner_message.rb} +28 -20
  38. data/lib/azure_openai_client/models/{inline_response_200_2_usage.rb → chat_completions_create200_response_usage.rb} +28 -20
  39. data/lib/azure_openai_client/models/{chat_completions_body.rb → chat_completions_create_request.rb} +183 -37
  40. data/lib/azure_openai_client/models/{deploymentsdeploymentidchatcompletions_messages.rb → chat_completions_create_request_messages_inner.rb} +30 -22
  41. data/lib/azure_openai_client/models/chat_completions_create_request_stop.rb +105 -0
  42. data/lib/azure_openai_client/models/{inline_response_200.rb → completions_create200_response.rb} +34 -26
  43. data/lib/azure_openai_client/models/{inline_response_200_choices.rb → completions_create200_response_choices_inner.rb} +30 -22
  44. data/lib/azure_openai_client/models/{inline_response_200_logprobs.rb → completions_create200_response_choices_inner_logprobs.rb} +30 -22
  45. data/lib/azure_openai_client/models/{inline_response_200_usage.rb → completions_create200_response_usage.rb} +28 -20
  46. data/lib/azure_openai_client/models/{deploymentid_completions_body.rb → completions_create_request.rb} +58 -52
  47. data/lib/azure_openai_client/models/completions_create_request_prompt.rb +105 -0
  48. data/lib/azure_openai_client/models/completions_create_request_stop.rb +105 -0
  49. data/lib/azure_openai_client/models/{inline_response_200_1.rb → embeddings_create200_response.rb} +30 -22
  50. data/lib/azure_openai_client/models/{inline_response_200_1_data.rb → embeddings_create200_response_data_inner.rb} +28 -20
  51. data/lib/azure_openai_client/models/{inline_response_200_1_usage.rb → embeddings_create200_response_usage.rb} +26 -18
  52. data/lib/azure_openai_client/models/embeddings_create_request.rb +252 -0
  53. data/lib/azure_openai_client/models/embeddings_create_request_input.rb +105 -0
  54. data/lib/azure_openai_client/models/error_response.rb +21 -13
  55. data/lib/azure_openai_client/models/error_response_error.rb +27 -19
  56. data/lib/azure_openai_client/version.rb +5 -6
  57. data/lib/azure_openai_client.rb +23 -23
  58. data/openapi-codegen.sh +12 -0
  59. data/openapi_config.yaml +14 -0
  60. data/spec/api/default_api_spec.rb +21 -22
  61. data/spec/api_client_spec.rb +15 -16
  62. data/spec/configuration_spec.rb +3 -3
  63. data/spec/models/chat_completions_create200_response_choices_inner_message_spec.rb +44 -0
  64. data/spec/models/chat_completions_create200_response_choices_inner_spec.rb +46 -0
  65. data/spec/models/chat_completions_create200_response_spec.rb +64 -0
  66. data/spec/models/chat_completions_create200_response_usage_spec.rb +46 -0
  67. data/spec/models/chat_completions_create_request_messages_inner_spec.rb +50 -0
  68. data/spec/models/chat_completions_create_request_spec.rb +94 -0
  69. data/spec/models/chat_completions_create_request_stop_spec.rb +31 -0
  70. data/spec/models/completions_create200_response_choices_inner_logprobs_spec.rb +52 -0
  71. data/spec/models/completions_create200_response_choices_inner_spec.rb +52 -0
  72. data/spec/models/completions_create200_response_spec.rb +64 -0
  73. data/spec/models/completions_create200_response_usage_spec.rb +46 -0
  74. data/spec/models/completions_create_request_prompt_spec.rb +31 -0
  75. data/spec/models/completions_create_request_spec.rb +136 -0
  76. data/spec/models/completions_create_request_stop_spec.rb +31 -0
  77. data/spec/models/embeddings_create200_response_data_inner_spec.rb +46 -0
  78. data/spec/models/embeddings_create200_response_spec.rb +52 -0
  79. data/spec/models/embeddings_create200_response_usage_spec.rb +40 -0
  80. data/spec/models/embeddings_create_request_input_spec.rb +31 -0
  81. data/spec/models/embeddings_create_request_spec.rb +52 -0
  82. data/spec/models/error_response_error_spec.rb +12 -20
  83. data/spec/models/error_response_spec.rb +9 -17
  84. data/spec/spec_helper.rb +4 -5
  85. metadata +91 -77
  86. data/lib/azure_openai_client/models/deploymentid_embeddings_body.rb +0 -202
  87. data/lib/azure_openai_client/models/one_ofchat_completions_body_stop.rb +0 -198
  88. data/lib/azure_openai_client/models/one_ofdeploymentid_completions_body_prompt.rb +0 -198
  89. data/lib/azure_openai_client/models/one_ofdeploymentid_completions_body_stop.rb +0 -198
  90. data/spec/models/chat_completions_body_spec.rb +0 -102
  91. data/spec/models/deploymentid_completions_body_spec.rb +0 -144
  92. data/spec/models/deploymentid_embeddings_body_spec.rb +0 -36
  93. data/spec/models/deploymentsdeploymentidchatcompletions_messages_spec.rb +0 -58
  94. data/spec/models/inline_response_200_1_data_spec.rb +0 -54
  95. data/spec/models/inline_response_200_1_spec.rb +0 -60
  96. data/spec/models/inline_response_200_1_usage_spec.rb +0 -48
  97. data/spec/models/inline_response_200_2_choices_spec.rb +0 -54
  98. data/spec/models/inline_response_200_2_message_spec.rb +0 -52
  99. data/spec/models/inline_response_200_2_spec.rb +0 -72
  100. data/spec/models/inline_response_200_2_usage_spec.rb +0 -54
  101. data/spec/models/inline_response_200_choices_spec.rb +0 -60
  102. data/spec/models/inline_response_200_logprobs_spec.rb +0 -60
  103. data/spec/models/inline_response_200_spec.rb +0 -72
  104. data/spec/models/inline_response_200_usage_spec.rb +0 -54
  105. data/spec/models/one_ofchat_completions_body_stop_spec.rb +0 -36
  106. data/spec/models/one_ofdeploymentid_completions_body_prompt_spec.rb +0 -36
  107. data/spec/models/one_ofdeploymentid_completions_body_stop_spec.rb +0 -36
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-   metadata.gz: 0a16f8557725c03a595a0917831440f10db8b079067bff26f101db47ce55bfcf
-   data.tar.gz: d69ef18fd78150c8806034e6441028a4ac07a7c6ac0a01df023a1770673dd20f
+   metadata.gz: 379feaf10b0834ca74ac805a7418ade7e55432db2c612e2e55576c484eaa6495
+   data.tar.gz: a6682bb67eb79c92b83cf9e3b3c80d502b3a60b94fef61ee84c3f08f03163e10
 SHA512:
-   metadata.gz: ce73995a7ce5ac1488d3e41e9432cc55b2b2c6ab80008dbcaa3a0060ea3fb2e3ee263a93bcb1335fb2a428d64a8adeaad93b1764a4c2a1a38d596442e4417106
-   data.tar.gz: f56d3bf7714e0db8467f88788ed27df098461e98a509ad9c389dd30783d8abaa84d46276de5e8e02d2df51d4f385dbbffa9b5e0f54ed792cf3503d7a20d5aa81
+   metadata.gz: 1f0d01ca96b49e659895d5e8a5a77cd11e2f087afdb4616c4e196b4d3be297a543b99b4c16619d587251ff77f957c8327968bbee481ee34f92277e34af742f1c
+   data.tar.gz: f3c133d55d2f49941e1cf578894aeb539e00da1d201059e16eec309ae3535411cb8ef3b574d7d6367f0c6758bb10b4fd0894693eba1274a4f684035cd7a188e8
data/Gemfile ADDED
@@ -0,0 +1,10 @@
+ source 'https://rubygems.org'
+
+ gemspec
+
+ group :development, :test do
+   gem 'rake', '~> 13.0.1'
+   gem 'pry-byebug'
+   gem 'rubocop', '~> 1.35.1', require: false
+   gem 'rubocop-rspec', '~> 2.12.1', require: false
+ end
data/Gemfile.lock ADDED
@@ -0,0 +1,76 @@
+ PATH
+   remote: .
+   specs:
+     azure_openai_client (0.0.2)
+       typhoeus (~> 1.0, >= 1.0.1)
+
+ GEM
+   remote: https://rubygems.org/
+   specs:
+     ast (2.4.2)
+     byebug (11.1.3)
+     coderay (1.1.3)
+     diff-lcs (1.5.0)
+     ethon (0.16.0)
+       ffi (>= 1.15.0)
+     ffi (1.15.5)
+     json (2.6.3)
+     method_source (1.0.0)
+     parallel (1.23.0)
+     parser (3.2.2.1)
+       ast (~> 2.4.1)
+     pry (0.14.2)
+       coderay (~> 1.1)
+       method_source (~> 1.0)
+     pry-byebug (3.10.1)
+       byebug (~> 11.0)
+       pry (>= 0.13, < 0.15)
+     rainbow (3.1.1)
+     rake (13.0.6)
+     regexp_parser (2.8.0)
+     rexml (3.2.5)
+     rspec (3.12.0)
+       rspec-core (~> 3.12.0)
+       rspec-expectations (~> 3.12.0)
+       rspec-mocks (~> 3.12.0)
+     rspec-core (3.12.2)
+       rspec-support (~> 3.12.0)
+     rspec-expectations (3.12.3)
+       diff-lcs (>= 1.2.0, < 2.0)
+       rspec-support (~> 3.12.0)
+     rspec-mocks (3.12.5)
+       diff-lcs (>= 1.2.0, < 2.0)
+       rspec-support (~> 3.12.0)
+     rspec-support (3.12.0)
+     rubocop (1.35.1)
+       json (~> 2.3)
+       parallel (~> 1.10)
+       parser (>= 3.1.2.1)
+       rainbow (>= 2.2.2, < 4.0)
+       regexp_parser (>= 1.8, < 3.0)
+       rexml (>= 3.2.5, < 4.0)
+       rubocop-ast (>= 1.20.1, < 2.0)
+       ruby-progressbar (~> 1.7)
+       unicode-display_width (>= 1.4.0, < 3.0)
+     rubocop-ast (1.28.0)
+       parser (>= 3.2.1.0)
+     rubocop-rspec (2.12.1)
+       rubocop (~> 1.31)
+     ruby-progressbar (1.13.0)
+     typhoeus (1.4.0)
+       ethon (>= 0.9.0)
+     unicode-display_width (2.4.2)
+
+ PLATFORMS
+   arm64-darwin-22
+
+ DEPENDENCIES
+   azure_openai_client!
+   pry-byebug
+   rake (~> 13.0.1)
+   rspec (~> 3.6, >= 3.6.0)
+   rubocop (~> 1.35.1)
+   rubocop-rspec (~> 2.12.1)
+
+ BUNDLED WITH
+    2.4.8
data/README.md CHANGED
@@ -4,11 +4,11 @@ AzureOpenaiClient - the Ruby gem for the Azure OpenAI Service API
 
 Azure OpenAI APIs for completions and search
 
- This SDK is automatically generated by the [Swagger Codegen](https://github.com/swagger-api/swagger-codegen) project:
+ This SDK is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
 
 - API version: 2023-05-15
- - Package version: 1.0.0
- - Build package: io.swagger.codegen.v3.generators.ruby.RubyClientCodegen
+ - Package version: 0.0.2
+ - Build package: org.openapitools.codegen.languages.RubyClientCodegen
 
 ## Installation
 
@@ -23,21 +23,22 @@ gem build azure_openai_client.gemspec
 Then either install the gem locally:
 
 ```shell
- gem install ./azure_openai_client-1.0.0.gem
+ gem install ./azure_openai_client-0.0.2.gem
 ```
- (for development, run `gem install --dev ./azure_openai_client-1.0.0.gem` to install the development dependencies)
+
+ (for development, run `gem install --dev ./azure_openai_client-0.0.2.gem` to install the development dependencies)
 
 or publish the gem to a gem hosting service, e.g. [RubyGems](https://rubygems.org/).
 
 Finally add this to the Gemfile:
 
- gem 'azure_openai_client', '~> 1.0.0'
+ gem 'azure_openai_client', '~> 0.0.2'
 
 ### Install from Git
 
- If the Ruby gem is hosted at a git repository: https://github.com/GIT_USER_ID/GIT_REPO_ID, then add the following in the Gemfile:
+ If the Ruby gem is hosted at a git repository: https://github.com/etsenake/azure-openai-client, then add the following in the Gemfile:
 
- gem 'azure_openai_client', :git => 'https://github.com/GIT_USER_ID/GIT_REPO_ID.git'
+ gem 'azure_openai_client', :git => 'https://github.com/etsenake/azure-openai-client.git'
 
 ### Include the Ruby code directly
 
@@ -50,86 +51,42 @@ ruby -Ilib script.rb
 ## Getting Started
 
 Please follow the [installation](#installation) procedure and then run the following code:
+
 ```ruby
 # Load the gem
 require 'azure_openai_client'
+
 # Setup authorization
 AzureOpenaiClient.configure do |config|
 # Configure API key authorization: apiKey
- config.api_key['api-key'] = 'YOUR API KEY'
+ config.api_key['apiKey'] = 'YOUR API KEY'
 # Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
- #config.api_key_prefix['api-key'] = 'Bearer'
+ # config.api_key_prefix['apiKey'] = 'Bearer'
 
 # Configure OAuth2 access token for authorization: bearer
 config.access_token = 'YOUR ACCESS TOKEN'
+ # Configure a proc to get access tokens in lieu of the static access_token configuration
+ config.access_token_getter = -> { 'YOUR TOKEN GETTER PROC' }
 end
 
 api_instance = AzureOpenaiClient::DefaultApi.new
- body = AzureOpenaiClient::ChatCompletionsBody.new # ChatCompletionsBody |
 deployment_id = 'deployment_id_example' # String |
- api_version = 'api_version_example' # String |
-
+ api_version = '2023-05-15' # String |
+ chat_completions_create_request = AzureOpenaiClient::ChatCompletionsCreateRequest.new({messages: [AzureOpenaiClient::ChatCompletionsCreateRequestMessagesInner.new({role: 'system', content: 'content_example'})]}) # ChatCompletionsCreateRequest |
 
 begin
 #Creates a completion for the chat message
- result = api_instance.chat_completions_create(body, deployment_id, api_version)
+ result = api_instance.chat_completions_create(deployment_id, api_version, chat_completions_create_request)
 p result
 rescue AzureOpenaiClient::ApiError => e
 puts "Exception when calling DefaultApi->chat_completions_create: #{e}"
 end
- # Setup authorization
- AzureOpenaiClient.configure do |config|
- # Configure API key authorization: apiKey
- config.api_key['api-key'] = 'YOUR API KEY'
- # Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
- #config.api_key_prefix['api-key'] = 'Bearer'
-
- # Configure OAuth2 access token for authorization: bearer
- config.access_token = 'YOUR ACCESS TOKEN'
- end
-
- api_instance = AzureOpenaiClient::DefaultApi.new
- body = AzureOpenaiClient::DeploymentidCompletionsBody.new # DeploymentidCompletionsBody |
- deployment_id = 'deployment_id_example' # String |
- api_version = 'api_version_example' # String |
-
-
- begin
- #Creates a completion for the provided prompt, parameters and chosen model.
- result = api_instance.completions_create(body, deployment_id, api_version)
- p result
- rescue AzureOpenaiClient::ApiError => e
- puts "Exception when calling DefaultApi->completions_create: #{e}"
- end
- # Setup authorization
- AzureOpenaiClient.configure do |config|
- # Configure API key authorization: apiKey
- config.api_key['api-key'] = 'YOUR API KEY'
- # Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
- #config.api_key_prefix['api-key'] = 'Bearer'
-
- # Configure OAuth2 access token for authorization: bearer
- config.access_token = 'YOUR ACCESS TOKEN'
- end
 
- api_instance = AzureOpenaiClient::DefaultApi.new
- body = nil # Hash |
- deployment_id = 'deployment_id_example' # String | The deployment id of the model which was deployed.
- api_version = 'api_version_example' # String |
-
-
- begin
- #Get a vector representation of a given input that can be easily consumed by machine learning models and algorithms.
- result = api_instance.embeddings_create(body, deployment_id, api_version)
- p result
- rescue AzureOpenaiClient::ApiError => e
- puts "Exception when calling DefaultApi->embeddings_create: #{e}"
- end
 ```
 
 ## Documentation for API Endpoints
 
- All URIs are relative to *https://{endpoint}/openai*
+ All URIs are relative to *https://your-resource-name.openai.azure.com/openai*
 
 Class | Method | HTTP request | Description
 ------------ | ------------- | ------------- | -------------
@@ -137,42 +94,48 @@ Class | Method | HTTP request | Description
 *AzureOpenaiClient::DefaultApi* | [**completions_create**](docs/DefaultApi.md#completions_create) | **POST** /deployments/{deployment-id}/completions | Creates a completion for the provided prompt, parameters and chosen model.
 *AzureOpenaiClient::DefaultApi* | [**embeddings_create**](docs/DefaultApi.md#embeddings_create) | **POST** /deployments/{deployment-id}/embeddings | Get a vector representation of a given input that can be easily consumed by machine learning models and algorithms.
 
+
 ## Documentation for Models
 
- - [AzureOpenaiClient::ChatCompletionsBody](docs/ChatCompletionsBody.md)
- - [AzureOpenaiClient::DeploymentidCompletionsBody](docs/DeploymentidCompletionsBody.md)
- - [AzureOpenaiClient::DeploymentidEmbeddingsBody](docs/DeploymentidEmbeddingsBody.md)
- - [AzureOpenaiClient::DeploymentsdeploymentidchatcompletionsMessages](docs/DeploymentsdeploymentidchatcompletionsMessages.md)
+ - [AzureOpenaiClient::ChatCompletionsCreate200Response](docs/ChatCompletionsCreate200Response.md)
+ - [AzureOpenaiClient::ChatCompletionsCreate200ResponseChoicesInner](docs/ChatCompletionsCreate200ResponseChoicesInner.md)
+ - [AzureOpenaiClient::ChatCompletionsCreate200ResponseChoicesInnerMessage](docs/ChatCompletionsCreate200ResponseChoicesInnerMessage.md)
+ - [AzureOpenaiClient::ChatCompletionsCreate200ResponseUsage](docs/ChatCompletionsCreate200ResponseUsage.md)
+ - [AzureOpenaiClient::ChatCompletionsCreateRequest](docs/ChatCompletionsCreateRequest.md)
+ - [AzureOpenaiClient::ChatCompletionsCreateRequestMessagesInner](docs/ChatCompletionsCreateRequestMessagesInner.md)
+ - [AzureOpenaiClient::ChatCompletionsCreateRequestStop](docs/ChatCompletionsCreateRequestStop.md)
+ - [AzureOpenaiClient::CompletionsCreate200Response](docs/CompletionsCreate200Response.md)
+ - [AzureOpenaiClient::CompletionsCreate200ResponseChoicesInner](docs/CompletionsCreate200ResponseChoicesInner.md)
+ - [AzureOpenaiClient::CompletionsCreate200ResponseChoicesInnerLogprobs](docs/CompletionsCreate200ResponseChoicesInnerLogprobs.md)
+ - [AzureOpenaiClient::CompletionsCreate200ResponseUsage](docs/CompletionsCreate200ResponseUsage.md)
+ - [AzureOpenaiClient::CompletionsCreateRequest](docs/CompletionsCreateRequest.md)
+ - [AzureOpenaiClient::CompletionsCreateRequestPrompt](docs/CompletionsCreateRequestPrompt.md)
+ - [AzureOpenaiClient::CompletionsCreateRequestStop](docs/CompletionsCreateRequestStop.md)
+ - [AzureOpenaiClient::EmbeddingsCreate200Response](docs/EmbeddingsCreate200Response.md)
+ - [AzureOpenaiClient::EmbeddingsCreate200ResponseDataInner](docs/EmbeddingsCreate200ResponseDataInner.md)
+ - [AzureOpenaiClient::EmbeddingsCreate200ResponseUsage](docs/EmbeddingsCreate200ResponseUsage.md)
+ - [AzureOpenaiClient::EmbeddingsCreateRequest](docs/EmbeddingsCreateRequest.md)
+ - [AzureOpenaiClient::EmbeddingsCreateRequestInput](docs/EmbeddingsCreateRequestInput.md)
 - [AzureOpenaiClient::ErrorResponse](docs/ErrorResponse.md)
 - [AzureOpenaiClient::ErrorResponseError](docs/ErrorResponseError.md)
- - [AzureOpenaiClient::InlineResponse200](docs/InlineResponse200.md)
- - [AzureOpenaiClient::InlineResponse2001](docs/InlineResponse2001.md)
- - [AzureOpenaiClient::InlineResponse2001Data](docs/InlineResponse2001Data.md)
- - [AzureOpenaiClient::InlineResponse2001Usage](docs/InlineResponse2001Usage.md)
- - [AzureOpenaiClient::InlineResponse2002](docs/InlineResponse2002.md)
- - [AzureOpenaiClient::InlineResponse2002Choices](docs/InlineResponse2002Choices.md)
- - [AzureOpenaiClient::InlineResponse2002Message](docs/InlineResponse2002Message.md)
- - [AzureOpenaiClient::InlineResponse2002Usage](docs/InlineResponse2002Usage.md)
- - [AzureOpenaiClient::InlineResponse200Choices](docs/InlineResponse200Choices.md)
- - [AzureOpenaiClient::InlineResponse200Logprobs](docs/InlineResponse200Logprobs.md)
- - [AzureOpenaiClient::InlineResponse200Usage](docs/InlineResponse200Usage.md)
- - [AzureOpenaiClient::OneOfchatCompletionsBodyStop](docs/OneOfchatCompletionsBodyStop.md)
- - [AzureOpenaiClient::OneOfdeploymentidCompletionsBodyPrompt](docs/OneOfdeploymentidCompletionsBodyPrompt.md)
- - [AzureOpenaiClient::OneOfdeploymentidCompletionsBodyStop](docs/OneOfdeploymentidCompletionsBodyStop.md)
-
- ## Documentation for Authorization
 
 
- ### apiKey
+ ## Documentation for Authorization
 
- - **Type**: API key
- - **API key parameter name**: api-key
- - **Location**: HTTP header
 
+ Authentication schemes defined for the API:
 ### bearer
 
+
 - **Type**: OAuth
 - **Flow**: implicit
 - **Authorization URL**: https://login.microsoftonline.com/common/oauth2/v2.0/authorize
- - **Scopes**:
+ - **Scopes**: N/A
+
+ ### apiKey
+
+
+ - **Type**: API key
+ - **API key parameter name**: api-key
+ - **Location**: HTTP header
 
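The regenerated README keeps only the chat-completions walkthrough; the 0.0.1 examples for `completions_create` and `embeddings_create` were dropped. Below is a minimal sketch of how those two calls might look against 0.0.2, assuming the same `(deployment_id, api_version, request)` argument order used by `chat_completions_create` above and the request models listed under "Documentation for Models"; the constructor fields (`prompt:`, `max_tokens:`, `input:`) are assumptions taken from the generated model docs, not confirmed signatures.

```ruby
require 'azure_openai_client'

AzureOpenaiClient.configure do |config|
  config.api_key['apiKey'] = 'YOUR API KEY'
end

api_instance = AzureOpenaiClient::DefaultApi.new
deployment_id = 'deployment_id_example'
api_version = '2023-05-15'

begin
  # Completions: prompt is a String or Array<String> (CompletionsCreateRequestPrompt oneOf) -- assumed field name
  completions_request = AzureOpenaiClient::CompletionsCreateRequest.new(prompt: 'Say this is a test', max_tokens: 16)
  p api_instance.completions_create(deployment_id, api_version, completions_request)

  # Embeddings: input is a String or Array<String> (EmbeddingsCreateRequestInput oneOf) -- assumed field name
  embeddings_request = AzureOpenaiClient::EmbeddingsCreateRequest.new(input: 'The food was delicious')
  p api_instance.embeddings_create(deployment_id, api_version, embeddings_request)
rescue AzureOpenaiClient::ApiError => e
  puts "Exception when calling DefaultApi: #{e}"
end
```
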
data/Rakefile ADDED
@@ -0,0 +1,10 @@
+ require "bundler/gem_tasks"
+
+ begin
+   require 'rspec/core/rake_task'
+
+   RSpec::Core::RakeTask.new(:spec)
+   task default: :spec
+ rescue LoadError
+   # no rspec available
+ end
data/azure_openai_client.gemspec ADDED
@@ -0,0 +1,38 @@
+ # -*- encoding: utf-8 -*-
+
+ =begin
+ #Azure OpenAI Service API
+
+ #Azure OpenAI APIs for completions and search
+
+ The version of the OpenAPI document: 2023-05-15
+
+ Generated by: https://openapi-generator.tech
+ OpenAPI Generator version: 6.6.0
+
+ =end
+
+ $:.push File.expand_path("../lib", __FILE__)
+ require "azure_openai_client/version"
+
+ Gem::Specification.new do |s|
+   s.name = "azure_openai_client"
+   s.version = AzureOpenaiClient::VERSION
+   s.platform = Gem::Platform::RUBY
+   s.authors = ["Josh Etsenake"]
+   s.email = ["josh.etsenake@fullscript.com"]
+   s.homepage = "https://github.com/etsenake/azure-openai-client"
+   s.summary = "Azure OpenAI Service API Ruby Gem"
+   s.description = "Azure OpenAI APIs for completions and search"
+   s.license = "MIT"
+   s.required_ruby_version = ">= 2.7"
+
+   s.add_runtime_dependency 'typhoeus', '~> 1.0', '>= 1.0.1'
+
+   s.add_development_dependency 'rspec', '~> 3.6', '>= 3.6.0'
+
+   s.files = `find *`.split("\n").uniq.sort.select { |f| !f.empty? }
+   s.test_files = `find spec/*`.split("\n")
+   s.executables = []
+   s.require_paths = ["lib"]
+ end
data/docs/ChatCompletionsCreate200Response.md ADDED
@@ -0,0 +1,28 @@
+ # AzureOpenaiClient::ChatCompletionsCreate200Response
+
+ ## Properties
+
+ | Name | Type | Description | Notes |
+ | ---- | ---- | ----------- | ----- |
+ | **id** | **String** | | |
+ | **object** | **String** | | |
+ | **created** | **Integer** | | |
+ | **model** | **String** | | |
+ | **choices** | [**Array&lt;ChatCompletionsCreate200ResponseChoicesInner&gt;**](ChatCompletionsCreate200ResponseChoicesInner.md) | | |
+ | **usage** | [**ChatCompletionsCreate200ResponseUsage**](ChatCompletionsCreate200ResponseUsage.md) | | [optional] |
+
+ ## Example
+
+ ```ruby
+ require 'azure_openai_client'
+
+ instance = AzureOpenaiClient::ChatCompletionsCreate200Response.new(
+ id: null,
+ object: null,
+ created: null,
+ model: null,
+ choices: null,
+ usage: null
+ )
+ ```
+
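A short sketch of reading the assistant reply back out of this response type, assuming the attribute readers generated for the properties above and a `response` obtained from the `chat_completions_create` call shown in the README:

```ruby
# `response` is assumed to be an AzureOpenaiClient::ChatCompletionsCreate200Response,
# e.g. the value returned by api_instance.chat_completions_create(...) in the README example.
reply = response.choices.first&.message&.content
puts reply

# usage is optional per the table above, so guard before reading token counts.
puts "total tokens: #{response.usage.total_tokens}" if response.usage
```
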
data/docs/ChatCompletionsCreate200ResponseChoicesInner.md ADDED
@@ -0,0 +1,22 @@
+ # AzureOpenaiClient::ChatCompletionsCreate200ResponseChoicesInner
+
+ ## Properties
+
+ | Name | Type | Description | Notes |
+ | ---- | ---- | ----------- | ----- |
+ | **index** | **Integer** | | [optional] |
+ | **message** | [**ChatCompletionsCreate200ResponseChoicesInnerMessage**](ChatCompletionsCreate200ResponseChoicesInnerMessage.md) | | [optional] |
+ | **finish_reason** | **String** | | [optional] |
+
+ ## Example
+
+ ```ruby
+ require 'azure_openai_client'
+
+ instance = AzureOpenaiClient::ChatCompletionsCreate200ResponseChoicesInner.new(
+ index: null,
+ message: null,
+ finish_reason: null
+ )
+ ```
+
data/docs/ChatCompletionsCreate200ResponseChoicesInnerMessage.md ADDED
@@ -0,0 +1,20 @@
+ # AzureOpenaiClient::ChatCompletionsCreate200ResponseChoicesInnerMessage
+
+ ## Properties
+
+ | Name | Type | Description | Notes |
+ | ---- | ---- | ----------- | ----- |
+ | **role** | **String** | The role of the author of this message. | |
+ | **content** | **String** | The contents of the message | |
+
+ ## Example
+
+ ```ruby
+ require 'azure_openai_client'
+
+ instance = AzureOpenaiClient::ChatCompletionsCreate200ResponseChoicesInnerMessage.new(
+ role: null,
+ content: null
+ )
+ ```
+
data/docs/ChatCompletionsCreate200ResponseUsage.md ADDED
@@ -0,0 +1,22 @@
+ # AzureOpenaiClient::ChatCompletionsCreate200ResponseUsage
+
+ ## Properties
+
+ | Name | Type | Description | Notes |
+ | ---- | ---- | ----------- | ----- |
+ | **prompt_tokens** | **Integer** | | |
+ | **completion_tokens** | **Integer** | | |
+ | **total_tokens** | **Integer** | | |
+
+ ## Example
+
+ ```ruby
+ require 'azure_openai_client'
+
+ instance = AzureOpenaiClient::ChatCompletionsCreate200ResponseUsage.new(
+ prompt_tokens: null,
+ completion_tokens: null,
+ total_tokens: null
+ )
+ ```
+
data/docs/ChatCompletionsCreateRequest.md ADDED
@@ -0,0 +1,38 @@
+ # AzureOpenaiClient::ChatCompletionsCreateRequest
+
+ ## Properties
+
+ | Name | Type | Description | Notes |
+ | ---- | ---- | ----------- | ----- |
+ | **messages** | [**Array&lt;ChatCompletionsCreateRequestMessagesInner&gt;**](ChatCompletionsCreateRequestMessagesInner.md) | The messages to generate chat completions for, in the chat format. | |
+ | **temperature** | **Float** | What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or &#x60;top_p&#x60; but not both. | [optional][default to 1] |
+ | **top_p** | **Float** | An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or &#x60;temperature&#x60; but not both. | [optional][default to 1] |
+ | **n** | **Integer** | How many chat completion choices to generate for each input message. | [optional][default to 1] |
+ | **stream** | **Boolean** | If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a &#x60;data: [DONE]&#x60; message. | [optional][default to false] |
+ | **stop** | [**ChatCompletionsCreateRequestStop**](ChatCompletionsCreateRequestStop.md) | | [optional] |
+ | **max_tokens** | **Integer** | The maximum number of tokens allowed for the generated answer. By default, the number of tokens the model can return will be (4096 - prompt tokens). | [optional] |
+ | **presence_penalty** | **Float** | Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model&#39;s likelihood to talk about new topics. | [optional][default to 0] |
+ | **frequency_penalty** | **Float** | Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model&#39;s likelihood to repeat the same line verbatim. | [optional][default to 0] |
+ | **logit_bias** | **Object** | Modify the likelihood of specified tokens appearing in the completion. Accepts a json object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token. | [optional] |
+ | **user** | **String** | A unique identifier representing your end-user, which can help Azure OpenAI to monitor and detect abuse. | [optional] |
+
+ ## Example
+
+ ```ruby
+ require 'azure_openai_client'
+
+ instance = AzureOpenaiClient::ChatCompletionsCreateRequest.new(
+ messages: null,
+ temperature: 1,
+ top_p: 1,
+ n: 1,
+ stream: null,
+ stop: null,
+ max_tokens: null,
+ presence_penalty: null,
+ frequency_penalty: null,
+ logit_bias: null,
+ user: user-1234
+ )
+ ```
+
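The generated example above uses `null` placeholders. A filled-in sketch using the property names from the table (all values are illustrative only):

```ruby
require 'azure_openai_client'

request = AzureOpenaiClient::ChatCompletionsCreateRequest.new(
  messages: [
    AzureOpenaiClient::ChatCompletionsCreateRequestMessagesInner.new(role: 'system', content: 'You are a helpful assistant.'),
    AzureOpenaiClient::ChatCompletionsCreateRequestMessagesInner.new(role: 'user', content: 'Hello!')
  ],
  temperature: 0.2, # favour focused output; alter this or top_p, not both
  max_tokens: 256,
  user: 'user-1234' # end-user identifier for abuse monitoring
)
```
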
data/docs/ChatCompletionsCreateRequestMessagesInner.md ADDED
@@ -0,0 +1,22 @@
+ # AzureOpenaiClient::ChatCompletionsCreateRequestMessagesInner
+
+ ## Properties
+
+ | Name | Type | Description | Notes |
+ | ---- | ---- | ----------- | ----- |
+ | **role** | **String** | The role of the author of this message. | |
+ | **content** | **String** | The contents of the message | |
+ | **name** | **String** | The name of the user in a multi-user chat | [optional] |
+
+ ## Example
+
+ ```ruby
+ require 'azure_openai_client'
+
+ instance = AzureOpenaiClient::ChatCompletionsCreateRequestMessagesInner.new(
+ role: null,
+ content: null,
+ name: null
+ )
+ ```
+
data/docs/ChatCompletionsCreateRequestStop.md ADDED
@@ -0,0 +1,49 @@
+ # AzureOpenaiClient::ChatCompletionsCreateRequestStop
+
+ ## Class instance methods
+
+ ### `openapi_one_of`
+
+ Returns the list of classes defined in oneOf.
+
+ #### Example
+
+ ```ruby
+ require 'azure_openai_client'
+
+ AzureOpenaiClient::ChatCompletionsCreateRequestStop.openapi_one_of
+ # =>
+ # [
+ #   :'Array<String>',
+ #   :'String'
+ # ]
+ ```
+
+ ### build
+
+ Find the appropriate object from the `openapi_one_of` list and casts the data into it.
+
+ #### Example
+
+ ```ruby
+ require 'azure_openai_client'
+
+ AzureOpenaiClient::ChatCompletionsCreateRequestStop.build(data)
+ # => #<Array<String>:0x00007fdd4aab02a0>
+
+ AzureOpenaiClient::ChatCompletionsCreateRequestStop.build(data_that_doesnt_match)
+ # => nil
+ ```
+
+ #### Parameters
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | **data** | **Mixed** | data to be matched against the list of oneOf items |
+
+ #### Return type
+
+ - `Array<String>`
+ - `String`
+ - `nil` (if no type matches)
+
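A brief sketch of the documented `build` behaviour with concrete stop-sequence data (the inputs are illustrative):

```ruby
require 'azure_openai_client'

# A single stop sequence matches the String branch of the oneOf...
AzureOpenaiClient::ChatCompletionsCreateRequestStop.build("\n")

# ...a list of sequences matches the Array<String> branch...
AzureOpenaiClient::ChatCompletionsCreateRequestStop.build(["\n", '###'])

# ...and data matching neither branch returns nil, per the docs above.
AzureOpenaiClient::ChatCompletionsCreateRequestStop.build(42)
```
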
data/docs/CompletionsCreate200Response.md ADDED
@@ -0,0 +1,28 @@
+ # AzureOpenaiClient::CompletionsCreate200Response
+
+ ## Properties
+
+ | Name | Type | Description | Notes |
+ | ---- | ---- | ----------- | ----- |
+ | **id** | **String** | | |
+ | **object** | **String** | | |
+ | **created** | **Integer** | | |
+ | **model** | **String** | | |
+ | **choices** | [**Array&lt;CompletionsCreate200ResponseChoicesInner&gt;**](CompletionsCreate200ResponseChoicesInner.md) | | |
+ | **usage** | [**CompletionsCreate200ResponseUsage**](CompletionsCreate200ResponseUsage.md) | | [optional] |
+
+ ## Example
+
+ ```ruby
+ require 'azure_openai_client'
+
+ instance = AzureOpenaiClient::CompletionsCreate200Response.new(
+ id: null,
+ object: null,
+ created: null,
+ model: null,
+ choices: null,
+ usage: null
+ )
+ ```
+
data/docs/CompletionsCreate200ResponseChoicesInner.md ADDED
@@ -0,0 +1,24 @@
+ # AzureOpenaiClient::CompletionsCreate200ResponseChoicesInner
+
+ ## Properties
+
+ | Name | Type | Description | Notes |
+ | ---- | ---- | ----------- | ----- |
+ | **text** | **String** | | [optional] |
+ | **index** | **Integer** | | [optional] |
+ | **logprobs** | [**CompletionsCreate200ResponseChoicesInnerLogprobs**](CompletionsCreate200ResponseChoicesInnerLogprobs.md) | | [optional] |
+ | **finish_reason** | **String** | | [optional] |
+
+ ## Example
+
+ ```ruby
+ require 'azure_openai_client'
+
+ instance = AzureOpenaiClient::CompletionsCreate200ResponseChoicesInner.new(
+ text: null,
+ index: null,
+ logprobs: null,
+ finish_reason: null
+ )
+ ```
+
data/docs/CompletionsCreate200ResponseChoicesInnerLogprobs.md ADDED
@@ -0,0 +1,24 @@
+ # AzureOpenaiClient::CompletionsCreate200ResponseChoicesInnerLogprobs
+
+ ## Properties
+
+ | Name | Type | Description | Notes |
+ | ---- | ---- | ----------- | ----- |
+ | **tokens** | **Array&lt;String&gt;** | | [optional] |
+ | **token_logprobs** | **Array&lt;Float&gt;** | | [optional] |
+ | **top_logprobs** | **Array&lt;Hash&lt;String, Float&gt;&gt;** | | [optional] |
+ | **text_offset** | **Array&lt;Integer&gt;** | | [optional] |
+
+ ## Example
+
+ ```ruby
+ require 'azure_openai_client'
+
+ instance = AzureOpenaiClient::CompletionsCreate200ResponseChoicesInnerLogprobs.new(
+ tokens: null,
+ token_logprobs: null,
+ top_logprobs: null,
+ text_offset: null
+ )
+ ```
+
data/docs/CompletionsCreate200ResponseUsage.md ADDED
@@ -0,0 +1,22 @@
+ # AzureOpenaiClient::CompletionsCreate200ResponseUsage
+
+ ## Properties
+
+ | Name | Type | Description | Notes |
+ | ---- | ---- | ----------- | ----- |
+ | **completion_tokens** | **Float** | | |
+ | **prompt_tokens** | **Float** | | |
+ | **total_tokens** | **Float** | | |
+
+ ## Example
+
+ ```ruby
+ require 'azure_openai_client'
+
+ instance = AzureOpenaiClient::CompletionsCreate200ResponseUsage.new(
+ completion_tokens: null,
+ prompt_tokens: null,
+ total_tokens: null
+ )
+ ```
+
+