datasift 3.7.2 → 3.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +8 -0
  3. data/datasift.gemspec +6 -3
  4. data/lib/api/api_resource.rb +1 -1
  5. data/lib/datasift.rb +53 -16
  6. data/lib/errors.rb +8 -0
  7. data/lib/live_stream.rb +7 -7
  8. data/lib/pylon.rb +59 -26
  9. data/lib/tasks.rb +51 -0
  10. data/lib/version.rb +3 -0
  11. metadata +5 -83
  12. data/.gitignore +0 -12
  13. data/.rubocop.yml +0 -12
  14. data/.rubocop_todo.yml +0 -497
  15. data/.travis.yml +0 -17
  16. data/Gemfile +0 -15
  17. data/LICENSE +0 -19
  18. data/MIGRATING_TO_V.3.0.0.md +0 -262
  19. data/Rakefile +0 -10
  20. data/VERSION +0 -1
  21. data/examples/account_eg.rb +0 -33
  22. data/examples/account_identity_eg.rb +0 -50
  23. data/examples/account_identity_limit_eg.rb +0 -72
  24. data/examples/account_identity_token_eg.rb +0 -73
  25. data/examples/auth.rb +0 -55
  26. data/examples/cli.sh +0 -155
  27. data/examples/core_api_eg.rb +0 -50
  28. data/examples/historics_eg.rb +0 -66
  29. data/examples/historics_preview_eg.rb +0 -31
  30. data/examples/ingestion/twitter_gnip_batch_eg.rb +0 -61
  31. data/examples/live_stream_eg.rb +0 -92
  32. data/examples/managed_source_fb_eg.rb +0 -129
  33. data/examples/managed_source_ig_eg.rb +0 -126
  34. data/examples/pull.rb +0 -47
  35. data/examples/push_eg.rb +0 -51
  36. data/examples/pylon/pylon_api_v1.2_eg.rb +0 -166
  37. data/examples/pylon/pylon_api_v1.3_eg.rb +0 -201
  38. data/test/datasift/account/account_api_test.rb +0 -35
  39. data/test/datasift/core_api_test.rb +0 -179
  40. data/test/datasift/historics_preview_api_test.rb +0 -56
  41. data/test/datasift/odp/batch_upload_test.rb +0 -52
  42. data/test/datasift/push_api_test.rb +0 -238
  43. data/test/fixtures/cassettes/account/usage/default.json +0 -1
  44. data/test/fixtures/cassettes/account/usage/invalid.json +0 -1
  45. data/test/fixtures/cassettes/account/usage/valid_params.json +0 -1
  46. data/test/fixtures/cassettes/core/after_historic_dpu.json +0 -1
  47. data/test/fixtures/cassettes/core/balance_get.json +0 -1
  48. data/test/fixtures/cassettes/core/before_dpu.json +0 -1
  49. data/test/fixtures/cassettes/core/before_historic_dpu.json +0 -1
  50. data/test/fixtures/cassettes/core/compile_success.json +0 -1
  51. data/test/fixtures/cassettes/core/dpu_get_cost.json +0 -1
  52. data/test/fixtures/cassettes/core/dpu_throw_badrequest.json +0 -1
  53. data/test/fixtures/cassettes/core/historic_dpu.json +0 -1
  54. data/test/fixtures/cassettes/core/usage_success.json +0 -1
  55. data/test/fixtures/cassettes/core/validate_invalid_hash.json +0 -1
  56. data/test/fixtures/cassettes/core/validate_success_bool.json +0 -1
  57. data/test/fixtures/cassettes/core/validate_success_hash.json +0 -1
  58. data/test/fixtures/cassettes/odp/batch/after_upload.json +0 -1
  59. data/test/fixtures/cassettes/odp/batch/before_upload.json +0 -1
  60. data/test/fixtures/cassettes/odp/batch/upload_failure_no_source.json +0 -1
  61. data/test/fixtures/cassettes/odp/batch/upload_success.json +0 -1
  62. data/test/fixtures/cassettes/preview/before_preview_create.json +0 -1
  63. data/test/fixtures/cassettes/preview/before_preview_get.json +0 -1
  64. data/test/fixtures/cassettes/preview/preview_create_success.json +0 -1
  65. data/test/fixtures/cassettes/preview/preview_get_success.json +0 -1
  66. data/test/fixtures/cassettes/push/after_push_create.json +0 -1
  67. data/test/fixtures/cassettes/push/after_push_get.json +0 -1
  68. data/test/fixtures/cassettes/push/after_push_log.json +0 -1
  69. data/test/fixtures/cassettes/push/after_push_pause.json +0 -1
  70. data/test/fixtures/cassettes/push/after_push_resume.json +0 -1
  71. data/test/fixtures/cassettes/push/after_push_stop.json +0 -1
  72. data/test/fixtures/cassettes/push/after_push_update.json +0 -1
  73. data/test/fixtures/cassettes/push/before_push_create.json +0 -1
  74. data/test/fixtures/cassettes/push/before_push_delete.json +0 -1
  75. data/test/fixtures/cassettes/push/before_push_get.json +0 -1
  76. data/test/fixtures/cassettes/push/before_push_log.json +0 -1
  77. data/test/fixtures/cassettes/push/before_push_pause.json +0 -1
  78. data/test/fixtures/cassettes/push/before_push_resume.json +0 -1
  79. data/test/fixtures/cassettes/push/before_push_stop.json +0 -1
  80. data/test/fixtures/cassettes/push/before_push_update.json +0 -1
  81. data/test/fixtures/cassettes/push/push_create.json +0 -1
  82. data/test/fixtures/cassettes/push/push_delete.json +0 -1
  83. data/test/fixtures/cassettes/push/push_get_by_id.json +0 -1
  84. data/test/fixtures/cassettes/push/push_log_with_id.json +0 -1
  85. data/test/fixtures/cassettes/push/push_pause.json +0 -1
  86. data/test/fixtures/cassettes/push/push_resume.json +0 -1
  87. data/test/fixtures/cassettes/push/push_stop.json +0 -1
  88. data/test/fixtures/cassettes/push/push_update.json +0 -1
  89. data/test/fixtures/cassettes/push/push_validate.json +0 -1
  90. data/test/fixtures/data/fake_gnip_tweets.json +0 -10
  91. data/test/test_helper.rb +0 -49
data/.travis.yml DELETED
@@ -1,17 +0,0 @@
- language: ruby
- sudo: false
- cache: bundler
- bundler_args: --without development --retry=3 --jobs=3
-
- rvm:
-   - 1.9.3
-   - 2.0.0
-   - 2.1
-   - 2.2
-   - 2.3
-   - ruby-head
-
- matrix:
-   allow_failures:
-     - ruby-head
-   fast_finish: true
data/Gemfile DELETED
@@ -1,15 +0,0 @@
- source "https://rubygems.org"
-
- gem 'rake'
- gem 'yard'
-
- group :test do
-   gem 'minitest', '~> 5.0'
-   gem 'rubocop', '>= 0.27'
-   gem 'simplecov', '>= 0.9'
-   gem 'shoulda', '>= 2.11'
-   gem 'vcr', '~> 2.9'
-   gem 'webmock'
- end
-
- gemspec
data/LICENSE DELETED
@@ -1,19 +0,0 @@
- Copyright (c) 2011-2016 MediaSift Ltd
-
- Permission is hereby granted, free of charge, to any person obtaining a copy
- of this software and associated documentation files (the "Software"), to deal
- in the Software without restriction, including without limitation the rights
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- copies of the Software, and to permit persons to whom the Software is
- furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in
- all copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- THE SOFTWARE.
data/MIGRATING_TO_V.3.0.0.md DELETED
@@ -1,262 +0,0 @@
- MIGRATING TO V.3.0.0
- ================================
-
- Breaking Changes
- ----------------
- Earlier versions of the DataSift library are incompatible with 3.x.x. 3.0.0 is a complete re-design. In order to continue delivering better features and performance, some architectural changes have been made which make backwards compatibility very difficult and in some cases impractical.
-
- Features
- --------
- * Live streaming now uses multi-threaded WebSockets, so you can subscribe and unsubscribe from stream hashes.
- * This update ensures that the Ruby client library now supports all API features that were missing from prior versions of the client.
- * This includes adding support for Historics Previews, and the [Pull Connector](http://dev.datasift.com/blog/pullingdata).
- * Previous versions made API requests through a ```DataSift::User``` object. From v.3.0.0, we have moved to a more semantically correct ```DataSift::Client``` object.
-
- Code
- ====
-
- ## Authentication
- From v.3.0.0 of the Ruby client, we have dropped the concept of the ```DataSift::User``` object, and now use a ```DataSift::Client``` object for all API calls.
-
- ### Authentication: < 3.0.0
-
- ```ruby
- config = YAML::load(File.open(File.join(File.dirname(__FILE__), '..', 'config.yml')))
- user = DataSift::User.new(config['username'], config['api_key'])
- ```
-
- ### Authentication: 3.0.0+
- From v.3.0.0+ you begin by providing a configuration object to the DataSift client. From here the client instance gives you access to the rest of the DataSift API. This is organized in a similar way to the DataSift [REST API documentation](http://dev.datasift.com/docs/rest-api) except where it didn't make sense to do so.
-
- ```ruby
- @config = {:username => 'DATASIFT_USERNAME', :api_key => 'DATASIFT_API_KEY', :enable_ssl => true}
- @datasift = DataSift::Client.new(@config)
- ```
-
-
- ## Core
- * @datasift.valid? csdl
- * @datasift.compile csdl
- * @datasift.usage [period]
- * @datasift.dpu hash
- * @datasift.balance
-
- Below are examples of how to compile a CSDL statement, check its DPU cost, then check your API usage. These examples both assume you have correctly authenticated with the API.
-
- ### Core: < 3.0.0
- ```ruby
- csdl = 'interaction.content contains "datasift"'
- definition = user.createDefinition(csdl)
- dpu = definition.getDPUBreakdown()
- usage = user.getUsage()
- ```
-
- ### Core: 3.0.0+
- ```ruby
- csdl = 'interaction.content contains "datasift"'
- stream = @datasift.compile csdl
- dpu = @datasift.dpu stream[:data][:hash]
- usage = @datasift.usage
- ```
-
-
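The Core method list above also covers CSDL validation and the account balance, which the 3.0.0+ example does not exercise. A minimal sketch of those two calls (an editorial aside, not part of the original guide), reusing the `csdl` and `@datasift` objects from the example and assuming the same `[:data]` response shape used elsewhere:

```ruby
# Validate the CSDL before compiling it; valid? returns a boolean by default
puts 'CSDL is valid' if @datasift.valid? csdl

# Check the remaining balance on the account
balance = @datasift.balance
puts balance[:data].to_json
```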
- ## Live Streaming
- The Live Streaming API is now accessed via WebSockets using the [websocket_td](https://github.com/zcourts/websocket-td) gem, rather than streaming over HTTP. This allows us to use the ```stream.subscribe(hash, on_message)``` and ```stream.unsubscribe hash``` methods to asynchronously subscribe and unsubscribe from streams, while still streaming data.
- Please note, the examples below include only the mandatory callback methods, and do not include any additional error handling. The examples included in the client library itself do include some very basic error handling.
-
- ### Live Streaming: < 3.0.0
- ```ruby
- consumer.consume(true) do |interaction|
-   if interaction
-     puts interaction.to_s
-   end
- end
- ```
-
-
- ### Live Streaming: 3.0.0+
- ```ruby
- def stream(hash)
-   on_delete = lambda { |stream, m| puts m }
-   on_error = lambda { |stream, e| puts "An error has occurred: #{e}" }
-   on_message = lambda { |message, stream, hash| puts message }
-
-   on_datasift_message = lambda do |stream, message, hash|
-     puts "DataSift Message #{hash} ==> #{message}"
-   end
-
-   conn = DataSift::new_stream(@config, on_delete, on_error, on_open, on_close)
-   conn.on_datasift_message = on_datasift_message
-   conn.stream.read_thread.join
- end
- ```
-
- #### on_delete event
- on_delete is called when your stream receives a [delete notification](http://dev.datasift.com/docs/resources/twitter-deletes) from Twitter, notifying you that a Tweet you may have received has been deleted.
-
- #### on_error event
- on_error is called in cases where an exception occurs during streaming.
-
- #### on_message event
- on_message is called when we receive [user status messages](http://dev.datasift.com/docs/resources/twitter-user-status-messages) from Twitter.
-
-
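Note that the 3.0.0+ streaming example above passes `on_open` and `on_close` handlers to `DataSift::new_stream` without defining them. A minimal sketch of what they might look like, with signatures assumed by analogy with the other callbacks (not taken from the original guide):

```ruby
# Called once the WebSocket connection is open; a natural place to subscribe
on_open = lambda do |stream|
  stream.subscribe(hash, on_message)
end

# Called when the connection closes, e.g. to log why and clean up
on_close = lambda do |stream, message|
  puts "Connection closed: #{message}"
end
```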
- ## Push
- * @datasift.push.valid? @params
- * @datasift.push.create @params
- * @datasift.push.pause subscription_id
- * @datasift.push.resume subscription_id
- * @datasift.push.update @params.merge({:id => subscription_id, :name => 'Updated name'})
- * @datasift.push.stop subscription_id
- * @datasift.push.delete subscription_id
- * @datasift.push.log
- * @datasift.push.get_by_subscription subscription_id
- * @datasift.push.get
- * @datasift.pull
-
- Below are some simple examples, showing you how to create, pause, resume, update, get, stop, then delete a Push subscription:
-
- ### Push: < 3.0.0
- ```ruby
- definition = env.user.createDefinition(csdl)
-
- pushdef = env.user.createPushDefinition()
- pushdef.output_type = output_type
-
- # Add your output parameters to your Push Definition
- while env.args.size() > 0
-   k, v = env.args.shift.split('=', 2)
-   pushdef.output_params[k] = v
- end
-
- sub = pushdef.subscribeDefinition(definition, name)
-
- sub.pause()
- sub.resume()
- sub.save()
- sub.stop()
- sub.delete()
- ```
-
-
- ### Push: 3.0.0+
- ```ruby
- subscription = create_push(hash)
- subscription_id = subscription[:data][:id]
-
- @datasift.push.pause subscription_id
- @datasift.push.resume subscription_id
- @datasift.push.update @params.merge({:id => subscription_id, :name => 'New name'})
- @datasift.push.get_by_subscription subscription_id
- @datasift.push.stop subscription_id
- @datasift.push.delete subscription_id
- ```
-
-
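The Push method list above also includes the subscription log and the Pull connector, which the example leaves out. A minimal sketch (editorial, not from the original guide), assuming `subscription_id` refers to a subscription created with the `pull` output type:

```ruby
# Page through recent log entries for your Push subscriptions
log = @datasift.push.log
puts log[:data].to_json

# Collect interactions buffered for a pull subscription
@datasift.pull subscription_id
```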
- ## Historics
- * @datasift.historics.prepare(hash, start, end, 'My ruby historics')
- * @datasift.historics.start id
- * @datasift.historics.stop id
- * @datasift.historics.status(start, end_time)
- * @datasift.historics.update(id, 'The new name of my historics')
- * @datasift.historics.delete id
- * @datasift.historics.get_by_id id
- * @datasift.historics.get
-
- Below are some simple examples demonstrating how to check the status of the Historics archive for a given timeframe, prepare an Historic, then start, get, stop and delete the Historic.
-
- ### Historics: < 3.0.0
- ```ruby
- start_time = Time.now.to_i - 7200
- end_time = start_time + 3600
-
- # /historics/status not implemented in < 3.0.0
- definition = env.user.createDefinition(csdl)
- historic = definition.createHistoric(start_time, end_time, sources, sample, name)
-
- historic.prepare()
- historic.start()
- user.getHistoric(historic.id)
- historic.stop()
- historic.delete()
- ```
-
- ### Historics: 3.0.0+
- ```ruby
- start_time = Time.now.to_i - 7200
- end_time = start_time + 3600
-
- @datasift.historics.status(start_time, end_time)
-
- historics = @datasift.historics.prepare(hash, start_time, end_time, 'My Historic')
- id = historics[:data][:id]
-
- create_push(id, true)
-
- @datasift.historics.start id
- @datasift.historics.get_by_id id
- @datasift.historics.stop id
- @datasift.historics.delete id
- ```
-
- ## Historics Preview
- * @datasift.historics_preview.create(hash, parameters, start, end)
- * @datasift.historics_preview.get id
-
- Historics preview was not available before v.3.0.0. The example below demonstrates how to create, then get the results of an Historics preview:
-
- ### Historics Preview: 3.0.0+
- ```ruby
- parameters = 'interaction.author.link,targetVol,hour;interaction.type,freqDist,10'
- start = Time.now.to_i - (3600 * 48) # 48hrs ago
- source = @datasift.historics_preview.create(hash, parameters, start)
- @datasift.historics_preview.get source[:data][:id]
- ```
-
-
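Per the method list above, `historics_preview.create` also accepts an optional end timestamp. A minimal sketch bounding the preview to a 24-hour window; the `finish` variable is illustrative and not part of the original example:

```ruby
start  = Time.now.to_i - (3600 * 48) # 48hrs ago
finish = Time.now.to_i - (3600 * 24) # 24hrs ago
source = @datasift.historics_preview.create(hash, parameters, start, finish)
@datasift.historics_preview.get source[:data][:id]
```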
- ## Managed Sources
- * @datasift.managed_source.create(source_type, name, parameters, resources, auth)
- * @datasift.managed_source.update(id, source_type, name, parameters, resources, auth)
- * @datasift.managed_source.delete id
- * @datasift.managed_source.log id
- * @datasift.managed_source.get id
- * @datasift.managed_source.stop id
- * @datasift.managed_source.start id
-
- Below is a Managed Sources example, using each of the Managed Sources API endpoints:
-
- ### Managed Sources < 3.0.0
- ```ruby
- parameters = {:likes => true, :comments => true}
- resources = [{:parameters => {:type => 'tag', :value => 'coffee'}}]
- auth = [{:parameters => {:value => '10942112.1fb234f.8713bcf4d5b44ece801022f6fa4b9e1b'}}]
-
- user = DataSift::User.new(config['username'], config['api_key'], false)
- source = user.createManagedSource(:source_type => 'instagram', :name => '#Coffee Pics', :parameters => parameters, :resources => resources, :auth => auth)
-
- source.start
- user.getManagedSource(source.managed_source_id)
- user.getManagedSourcesLog(source.managed_source_id)
- source.stop
- source.delete
- ```
-
- ### Managed Sources 3.0.0+
- ```ruby
- parameters = {:likes => true, :comments => true}
- resources = [{:parameters => {:type => 'tag', :value => 'coffee'}}]
- auth = [{:parameters => {:value => '10942112.1fb234f.8713bcf4d5b44ece801022f6fa4b9e1b'}}]
-
- source = @datasift.managed_source.create('instagram', '#Coffee Pics', parameters, resources, auth)
- id = source[:data][:id]
-
- @datasift.managed_source.start id
- source = @datasift.managed_source.get id
- # Note that in the line below, we pass the auth object returned from a /source/get call back into the /source/update statement. Passing the original auth object will fail
- @datasift.managed_source.update(id, 'instagram', 'Updated source name', parameters, resources, source[:data][:auth])
- @datasift.managed_source.log id
- @datasift.managed_source.stop id
- @datasift.managed_source.delete id
- ```
-
-
data/Rakefile DELETED
@@ -1,10 +0,0 @@
- require 'bundler/gem_tasks'
- require 'rake/testtask'
-
- Rake::TestTask.new(:test) do |test|
-   test.libs << 'lib' << 'test'
-   test.pattern = 'test/**/*.rb'
-   test.verbose = true
- end
-
- task :default => :test
data/VERSION DELETED
@@ -1 +0,0 @@
- 3.7.2
data/examples/account_eg.rb DELETED
@@ -1,33 +0,0 @@
- require './auth'
- class AccountEg < DataSiftExample
-   def initialize
-     super
-     @datasift = DataSift::Client.new(@config)
-     run
-   end
-
-   def run
-     begin
-       puts "Get account usage for the default period"
-       puts @datasift.account.usage[:data].to_json
-
-       puts "\nGet account usage for the past month"
-       puts @datasift.account.usage('monthly')[:data].to_json
-
-     rescue DataSiftError => dse
-       puts dse.message
-       # Then match specific error to take action;
-       # All errors thrown by the client extend DataSiftError
-       case dse
-       when ConnectionError
-         # some connection error
-       when AuthError
-       when BadRequestError
-       else
-         # do something else...
-       end
-     end
-   end
- end
-
- AccountEg.new
data/examples/account_identity_eg.rb DELETED
@@ -1,50 +0,0 @@
- require './auth'
- class AccountIdentityEg < DataSiftExample
-   def initialize
-     super
-     @datasift = DataSift::Client.new(@config)
-     run
-   end
-
-   def run
-     begin
-       puts "Create a new identity"
-       identity = @datasift.account_identity.create(
-         "Ruby Identity #{DateTime.now}", "active", false
-       )
-       identity_id = identity[:data][:id]
-       puts identity[:data].to_json
-
-       puts "\nList all existing identities"
-       puts @datasift.account_identity.list[:data].to_json
-
-       puts "\nGet existing identity"
-       puts @datasift.account_identity.get(identity_id)[:data].to_json
-
-       puts "\nUpdate an identity"
-       puts @datasift.account_identity.update(
-         identity_id, "Updated Ruby Identity #{DateTime.now}"
-       )[:data].to_json
-
-       puts "\nDelete an identity"
-       @datasift.account_identity.delete(identity_id)
-
-     rescue DataSiftError => dse
-       puts dse.inspect
-       # Then match specific error to take action;
-       # All errors thrown by the client extend DataSiftError
-       case dse
-       when ConnectionError
-         # some connection error
-       when AuthError
-       when BadRequestError
-       else
-         # do something else...
-       end
-       puts "\nClean up and delete the identity"
-       @datasift.account_identity.delete(identity_id)
-     end
-   end
- end
-
- AccountIdentityEg.new
data/examples/account_identity_limit_eg.rb DELETED
@@ -1,72 +0,0 @@
- require './auth'
- class AccountIdentityLimitEg < DataSiftExample
-   def initialize
-     super
-     @datasift = DataSift::Client.new(@config)
-     run
-   end
-
-   def run
-     begin
-       puts "Create a new identity to apply Limits to"
-       identity = @datasift.account_identity.create(
-         "Ruby Identity for Token Limits", "active", false
-       )
-       identity_id = identity[:data][:id]
-       puts identity[:data].to_json
-
-       puts "\nCreate a Limit for our Identity"
-       puts @datasift.account_identity_limit.create(
-         identity_id,
-         'facebook',
-         100_000,
-         50
-       )[:data].to_json
-
-       puts "\nList all existing Limits for this Service"
-       puts @datasift.account_identity_limit.list(
-         'facebook'
-       )[:data].to_json
-
-       puts "\nGet existing Limit by Identity and Service"
-       puts @datasift.account_identity_limit.get(
-         identity_id,
-         'facebook'
-       )[:data].to_json
-
-       puts "\nUpdate just the daily interaction limit for a Limit for a given Identity"
-       puts @datasift.account_identity_limit.update(
-         identity_id,
-         'facebook',
-         250_000,
-         nil
-       )[:data].to_json
-
-       puts "\nRemove the Limit from a given Identity and Service"
-       @datasift.account_identity_limit.delete(
-         identity_id,
-         'facebook'
-       )
-
-       puts "\nCleanup and remove the Identity"
-       @datasift.account_identity.delete(identity_id)
-
-     rescue DataSiftError => dse
-       puts dse.inspect
-       # Then match specific error to take action;
-       # All errors thrown by the client extend DataSiftError
-       case dse
-       when ConnectionError
-         # some connection error
-       when AuthError
-       when BadRequestError
-       else
-         # do something else...
-       end
-       puts "\nCleanup and remove the Identity"
-       @datasift.account_identity.delete(identity_id)
-     end
-   end
- end
-
- AccountIdentityLimitEg.new