thinkingdata-ruby 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA1:
3
+ metadata.gz: 8675f2f2b9e9e65ef523eb61d868ab087a048105
4
+ data.tar.gz: a469f38692b16bcff8d1564b954a8691b3161cbf
5
+ SHA512:
6
+ metadata.gz: 48b4d1a412fe55788a7be4a5bbb357ff6613669126ce03799b4a999bb647135266b363aef32ed002163bbb551b82b675199b10e3f6ceeceb8b0abdc61276f6c4
7
+ data.tar.gz: 407c50cf1a0e731ca71b3c786d645cf389c16fa5bc82a13e9e708c5b718eab3edbf1a949aa229300580d4ec5e7acf0f80436660cfbd955d35a5b89e8726e784c
data/CHANGELOG.md ADDED
@@ -0,0 +1,4 @@
1
+ **v1.0.0** (2019-11-20)
2
+ - Support three reporting modes: DebugConsumer, BatchConsumer, LoggerConsumer.
3
+ - Support event tracking and user property reporting.
4
+ - Support super (common) event properties.
data/Gemfile ADDED
@@ -0,0 +1,7 @@
1
+ # frozen_string_literal: true
2
+
3
+ source "https://rubygems.org"
4
+
5
+ git_source(:github) {|repo_name| "https://github.com/#{repo_name}" }
6
+
7
+ # gem "rails"
data/LICENSE ADDED
@@ -0,0 +1,201 @@
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "{}"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright 2019 thinking-analytics / data-collector / server-sdk
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
data/README.md ADDED
@@ -0,0 +1,202 @@
1
+ # ThinkingData Analytics API for Ruby
2
+
3
+ thinkingdata-ruby is the Ruby SDK that ThinkingData provides to customers for importing their user data. It supports Ruby 2.0 and above. For more details, see the [ThinkingData official website](https://www.thinkingdata.cn).
4
+
5
+ ### I. Integrate the SDK
6
+
7
+ #### 1. Install the SDK
8
+
9
+ ```sh
10
+ # Install the SDK
11
+ gem install thinkingdata-ruby
12
+ ```
13
+ #### 2. Create an SDK instance
14
+ First, require `thinkingdata-ruby` at the top of your code file:
15
+ ```ruby
16
+ require 'thinkingdata-ruby'
17
+ ```
18
+
19
+ To upload data with the SDK, first create a `TDAnalytics::Tracker` object. `TDAnalytics::Tracker` is the core class for data reporting; use it to report event data and update user properties. Creating a `Tracker` requires a consumer object, which determines how the formatted data is handled (written to a local log file or uploaded to the server).
20
+
21
+ ```ruby
22
+ ta = TDAnalytics::Tracker.new(consumer)
23
+ ta.track(event_name: 'your_event', distinct_id: 'distinct_id_of_user')
24
+ ```
25
+ TDAnalytics provides three consumer implementations:
26
+
27
+ **(1) LoggerConsumer**: writes data to local files in real time, rotated daily or hourly. It must be used together with LogBus to upload the data.
28
+ ```ruby
29
+ # By default, writes to files in the current directory, rotated daily, e.g. tda.log.2019-11-15
30
+ consumer = TDAnalytics::LoggerConsumer.new
31
+
32
+ # The configuration can be changed; the following creates a LoggerConsumer that writes to /path/to/log/demolog.2019-11-15-18 (18 is the hour)
33
+ consumer = TDAnalytics::LoggerConsumer.new('/path/to/log', 'hourly', prefix: 'demolog')
34
+ ```
35
+
36
+ **(2) DebugConsumer**: sends data to the TA server one record at a time, in real time, and returns detailed error messages when the data format is invalid. It is recommended to validate your data format with DebugConsumer first. Initialize it with the receiver URL and your project APP ID.
37
+ ```ruby
38
+ # Create a DebugConsumer
39
+ consumer = TDAnalytics::DebugConsumer.new(SERVER_URL, YOUR_APPID)
40
+ ```
41
+
42
+ **(3) BatchConsumer**: sends data to the TA server in batches and does not require a separate transfer tool. Data may be lost under poor network conditions, so heavy use in production is not recommended. Initialize it with the receiver URL and your project APP ID.
43
+
44
+ BatchConsumer buffers the data first and triggers an upload once the number of records reaches the configured buffer limit (max_buffer_length, 20 by default). You can also pass an integer argument when initializing the SDK to configure the buffer size:
45
+ ```ruby
46
+ # BatchConsumer: data is buffered and uploaded once the configured count is reached (20 by default)
47
+ consumer = TDAnalytics::BatchConsumer.new(SERVER_URL, YOUR_APPID)
48
+
49
+ # Create a BatchConsumer with a buffer size of 3
50
+ consumer = TDAnalytics::BatchConsumer.new(SERVER_URL, YOUR_APPID, 3)
51
+ ```
52
+
53
+ You can also pass in your own Consumer implementation; it only needs to implement the following interface (see the sketch after this list):
54
+ - add(message): (required) accepts a Hash data object
55
+ - flush: (optional) sends buffered data to the configured destination
56
+ - close: (optional) can be called before the program exits to ensure a clean shutdown
57
+
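+ A minimal sketch of a custom consumer, assuming you only want to print the formatted records to stdout; the class name `StdoutConsumer` is illustrative and not part of the SDK:
+ ```ruby
+ require 'json'
+
+ class StdoutConsumer
+   # required: receive one formatted record (a Hash) from the Tracker
+   def add(message)
+     puts message.to_json
+   end
+
+   # optional: nothing is buffered here, so flush has nothing to do
+   def flush
+   end
+
+   # optional: called before the program exits
+   def close
+   end
+ end
+
+ ta = TDAnalytics::Tracker.new(StdoutConsumer.new)
+ ```
+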
58
+ #### 3. Report data
59
+ Once the SDK is initialized, use the `ta` instance's methods to report data.
60
+
61
+ ### II. Usage Examples
62
+
63
+ #### 1. Send events
64
+ Call track to upload an event. It is recommended to set event properties and the conditions for sending them according to your pre-defined tracking plan. Example:
65
+ ```ruby
66
+ # Define the event data
67
+ event = {
68
+ # event name (required)
69
+ event_name: 'test_event',
70
+ # account ID (optional)
71
+ account_id: 'ruby_test_aid',
72
+ # distinct (visitor) ID (optional); account ID and distinct ID cannot both be empty
73
+ distinct_id: 'ruby_distinct_id',
74
+ # event time (optional); if omitted, the time of the call is used
75
+ time: Time.now,
76
+ # event IP (optional); if provided, the backend can resolve the location
77
+ ip: '202.38.64.1',
78
+ # event properties (optional)
79
+ properties: {
80
+ prop_date: Time.now,
81
+ prop_double: 134.1,
82
+ prop_string: 'hello world',
83
+ prop_bool: true,
84
+ },
85
+ # skip local format validation (optional)
86
+ # skip_local_check: true,
87
+ }
88
+
89
+ # Upload the event
90
+ ta.track(event)
91
+ ```
92
+
93
+ Parameter notes:
94
+ * The event name must start with a letter and may contain digits, letters, and the underscore "_"; it can be at most 50 characters long and is case-insensitive
95
+ * Event properties are a Hash in which each element represents one property
96
+ * The property key is the property name, a string that must start with a letter and may contain digits, letters, and the underscore "_"; it can be at most 50 characters long and is case-insensitive
97
+ * The property value is the value of that property; String, numeric types, boolean, and Time are supported
98
+
99
+ The SDK validates the data format locally. To skip local validation, pass the skip_local_check argument when calling track, as shown below.
100
+
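+ A short sketch of skipping local validation (the event fields shown are just examples); this only skips the SDK's local property checks, the server still validates the data:
+ ```ruby
+ ta.track(
+   event_name: 'test_event',
+   distinct_id: 'ruby_distinct_id',
+   properties: { prop_string: 'hello world' },
+   # skip the SDK's local format validation for this call only
+   skip_local_check: true
+ )
+ ```
+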
101
+ #### 2. Set super event properties
102
+ Super event properties are included in every event. Dynamic super properties can also be set; if the same property exists in both, the dynamic super property takes precedence.
103
+
104
+ ```ruby
105
+ # Define super properties
106
+ super_properties = {
107
+ super_string: 'super_string',
108
+ super_int: 1,
109
+ super_bool: false,
110
+ super_date: Time.rfc2822("Thu, 26 Oct 2019 02:26:12 +0545")
111
+ }
112
+
113
+ # Set super event properties; they are added to every event
114
+ ta.set_super_properties(super_properties)
115
+
116
+ # Clear super event properties
117
+ ta.clear_super_properties
118
+ ```
119
+
120
+ #### 3. Set user properties
121
+ For regular user properties, call user_set. Properties uploaded through this interface overwrite existing values; if a property does not exist yet, it is created:
122
+ ```ruby
123
+ # Define the user property data
124
+ user_data = {
125
+ # account ID (optional)
126
+ account_id: 'ruby_test_aid',
127
+ # distinct (visitor) ID (optional); account ID and distinct ID cannot both be empty
128
+ distinct_id: 'ruby_distinct_id',
129
+ # user properties
130
+ properties: {
131
+ prop_date: Time.now,
132
+ prop_double: 134.12,
133
+ prop_string: 'hello',
134
+ prop_int: 666,
135
+ },
136
+ }
137
+
138
+ # Set user properties
139
+ ta.user_set(user_data)
140
+ ```
141
+ If a user property should only be set once, call user_set_once; if the property already has a value, the new value is ignored:
142
+ ```ruby
143
+ # Set user properties; if a property with the same name already exists, the new value is ignored
144
+ ta.user_set_once(user_data)
145
+ ```
146
+ For numeric properties, call user_add to increment the value; if the property has not been set yet, it is initialized to 0 before the increment:
147
+ ```ruby
148
+ # Increment numeric user properties
149
+ ta.user_add(distinct_id: 'ruby_distinct_id', properties: {prop_int: 10, prop_double: 15.88})
150
+ ```
151
+
152
+ To remove the value of a user property, call user_unset.
153
+ ```ruby
154
+ # Remove a single user property
155
+ ta.user_unset(distinct_id: 'ruby_distinct_id', property: :prop_string)
156
+
157
+ # Remove a set of user properties
158
+ ta.user_unset(distinct_id: 'ruby_distinct_id', property: [:prop_a, :prop_b, :prop_c])
159
+ ```
160
+
161
+ To delete a user, call user_del. Afterward you will no longer be able to query that user's properties, but events generated by the user can still be queried:
162
+ ```ruby
163
+ # Delete the user
164
+ ta.user_del(
165
+ # account ID (optional)
166
+ account_id: 'ruby_test_aid',
167
+ # distinct (visitor) ID (optional); account ID and distinct ID cannot both be empty
168
+ distinct_id: 'ruby_distinct_id',
169
+ )
170
+ ```
171
+
172
+ #### 4. Flush data immediately
173
+ This behavior depends on the specific Consumer implementation. When data arrives, a Consumer may keep it in a buffer and only perform the actual I/O under certain conditions to improve overall performance. If you need to submit data immediately, call flush:
174
+ ```ruby
175
+ # Submit data to the receiver immediately
176
+ ta.flush
177
+ ```
178
+
179
+ #### 5. Shut down the SDK
180
+ Call this before your program exits to avoid losing buffered data:
181
+ ```ruby
182
+ # Close and shut down the SDK
183
+ ta.close
184
+ ```
185
+
186
+ #### 6. Other notes
187
+ By default, all errors other than invalid initialization parameters are ignored. If you want to handle errors raised by API calls yourself, pass in a custom error handler.
188
+
189
+ ```ruby
190
+ # (optional) Define an error handler; it is called whenever an error occurs
191
+ class MyErrorHandler < TDAnalytics::ErrorHandler
192
+ def handle(error)
193
+ puts error
194
+ raise error
195
+ end
196
+ end
197
+ my_error_handler = MyErrorHandler.new
198
+
199
+ # Create the TA instance; the first argument is any kind of Consumer, the second is optional and, if set, is called when an error occurs
200
+ ta = TDAnalytics::Tracker.new(consumer, my_error_handler, uuid: true)
201
+ ```
202
+ If uuid is true, every record is reported with a random UUID as the value of the #uuid property. The value is not stored; it is only used by the backend for duplicate detection.
data/demo/demo.rb ADDED
@@ -0,0 +1,120 @@
1
+ $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
2
+
3
+ require 'thinkingdata-ruby'
4
+ require 'time'
5
+ #require 'pry'
6
+
7
+ if __FILE__ == $0
8
+ # Replace DEMO_APPID with your project's APP ID
9
+ DEMO_APPID = 'b2a61feb9e56472c90c5bcb320dfb4ef'
10
+ # Replace SERVER_URL with your project's URL
11
+ SERVER_URL = 'https://sdk.tga.thinkinggame.cn'
12
+
13
+ # account ID
14
+ DEMO_ACCOUNT_ID = 'ruby_demo_aid'
15
+ # distinct (visitor) ID
16
+ DEMO_DISTINCT_ID = 'ruby_demo_did'
17
+
18
+ # (optional) Define an error handler; it is called whenever an error occurs
19
+ class MyErrorHandler < TDAnalytics::ErrorHandler
20
+ def handle(error)
21
+ puts error
22
+ raise error
23
+ end
24
+ end
25
+ my_error_handler = MyErrorHandler.new
26
+
27
+ # Define the consumer: a consumer implements interfaces such as add, flush, and close, and stores or sends the SDK-formatted data to the receiver in different ways
28
+ consumer = nil
29
+ case ARGV[0]
30
+ when '0'
31
+ # LoggerConsumer: data is written to local files (current directory, hourly rotation, prefix demolog); use LogBus to upload the data to the TA server
32
+ consumer = TDAnalytics::LoggerConsumer.new '.', 'hourly', prefix: 'demolog'
33
+ when '1'
34
+ # DebugConsumer: data is reported to the TA server one record at a time, synchronously; detailed error messages are returned on failure
35
+ consumer = TDAnalytics::DebugConsumer.new(SERVER_URL, DEMO_APPID)
36
+ when '2'
37
+ # BatchConsumer: data is buffered and uploaded once the configured count is reached (20 by default)
38
+ consumer = TDAnalytics::BatchConsumer.new(SERVER_URL, DEMO_APPID, 3)
39
+ else
40
+ # LoggerConsumer: data is written to local files (current directory, daily rotation, prefix tda.log); use LogBus to upload the data to the TA server
41
+ consumer = TDAnalytics::LoggerConsumer.new
42
+ end
43
+
44
+ # Create the TA instance; the first argument is any kind of Consumer, the second is optional and, if set, is called when an error occurs
45
+ ta = TDAnalytics::Tracker.new(consumer, my_error_handler, uuid: true)
46
+
47
+ # Define super properties
48
+ super_properties = {
49
+ super_string: 'super_string',
50
+ super_int: 1,
51
+ super_bool: false,
52
+ super_date: Time.rfc2822("Thu, 26 Oct 2019 02:26:12 +0545")
53
+ }
54
+
55
+ # Set super event properties; they are added to every event
56
+ ta.set_super_properties(super_properties)
57
+
58
+ # Define the event data
59
+ event = {
60
+ # event name (required)
61
+ event_name: 'test_event',
62
+ # account ID (optional)
63
+ account_id: DEMO_ACCOUNT_ID,
64
+ # distinct (visitor) ID (optional); account ID and distinct ID cannot both be empty
65
+ distinct_id: DEMO_DISTINCT_ID,
66
+ # event time (optional); if omitted, the time of the call is used
67
+ time: Time.now,
68
+ # event IP (optional); if provided, the backend can resolve the location
69
+ ip: '202.38.64.1',
70
+ # event properties (optional)
71
+ properties: {
72
+ prop_date: Time.now,
73
+ prop_double: 134.1,
74
+ prop_string: 'hello world',
75
+ prop_bool: true,
76
+ },
77
+ }
78
+
79
+ # Report the event
80
+ 5.times do
81
+ ta.track(event)
82
+ ta.clear_super_properties
83
+ end
84
+
85
+ # Define the user property data
86
+ user_data = {
87
+ # account ID (optional)
88
+ account_id: DEMO_ACCOUNT_ID,
89
+ # distinct (visitor) ID (optional); account ID and distinct ID cannot both be empty
90
+ distinct_id: DEMO_DISTINCT_ID,
91
+ # user properties
92
+ properties: {
93
+ prop_date: Time.now,
94
+ prop_double: 134.12,
95
+ prop_string: 'hello',
96
+ prop_int: 666,
97
+ },
98
+ }
99
+ # Set user properties, overwriting properties with the same name
100
+ ta.user_set(user_data)
101
+
102
+ # Set user properties without overwriting properties that have already been set
103
+ user_data[:properties][:prop_int_new] = 800
104
+ ta.user_set_once(user_data)
105
+
106
+ # Remove a single user property
107
+ # ta.user_unset(distinct_id: DEMO_DISTINCT_ID, property: :prop_string)
108
+
109
+ # Increment numeric user properties
110
+ ta.user_add(distinct_id: DEMO_DISTINCT_ID, properties: {prop_int: 10, prop_double: 15.88})
111
+
112
+ # Delete the user. Event data generated before this operation is not deleted
113
+ # ta.user_del(distinct_id: DEMO_DISTINCT_ID)
114
+
115
+ #binding.pry
116
+
117
+ # Call this before exiting
118
+ ta.close
119
+ end
120
+
data/lib/thinkingdata-ruby/batch_consumer.rb ADDED
@@ -0,0 +1,105 @@
1
+ require 'base64'
2
+ require 'json'
3
+ require 'net/http'
4
+
5
+ module TDAnalytics
6
+ # BatchConsumer sends data in batches, synchronously.
7
+ # Incoming data is first added to a local buffer; an upload is triggered once the record count reaches the limit.
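+ #
+ # Usage sketch (the URL and app id below are placeholders, not real values):
+ #   consumer = TDAnalytics::BatchConsumer.new('https://YOUR_RECEIVER_URL', 'YOUR_APP_ID', 10)
+ #   tracker = TDAnalytics::Tracker.new(consumer)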
8
+ class BatchConsumer
9
+ # default buffer size
10
+ MAX_LENGTH = 20
11
+
12
+ def initialize(server_url, app_id, max_buffer_length=MAX_LENGTH)
13
+ @server_uri = URI.parse(server_url)
14
+ @server_uri.path = '/logagent'
15
+ @app_id = app_id
16
+ @max_length = [max_buffer_length, MAX_LENGTH].min
17
+ @buffers = []
18
+ end
19
+
20
+ def add(message)
21
+ @buffers << message
22
+ flush if @buffers.length >= @max_length
23
+ end
24
+
25
+ def close
26
+ flush
27
+ end
28
+
29
+ def flush
30
+ begin
31
+ @buffers.each_slice(@max_length) do |chunk|
32
+ wio = StringIO.new("w")
33
+ gzip_io = Zlib::GzipWriter.new(wio)
34
+ gzip_io.write(chunk.to_json)
35
+ gzip_io.close
36
+ data = Base64.encode64(wio.string).gsub("\n", '')
37
+
38
+ headers = {'Content-Type' => 'application/plaintext', 'appid' => @app_id}
39
+ request = CaseSensitivePost.new(@server_uri.request_uri, headers)
40
+ request.body = data
41
+
42
+ begin
43
+ response_code, response_body = _request(@server_uri, request)
44
+ rescue => e
45
+ raise ConnectionError.new("Could not connect to TA server, with error \"#{e.message}\".")
46
+ end
47
+
48
+ result = {}
49
+ if response_code.to_i == 200
50
+ begin
51
+ result = JSON.parse(response_body.to_s)
52
+ rescue JSON::JSONError
53
+ raise ServerError.new("Could not interpret TA server response: '#{response_body}'")
54
+ end
55
+ end
56
+
57
+ if result['code'] != 0
58
+ raise ServerError.new("Could not write to TA, server responded with #{response_code} returning: '#{response_body}'")
59
+ end
60
+ end
61
+ rescue
62
+ raise
63
+ end
64
+ @buffers = []
65
+ end
66
+
67
+ private
68
+ def _request(uri, request)
69
+ client = Net::HTTP.new(uri.host, uri.port)
70
+ client.use_ssl = uri.scheme === 'https' ? true : false
71
+ client.open_timeout = 10
72
+ client.continue_timeout = 10
73
+ client.read_timeout = 10
74
+ client.ssl_timeout = 10
75
+
76
+ response = client.request(request)
77
+ [response.code, response.body]
78
+ end
79
+ end
80
+
81
+ # Internal use only: keeps header names lowercase for compatibility with older server versions
82
+ class CaseSensitivePost < Net::HTTP::Post
83
+ def initialize_http_header(headers)
84
+ @header = {}
85
+ headers.each{|k,v| @header[k.to_s] = [v] }
86
+ end
87
+
88
+ def [](name)
89
+ @header[name.to_s]
90
+ end
91
+
92
+ def []=(name, val)
93
+ if val
94
+ @header[name.to_s] = [val]
95
+ else
96
+ @header.delete(name.to_s)
97
+ end
98
+ end
99
+
100
+ def capitalize(name)
101
+ name
102
+ end
103
+ end
104
+
105
+ end
data/lib/thinkingdata-ruby/debug_consumer.rb ADDED
@@ -0,0 +1,56 @@
1
+ require 'json'
2
+ require 'net/http'
3
+
4
+ module TDAnalytics
5
+ # DebugConsumer reports data to the server one record at a time, synchronously.
6
+ # It returns detailed error messages; it is recommended to debug your integration with DebugConsumer first.
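+ #
+ # Usage sketch (the URL and app id below are placeholders, not real values):
+ #   consumer = TDAnalytics::DebugConsumer.new('https://YOUR_RECEIVER_URL', 'YOUR_APP_ID')
+ #   tracker = TDAnalytics::Tracker.new(consumer)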
7
+ class DebugConsumer
8
+
9
+ def initialize(server_url, app_id)
10
+ @server_uri = URI.parse(server_url)
11
+ @server_uri.path = '/sync_data'
12
+ @app_id = app_id
13
+ end
14
+
15
+ def add(message)
16
+ puts message.to_json
17
+ form_data = {"data" => message.to_json, "appid" => @app_id, "debug" => 1}
18
+ begin
19
+ response_code, response_body = request(@server_uri, form_data)
20
+ rescue => e
21
+ raise ConnectionError.new("Could not connect to TA server, with error \"#{e.message}\".")
22
+ end
23
+
24
+ result = {}
25
+ if response_code.to_i == 200
26
+ begin
27
+ result = JSON.parse(response_body.to_s)
28
+ rescue JSON::JSONError
29
+ raise ServerError.new("Could not interpret TA server response: '#{response_body}'")
30
+ end
31
+ end
32
+
33
+ puts result
34
+
35
+ if result['code'] != 0
36
+ raise ServerError.new("Could not write to TA, server responded with #{response_code} returning: '#{response_body}'")
37
+ end
38
+ end
39
+
40
+ def request(uri, form_data)
41
+ request = Net::HTTP::Post.new(uri.request_uri)
42
+ request.set_form_data(form_data)
43
+
44
+ client = Net::HTTP.new(uri.host, uri.port)
45
+ client.use_ssl = uri.scheme === 'https' ? true : false
46
+ client.open_timeout = 10
47
+ client.continue_timeout = 10
48
+ client.read_timeout = 10
49
+ client.ssl_timeout = 10
50
+
51
+ response = client.request(request)
52
+ [response.code, response.body]
53
+ end
54
+ end
55
+
56
+ end
data/lib/thinkingdata-ruby/errors.rb ADDED
@@ -0,0 +1,35 @@
1
+ module TDAnalytics
2
+
3
+ # Base error for the TD Analytics SDK
4
+ TDAnalyticsError = Class.new(StandardError)
5
+
6
+ # Invalid parameter
7
+ IllegalParameterError = Class.new(TDAnalyticsError)
8
+
9
+ # Network connection error
10
+ ConnectionError = Class.new(TDAnalyticsError)
11
+
12
+ # Server returned an error
13
+ ServerError = Class.new(TDAnalyticsError)
14
+
15
+
16
+ # By default, no exceptions are raised. If you want to handle exceptions yourself, implement an error
17
+ # handler class that inherits from ErrorHandler and pass it in when initializing the SDK.
18
+ # For example:
19
+ # class MyErrorHandler < TDAnalytics::ErrorHandler
20
+ # def handle(error)
21
+ # puts error
22
+ # raise error
23
+ # end
24
+ # end
25
+ #
26
+ # my_error_handler = MyErrorHandler.new
27
+ # tracker = TDAnalytics::Tracker.new(consumer, my_error_handler)
28
+ class ErrorHandler
29
+
30
+ # Override #handle to customize error handling
31
+ def handle(error)
32
+ false
33
+ end
34
+ end
35
+ end
data/lib/thinkingdata-ruby/logger_consumer.rb ADDED
@@ -0,0 +1,57 @@
1
+ require 'logger'
2
+ require 'thinkingdata-ruby/errors'
3
+
4
+ module TDAnalytics
5
+ # Writes data to local files; use LogBus to upload the data to the server.
6
+ # LogBus has a robust retry mechanism for failed uploads, so this approach is recommended first.
7
+ class LoggerConsumer
8
+ # LoggerConsumer constructor
9
+ # log_path: directory where the log files are stored
10
+ # mode: log rotation mode, either daily or hourly
11
+ # prefix: log file name prefix, 'tda.log' by default; file names look like tda.log.2019-11-15
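+ #
+ # Usage sketch (the path and prefix below are placeholders):
+ #   consumer = TDAnalytics::LoggerConsumer.new('/path/to/log', 'hourly', prefix: 'demolog')
+ #   tracker = TDAnalytics::Tracker.new(consumer)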
12
+ def initialize(log_path='.', mode='daily', prefix:'tda.log')
13
+ case mode
14
+ when 'hourly'
15
+ @suffix_mode = '%Y-%m-%d-%H'
16
+ when 'daily'
17
+ @suffix_mode = '%Y-%m-%d'
18
+ else
19
+ raise IllegalParameterError.new("#{mode} is unsupported for LoggerConsumer. Use daily or hourly instead")
20
+ end
21
+
22
+ raise IllegalParameterError.new("prefix couldn't be empty") if prefix.nil? || prefix.length == 0
23
+
24
+ @current_suffix = Time.now.strftime(@suffix_mode)
25
+
26
+ @full_prefix = "#{log_path}/#{prefix}."
27
+
28
+ _reset
29
+ end
30
+
31
+ def add(msg)
32
+ unless Time.now.strftime(@suffix_mode) == @current_suffix
33
+ @logger.close
34
+ @current_suffix = Time.now.strftime(@suffix_mode)
35
+ _reset
36
+ end
37
+ @logger.info(msg.to_json)
38
+ end
39
+
40
+ # Close the logger
41
+ def close
42
+ @logger.close
43
+ end
44
+
45
+ private
46
+
47
+ # Recreate the logger object. LogBus identifies new files by both file name and inode, so Logger's default rotation would cause data to be re-uploaded.
48
+ def _reset
49
+ @logger = Logger.new("#{@full_prefix}#{@current_suffix}")
50
+ @logger.level = Logger::INFO
51
+ @logger.formatter = proc do |severity, datetime, progname, msg|
52
+ "#{msg}\n"
53
+ end
54
+ end
55
+
56
+ end
57
+ end
data/lib/thinkingdata-ruby/tracker.rb ADDED
@@ -0,0 +1,313 @@
1
+ require 'securerandom'
2
+ require 'thinkingdata-ruby/errors'
3
+ require 'thinkingdata-ruby/version'
4
+
5
+ module TDAnalytics
6
+ # TDAnalytics::Tracker is the core class for data reporting; use it to report event data and update user properties.
7
+ # Creating a Tracker requires a consumer object; the consumer determines how the formatted data is handled (written to a local log file or uploaded to the server).
8
+ #
9
+ # ta = TDAnalytics::Tracker.new(consumer)
10
+ # ta.track(event_name: 'your_event', distinct_id: 'distinct_id_of_user')
11
+ #
12
+ # TDAnalytics provides three consumer implementations:
13
+ # LoggerConsumer: writes data to local files
14
+ # DebugConsumer: sends data to the server one record at a time, synchronously, and returns detailed error messages
15
+ # BatchConsumer: sends data to the server in batches, synchronously
16
+ #
17
+ # You can also pass in your own Consumer implementation; it only needs to implement the following interface:
18
+ # add(message): accepts a Hash data object
19
+ # flush: (optional) sends buffered data to the configured destination
20
+ # close: (optional) can be called before the program exits to ensure a clean shutdown
21
+ class Tracker
22
+
23
+ LIB_PROPERTIES = {
24
+ '#lib' => 'ruby',
25
+ '#lib_version' => TDAnalytics::VERSION,
26
+ }
27
+
28
+ # SDK constructor; takes a consumer object.
29
+ #
30
+ # By default, all errors other than invalid parameters are ignored. If you want to handle errors raised by API calls yourself, pass in a custom error handler.
31
+ # See thinkingdata-ruby/errors.rb for the definition of ErrorHandler.
32
+ #
33
+ # If uuid is true, every record is reported with a random UUID as the value of the #uuid property; the value is not stored and is only used by the backend for duplicate detection.
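+ #
+ # Usage sketch (consumer and my_error_handler are assumed to be defined elsewhere):
+ #   ta = TDAnalytics::Tracker.new(consumer, my_error_handler, uuid: true)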
34
+ def initialize(consumer, error_handler=nil, uuid: false)
35
+ @error_handler = error_handler || ErrorHandler.new
36
+ @consumer = consumer
37
+ @super_properties = {}
38
+ @uuid = uuid
39
+ end
40
+
41
+ # Set super event properties; super properties are included with every event. This method merges the given properties into the current super properties.
42
+ # To skip local format validation, pass skip_local_check as true.
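+ #
+ # Usage sketch (the property names are illustrative):
+ #   ta.set_super_properties(channel: 'organic', vip_level: 1)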
43
+ def set_super_properties(properties, skip_local_check = false)
44
+ unless skip_local_check || _check_properties(:track, properties)
45
+ @error_handler.handle(IllegalParameterError.new("Invalid super properties"))
46
+ return false
47
+ end
48
+ properties.each do |k, v|
49
+ if v.is_a?(Time)
50
+ @super_properties[k] = _format_time(v)
51
+ else
52
+ @super_properties[k] = v
53
+ end
54
+ end
55
+ end
56
+
57
+ # Clear super event properties
58
+ def clear_super_properties
59
+ @super_properties = {}
60
+ end
61
+
62
+ # Report an event. Each event has an event name and a Hash of event properties. Parameters:
63
+ # event_name: (required) event name; must start with a letter and may contain letters, digits, and _, up to 50 characters.
64
+ # distinct_id: (optional) distinct (visitor) ID
65
+ # account_id: (optional) account ID; distinct_id and account_id cannot both be empty
66
+ # properties: (optional) Hash of event properties. Four value types are supported: String, numeric, Time, and boolean
67
+ # time: (optional) Time the event occurred; defaults to the current system time
68
+ # ip: (optional) event IP; if provided, the backend can resolve where the event occurred
69
+ # skip_local_check: (optional) boolean indicating whether to skip local validation
70
+ def track(event_name:nil, distinct_id:nil, account_id:nil, properties:{}, time:nil, ip:nil, skip_local_check: false)
71
+ begin
72
+ _check_name event_name
73
+ _check_id(distinct_id, account_id)
74
+ unless skip_local_check
75
+ _check_properties(:track, properties)
76
+ end
77
+ rescue TDAnalyticsError => e
78
+ @error_handler.handle(e)
79
+ return false
80
+ end
81
+
82
+ data = {}
83
+ data[:event_name] = event_name
84
+ data[:distinct_id] = distinct_id if distinct_id
85
+ data[:account_id] = account_id if account_id
86
+ data[:time] = time if time
87
+ data[:ip] = ip if ip
88
+ data[:properties] = properties
89
+
90
+ _internal_track(:track, data)
91
+ end
92
+
93
+ # Set user properties. Properties with the same name overwrite previous values.
94
+ # distinct_id: (optional) distinct (visitor) ID
95
+ # account_id: (optional) account ID; distinct_id and account_id cannot both be empty
96
+ # properties: (optional) Hash of user properties. Four value types are supported: String, numeric, Time, and boolean
97
+ def user_set(distinct_id:nil, account_id:nil, properties:{}, ip:nil)
98
+ begin
99
+ _check_id(distinct_id, account_id)
100
+ _check_properties(:user_set, properties)
101
+ rescue TDAnalyticsError => e
102
+ @error_handler.handle(e)
103
+ return false
104
+ end
105
+
106
+ _internal_track(:user_set,
107
+ distinct_id: distinct_id,
108
+ account_id: account_id,
109
+ properties: properties,
110
+ ip: ip,
111
+ )
112
+ end
113
+
114
+ # Set user properties. Properties that already have a value are left unchanged; parameters are the same as user_set.
115
+ def user_set_once(distinct_id:nil, account_id:nil, properties:{}, ip:nil)
116
+ begin
117
+ _check_id(distinct_id, account_id)
118
+ _check_properties(:user_setOnce, properties)
119
+ rescue TDAnalyticsError => e
120
+ @error_handler.handle(e)
121
+ return false
122
+ end
123
+
124
+ _internal_track(:user_setOnce,
125
+ distinct_id: distinct_id,
126
+ account_id: account_id,
127
+ properties: properties,
128
+ ip: ip,
129
+ )
130
+ end
131
+
132
+ # Unset user properties; property can be a single property key or an array of keys.
133
+ def user_unset(distinct_id:nil, account_id:nil, property:nil)
134
+ properties = {}
135
+ if property.is_a?(Array)
136
+ property.each do |k|
137
+ properties[k] = 0
138
+ end
139
+ else
140
+ properties[property] = 0
141
+ end
142
+
143
+ begin
144
+ _check_id(distinct_id, account_id)
145
+ _check_properties(:user_unset, properties)
146
+ rescue TDAnalyticsError => e
147
+ @error_handler.handle(e)
148
+ return false
149
+ end
150
+
151
+ _internal_track(:user_unset,
152
+ distinct_id: distinct_id,
153
+ account_id: account_id,
154
+ properties: properties,
155
+ )
156
+ end
157
+
158
+ # Increment numeric user properties; if a property does not exist yet, it is set to 0 before the increment.
159
+ # distinct_id: (optional) distinct (visitor) ID
160
+ # account_id: (optional) account ID; distinct_id and account_id cannot both be empty
161
+ # properties: (optional) Hash of numeric user properties
162
+ def user_add(distinct_id:nil, account_id:nil, properties:{})
163
+ begin
164
+ _check_id(distinct_id, account_id)
165
+ _check_properties(:user_add, properties)
166
+ rescue TDAnalyticsError => e
167
+ @error_handler.handle(e)
168
+ return false
169
+ end
170
+
171
+ _internal_track(:user_add,
172
+ distinct_id: distinct_id,
173
+ account_id: account_id,
174
+ properties: properties,
175
+ )
176
+ end
177
+
178
+ # Delete the user; events generated by the user are not deleted.
179
+ def user_del(distinct_id:nil, account_id:nil)
180
+ begin
181
+ _check_id(distinct_id, account_id)
182
+ rescue TDAnalyticsError => e
183
+ @error_handler.handle(e)
184
+ return false
185
+ end
186
+
187
+ _internal_track(:user_del,
188
+ distinct_id: distinct_id,
189
+ account_id: account_id,
190
+ )
191
+ end
192
+
193
+ # Flush data immediately; for BatchConsumer this triggers an upload.
194
+ def flush
195
+ return true unless defined? @consumer.flush
196
+ ret = true
197
+ begin
198
+ @consumer.flush
199
+ rescue TDAnalyticsError => e
200
+ @error_handler.handle(e)
201
+ ret = false
202
+ end
203
+ ret
204
+ end
205
+
206
+ # Call before exiting to make sure the Consumer shuts down cleanly.
207
+ def close
208
+ return true unless defined? @consumer.close
209
+ ret = true
210
+ begin
211
+ @consumer.close
212
+ rescue TDAnalyticsError => e
213
+ @error_handler.handle(e)
214
+ ret = false
215
+ end
216
+ ret
217
+ end
218
+
219
+ private
220
+
221
+ # Returns false if an exception occurred, true otherwise.
222
+ def _internal_track(type, properties:{}, event_name:nil, account_id:nil, distinct_id:nil, ip:nil, time:Time.now)
223
+ if account_id == nil && distinct_id == nil
224
+ raise IllegalParameterError.new('account id or distinct id must be provided.')
225
+ end
226
+
227
+ if type == :track
228
+ raise IllegalParameterError.new('event name is empty for track') if event_name == nil
229
+ properties = {'#zone_offset': time.utc_offset / 3600.0}.merge(LIB_PROPERTIES).merge(@super_properties).merge(properties)
230
+ end
231
+
232
+ # Format Time values
233
+ properties.each do |k, v|
234
+ if v.is_a?(Time)
235
+ properties[k] = _format_time(v)
236
+ end
237
+ end
238
+
239
+ data = {
240
+ '#type' => type,
241
+ '#time' => _format_time(time),
242
+ 'properties' => properties,
243
+ }
244
+
245
+ data['#event_name'] = event_name if type == :track
246
+ data['#account_id'] = account_id if account_id
247
+ data['#distinct_id'] = distinct_id if distinct_id
248
+ data['#ip'] = ip if ip
249
+ data['#uuid'] = SecureRandom.uuid if @uuid
250
+
251
+ ret = true
252
+ begin
253
+ @consumer.add(data)
254
+ rescue TDAnalyticsError => e
255
+ @error_handler.handle(e)
256
+ ret = false
257
+ end
258
+
259
+ ret
260
+ end
261
+
262
+ # Format a Time as the string format required by ThinkingData
263
+ def _format_time(time)
264
+ time.strftime("%Y-%m-%d %H:%M:%S.#{((time.to_f * 1000.0).to_i % 1000).to_s.rjust(3, "0")}")
265
+ end
266
+
267
+ # Validate a property name or event name
268
+ def _check_name(name)
269
+ raise IllegalParameterError.new("the name of event or property cannot be nil") if name.nil?
270
+
271
+ unless name.instance_of?(String) || name.instance_of?(Symbol)
272
+ raise IllegalParameterError.new("#{name} is invalid. It must be String or Symbol")
273
+ end
274
+
275
+ unless name =~ /^[a-zA-Z][a-zA-Z0-9_]{1,49}$/
276
+ raise IllegalParameterError.new("#{name} is invalid. It must be a string that starts with a letter and contains only letters, digits, and _, with a maximum length of 50")
277
+ end
278
+ true
279
+ end
280
+
281
+ # Validate property types
282
+ def _check_properties(type, properties)
283
+ unless properties.instance_of? Hash
284
+ return false
285
+ end
286
+
287
+ properties.each do |k, v|
288
+ _check_name k
289
+ unless v.is_a?(Integer) || v.is_a?(Float) || v.is_a?(Symbol) || v.is_a?(String) || v.is_a?(Time) || !!v == v
290
+ raise IllegalParameterError.new("The value of a property must be an Integer, Float, Symbol, String, Time, or boolean")
291
+ end
292
+
293
+ if type == :user_add
294
+ raise IllegalParameterError.new("Property value for user add must be numbers") unless v.is_a?(Integer) || v.is_a?(Float)
295
+ end
296
+ end
297
+ true
298
+ end
299
+
300
+ # Validate user IDs
301
+ def _check_id(distinct_id, account_id)
302
+ raise IllegalParameterError.new("account id or distinct id must be provided.") if distinct_id.nil? && account_id.nil?
303
+
304
+ unless distinct_id.nil?
305
+ raise IllegalParameterError.new("The length of distinct id should in (0, 64]") if distinct_id.to_s.length < 1 || distinct_id.to_s.length > 64
306
+ end
307
+
308
+ unless account_id.nil?
309
+ raise IllegalParameterError.new("The length of account id should in (0, 64]") if account_id.to_s.length < 1 || account_id.to_s.length > 64
310
+ end
311
+ end
312
+ end
313
+ end
data/lib/thinkingdata-ruby/version.rb ADDED
@@ -0,0 +1,3 @@
1
+ module TDAnalytics
2
+ VERSION = '1.0.0'
3
+ end
data/lib/thinkingdata-ruby.rb ADDED
@@ -0,0 +1,5 @@
1
+ require 'thinkingdata-ruby/logger_consumer'
2
+ require 'thinkingdata-ruby/debug_consumer'
3
+ require 'thinkingdata-ruby/batch_consumer'
4
+ require 'thinkingdata-ruby/tracker'
5
+ require 'thinkingdata-ruby/errors'
data/thinkingdata-ruby.gemspec ADDED
@@ -0,0 +1,16 @@
1
+ require File.join(File.dirname(__FILE__), 'lib/thinkingdata-ruby/version.rb')
2
+
3
+ spec = Gem::Specification.new do |spec|
4
+ spec.name = 'thinkingdata-ruby'
5
+ spec.version = TDAnalytics::VERSION
6
+ spec.files = Dir.glob(`git ls-files`.split("\n"))
7
+ spec.require_paths = ['lib']
8
+ spec.summary = 'Official ThinkingData Analytics API for ruby'
9
+ spec.description = 'The official ThinkingData Analytics API for ruby'
10
+ spec.authors = [ 'ThinkingData' ]
11
+ spec.email = 'sdk@thinkingdata.cn'
12
+ spec.homepage = 'https://github.com/ThinkingDataAnalytics/ruby-sdk'
13
+ spec.license = 'Apache-2.0'
14
+
15
+ spec.required_ruby_version = '>= 2.0.0'
16
+ end
metadata ADDED
@@ -0,0 +1,56 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: thinkingdata-ruby
3
+ version: !ruby/object:Gem::Version
4
+ version: 1.0.0
5
+ platform: ruby
6
+ authors:
7
+ - ThinkingData
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2019-11-20 00:00:00.000000000 Z
12
+ dependencies: []
13
+ description: The official ThinkingData Analytics API for ruby
14
+ email: sdk@thinkingdata.cn
15
+ executables: []
16
+ extensions: []
17
+ extra_rdoc_files: []
18
+ files:
19
+ - CHANGELOG.md
20
+ - Gemfile
21
+ - LICENSE
22
+ - README.md
23
+ - demo/demo.rb
24
+ - lib/thinkingdata-ruby.rb
25
+ - lib/thinkingdata-ruby/batch_consumer.rb
26
+ - lib/thinkingdata-ruby/debug_consumer.rb
27
+ - lib/thinkingdata-ruby/errors.rb
28
+ - lib/thinkingdata-ruby/logger_consumer.rb
29
+ - lib/thinkingdata-ruby/tracker.rb
30
+ - lib/thinkingdata-ruby/version.rb
31
+ - thinkingdata-ruby.gemspec
32
+ homepage: https://github.com/ThinkingDataAnalytics/ruby-sdk
33
+ licenses:
34
+ - Apache-2.0
35
+ metadata: {}
36
+ post_install_message:
37
+ rdoc_options: []
38
+ require_paths:
39
+ - lib
40
+ required_ruby_version: !ruby/object:Gem::Requirement
41
+ requirements:
42
+ - - ">="
43
+ - !ruby/object:Gem::Version
44
+ version: 2.0.0
45
+ required_rubygems_version: !ruby/object:Gem::Requirement
46
+ requirements:
47
+ - - ">="
48
+ - !ruby/object:Gem::Version
49
+ version: '0'
50
+ requirements: []
51
+ rubyforge_project:
52
+ rubygems_version: 2.5.2.3
53
+ signing_key:
54
+ specification_version: 4
55
+ summary: Official ThinkingData Analytics API for ruby
56
+ test_files: []