freshbooks-cli 0.3.3 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/bin/fb +2 -2
- data/lib/freshbooks/api.rb +325 -0
- data/lib/freshbooks/auth.rb +484 -0
- data/lib/freshbooks/cli.rb +1106 -0
- data/lib/freshbooks/spinner.rb +50 -0
- data/lib/freshbooks/version.rb +7 -0
- data/lib/freshbooks.rb +7 -0
- metadata +22 -8
- data/lib/fb/api.rb +0 -301
- data/lib/fb/auth.rb +0 -379
- data/lib/fb/cli.rb +0 -1079
- data/lib/fb/spinner.rb +0 -48
- data/lib/fb/version.rb +0 -5
- data/lib/fb.rb +0 -7
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: 84f4c3c33549e8e6f23749a6597fe394d6d94b248c54453810c4bd198b3e4294
|
|
4
|
+
data.tar.gz: 76d88cb16bf1d4b9298869c4b0a9db068157cbd6e068c851e27a834516b5baa8
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 0cba9865b5c7b5e1e2e2b11590be7044803ca4afbb0a8eeba1d3136a84fc5ebaadfd542bf531d3d37073274936f30794dad006348a09bc4fc424a79ef993fc73
|
|
7
|
+
data.tar.gz: 8d11babdd9b8f3406a664d13f89cca6c5fd915b8154044bfe9dddb3fea2b620ac99d8c0b774a8ffa81c5d32c6e9dfb4521ffaa728dc1f3b68d122e645c21de3d
|
data/lib/freshbooks/api.rb
ADDED
|
@@ -0,0 +1,325 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "httparty"
|
|
4
|
+
require "json"
|
|
5
|
+
|
|
6
|
+
module FreshBooks
  module CLI
    # Thin class-level HTTP client for the FreshBooks REST API.
    #
    # All methods are class methods (defined via `class << self`). Network
    # calls go through HTTParty; token refresh, config and cache persistence
    # are delegated to Auth (defined elsewhere in this gem). Setting
    # Thread.current[:fb_dry_run] short-circuits remote/mutating calls with
    # simulated responses.
    class Api
      BASE = "https://api.freshbooks.com"

      # Seconds for which on-disk cached lookup data is considered fresh.
      CACHE_TTL = 600

      class << self
        # Request headers for every API call. Auth.valid_access_token is
        # assumed to refresh the OAuth token when needed — confirm in Auth.
        def headers
          {
            "Authorization" => "Bearer #{Auth.valid_access_token}",
            "Content-Type" => "application/json"
          }
        end

        # Re-reads configuration on every call. (The previous version set
        # `@config = nil` and immediately reassigned it, so the ivar was
        # dead; the always-fresh behavior is kept without it.)
        def config
          Auth.require_config
        end

        # FreshBooks business id, triggering business selection via
        # Auth.require_business when missing from the config.
        def business_id
          required_config_value("business_id")
        end

        # Accounting account id, resolved the same way as #business_id.
        def account_id
          required_config_value("account_id")
        end

        # --- Paginated fetch ---

        # GETs `url` repeatedly (100 items per page) until the API reports
        # no further pages, returning the concatenated items found under
        # `result_key`. Returns [] immediately in dry-run mode.
        def fetch_all_pages(url, result_key, params: {})
          return [] if Thread.current[:fb_dry_run]

          collected = []
          page = 1

          loop do
            data = parse_or_abort(
              HTTParty.get(url,
                           headers: headers,
                           query: params.merge(page: page, per_page: 100))
            )

            items = dig_results(data, result_key)
            break if items.nil? || items.empty?

            collected.concat(items)

            meta = dig_meta(data)
            break if meta.nil? || page >= meta["pages"].to_i

            page += 1
          end

          collected
        end

        # --- Cache helpers ---

        # True when the cache was written within the last CACHE_TTL seconds.
        def cache_fresh?
          stamp = Auth.load_cache["updated_at"]
          stamp && (Time.now.to_i - stamp) < CACHE_TTL
        end

        # Cached value for `key`, or nil when the cache is stale.
        # Dry-run mode ignores staleness and serves whatever is stored.
        def cached_data(key)
          cache = Auth.load_cache
          return cache[key] if Thread.current[:fb_dry_run]
          return nil unless cache["updated_at"] && (Time.now.to_i - cache["updated_at"]) < CACHE_TTL

          cache[key]
        end

        # Stores `data` under `key` and refreshes the cache timestamp.
        def update_cache(key, data)
          cache = Auth.load_cache
          cache["updated_at"] = Time.now.to_i
          cache[key] = data
          Auth.save_cache(cache)
        end

        # --- Clients ---

        # All clients for the account; serves fresh cache unless `force`.
        def fetch_clients(force: false)
          unless force
            cached = cached_data("clients_data")
            return cached if cached
          end

          results = fetch_all_pages(
            "#{BASE}/accounting/account/#{account_id}/users/clients",
            "clients"
          )
          update_cache("clients_data", results)
          results
        end

        # --- Projects ---

        # All projects for the business; serves fresh cache unless `force`.
        def fetch_projects(force: false)
          unless force
            cached = cached_data("projects_data")
            return cached if cached
          end

          results = fetch_all_pages(
            "#{BASE}/projects/business/#{business_id}/projects",
            "projects"
          )
          update_cache("projects_data", results)
          results
        end

        # Projects filtered down to a single client id.
        def fetch_projects_for_client(client_id)
          fetch_projects.select { |project| project["client_id"].to_i == client_id.to_i }
        end

        # --- Services ---

        # Services endpoint is not paginated; the payload nests services in
        # a hash keyed by id, so the hash's values are returned.
        def fetch_services(force: false)
          return (Auth.load_cache["services_data"] || []) if Thread.current[:fb_dry_run]

          unless force
            cached = cached_data("services_data")
            return cached if cached
          end

          data = parse_or_abort(
            HTTParty.get("#{BASE}/comments/business/#{business_id}/services",
                         headers: headers)
          )
          results = (data.dig("result", "services") || {}).values
          update_cache("services_data", results)
          results
        end

        # --- Time Entries ---

        # Entries within the optional [started_from, started_to] window
        # (dates as YYYY-MM-DD strings; expanded to full-day UTC bounds).
        def fetch_time_entries(started_from: nil, started_to: nil)
          params = {}
          params["started_from"] = "#{started_from}T00:00:00Z" if started_from
          params["started_to"] = "#{started_to}T23:59:59Z" if started_to
          fetch_all_pages("#{BASE}/timetracking/business/#{business_id}/time_entries",
                          "time_entries", params: params)
        end

        # Single time entry by id, tolerating both response envelopes.
        def fetch_time_entry(entry_id)
          data = parse_or_abort(
            HTTParty.get("#{BASE}/timetracking/business/#{business_id}/time_entries/#{entry_id}",
                         headers: headers)
          )
          data.dig("result", "time_entry") || data["time_entry"]
        end

        # Creates a time entry; dry-run returns a simulated payload (id 0)
        # without touching the network.
        def create_time_entry(entry)
          if Thread.current[:fb_dry_run]
            return {
              "_dry_run" => { "simulated" => true, "payload_sent" => entry },
              "result" => { "time_entry" => entry.merge("id" => 0) }
            }
          end

          parse_or_abort(
            HTTParty.post("#{BASE}/timetracking/business/#{business_id}/time_entries",
                          headers: headers,
                          body: { time_entry: entry }.to_json)
          )
        end

        # Updates selected fields of an existing entry (dry-run simulated).
        def update_time_entry(entry_id, fields)
          if Thread.current[:fb_dry_run]
            return {
              "_dry_run" => { "simulated" => true, "payload_sent" => fields },
              "result" => { "time_entry" => fields.merge("id" => entry_id) }
            }
          end

          parse_or_abort(
            HTTParty.put("#{BASE}/timetracking/business/#{business_id}/time_entries/#{entry_id}",
                         headers: headers,
                         body: { time_entry: fields }.to_json)
          )
        end

        # Deletes an entry; returns true (dry-run skips the request).
        def delete_time_entry(entry_id)
          return true if Thread.current[:fb_dry_run]

          parse_or_abort(
            HTTParty.delete("#{BASE}/timetracking/business/#{business_id}/time_entries/#{entry_id}",
                            headers: headers)
          )
          true
        end

        # --- Name Resolution (for entries display) ---

        # Returns { clients:, projects:, services: } hashes mapping
        # id-string => display name. Reuses the cache when fresh and
        # repopulates it from force-refreshed API data otherwise.
        def build_name_maps
          cache = Auth.load_cache
          now = Time.now.to_i

          if cache["updated_at"] && (now - cache["updated_at"]) < CACHE_TTL &&
             cache["clients"] && !cache["clients"].empty?
            return {
              clients: (cache["clients"] || {}),
              projects: (cache["projects"] || {}),
              services: (cache["services"] || {})
            }
          end

          clients = fetch_clients(force: true)
          projects = fetch_projects(force: true)
          services = fetch_services(force: true)

          # Prefer the organization name; fall back to the contact name.
          client_map = clients.each_with_object({}) do |c, map|
            name = c["organization"]
            name = "#{c["fname"]} #{c["lname"]}" if name.nil? || name.empty?
            map[c["id"].to_s] = name
          end

          project_map = projects.each_with_object({}) do |p, map|
            map[p["id"].to_s] = p["title"]
          end

          service_map = services.each_with_object({}) do |s, map|
            map[s["id"].to_s] = s["name"]
          end

          # Services also appear embedded inside projects; only fill gaps
          # so the dedicated services endpoint wins on conflicts.
          projects.each do |p|
            (p["services"] || []).each do |s|
              service_map[s["id"].to_s] ||= s["name"]
            end
          end

          cache = Auth.load_cache
          cache["updated_at"] = now
          cache["clients"] = client_map
          cache["projects"] = project_map
          cache["services"] = service_map
          Auth.save_cache(cache)

          { clients: client_map, projects: project_map, services: service_map }
        end

        private

        # Loads config, re-running business selection when `key` is absent.
        # Shared by #business_id and #account_id (previously duplicated).
        def required_config_value(key)
          c = config
          c = Auth.require_business(c) unless c[key]
          c[key]
        end

        # Aborts with the API's error message on a non-success response;
        # otherwise returns the parsed body. Centralizes the error block
        # that was previously copy-pasted into every request method.
        def parse_or_abort(response)
          unless response.success?
            msg = extract_error(response.parsed_response) || response.body
            abort("API error: #{msg}")
          end
          response.parsed_response
        end

        # Best-effort extraction of a human-readable message from the
        # several error shapes FreshBooks endpoints return.
        def extract_error(body)
          return nil unless body.is_a?(Hash)

          body["error_description"] ||
            body.dig("response", "errors", 0, "message") ||
            body["error"] ||
            body["message"]
        end

        # Result lists appear under differing envelopes per API family.
        def dig_results(data, key)
          data.dig("result", key) ||
            data.dig("response", "result", key) ||
            data[key]
        end

        # Pagination metadata, same envelope variations as #dig_results.
        def dig_meta(data)
          data.dig("result", "meta") ||
            data.dig("response", "result", "meta") ||
            data["meta"]
        end
      end
    end
  end
end
|