scrapebadger 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2024 ScrapeBadger
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,295 @@
1
+ # ScrapeBadger Node.js SDK
2
+
3
+ [![npm version](https://img.shields.io/npm/v/scrapebadger.svg)](https://www.npmjs.com/package/scrapebadger)
4
+ [![npm downloads](https://img.shields.io/npm/dm/scrapebadger.svg)](https://www.npmjs.com/package/scrapebadger)
5
+ [![Tests](https://github.com/scrape-badger/scrapebadger-node/actions/workflows/test.yml/badge.svg)](https://github.com/scrape-badger/scrapebadger-node/actions/workflows/test.yml)
6
+ [![TypeScript](https://img.shields.io/badge/TypeScript-5.0+-blue.svg)](https://www.typescriptlang.org/)
7
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
8
+
9
+ The official Node.js/TypeScript client library for the [ScrapeBadger](https://scrapebadger.com) API.
10
+
11
+ ## Features
12
+
13
+ - **Full TypeScript Support** - Complete type definitions for all API endpoints
14
+ - **Modern ESM & CommonJS** - Works with both module systems
15
+ - **Async Iterators** - Automatic pagination with `for await...of` syntax
16
+ - **Retry Logic** - Built-in exponential backoff for reliability
17
+ - **Error Handling** - Typed exceptions for different error scenarios
18
+ - **Tree-Shakeable** - Import only what you need
19
+
20
+ ## Installation
21
+
22
+ ```bash
23
+ npm install scrapebadger
24
+ ```
25
+
26
+ ```bash
27
+ yarn add scrapebadger
28
+ ```
29
+
30
+ ```bash
31
+ pnpm add scrapebadger
32
+ ```
33
+
34
+ ## Quick Start
35
+
36
+ ```typescript
37
+ import { ScrapeBadger } from "scrapebadger";
38
+
39
+ // Create client with API key
40
+ const client = new ScrapeBadger({ apiKey: "your-api-key" });
41
+
42
+ // Or use environment variable (SCRAPEBADGER_API_KEY)
43
+ // const client = new ScrapeBadger();
44
+
45
+ // Get a tweet
46
+ const tweet = await client.twitter.tweets.getById("1234567890");
47
+ console.log(`@${tweet.username}: ${tweet.text}`);
48
+
49
+ // Get a user profile
50
+ const user = await client.twitter.users.getByUsername("elonmusk");
51
+ console.log(`${user.name} has ${user.followers_count.toLocaleString()} followers`);
52
+ ```
53
+
54
+ ## Usage Examples
55
+
56
+ ### Search Tweets
57
+
58
+ ```typescript
59
+ import { ScrapeBadger } from "scrapebadger";
60
+
61
+ const client = new ScrapeBadger({ apiKey: "your-api-key" });
62
+
63
+ // Basic search (returns first page)
64
+ const results = await client.twitter.tweets.search("python programming");
65
+ for (const tweet of results.data) {
66
+ console.log(`@${tweet.username}: ${tweet.text}`);
67
+ }
68
+
69
+ // Paginate manually
70
+ if (results.hasMore) {
71
+ const nextPage = await client.twitter.tweets.search("python programming", {
72
+ cursor: results.nextCursor,
73
+ });
74
+ }
75
+
76
+ // Automatic pagination with async iterators
77
+ for await (const tweet of client.twitter.tweets.searchAll("python", { maxItems: 100 })) {
78
+ console.log(tweet.text);
79
+ }
80
+
81
+ // Collect all results into an array
82
+ import { collectAll } from "scrapebadger";
83
+
84
+ const tweets = await collectAll(
85
+ client.twitter.tweets.searchAll("python", { maxItems: 100 })
86
+ );
87
+ console.log(`Fetched ${tweets.length} tweets`);
88
+ ```
89
+
90
+ ### User Operations
91
+
92
+ ```typescript
93
+ // Get user by username
94
+ const user = await client.twitter.users.getByUsername("elonmusk");
95
+
96
+ // Get user by ID
97
+ const userById = await client.twitter.users.getById("44196397");
98
+
99
+ // Get extended profile info
100
+ const about = await client.twitter.users.getAbout("elonmusk");
101
+ console.log(`Account based in: ${about.account_based_in}`);
102
+ console.log(`Username changes: ${about.username_changes}`);
103
+
104
+ // Get followers
105
+ const followers = await client.twitter.users.getFollowers("elonmusk");
106
+ for (const follower of followers.data) {
107
+ console.log(`@${follower.username}`);
108
+ }
109
+
110
+ // Iterate through all followers
111
+ for await (const follower of client.twitter.users.getFollowersAll("elonmusk", {
112
+ maxItems: 1000,
113
+ })) {
114
+ console.log(follower.username);
115
+ }
116
+
117
+ // Search users
118
+ const users = await client.twitter.users.search("python developer");
119
+ ```
120
+
121
+ ### Lists
122
+
123
+ ```typescript
124
+ // Get list details
125
+ const list = await client.twitter.lists.getDetail("123456");
126
+ console.log(`${list.name}: ${list.member_count} members`);
127
+
128
+ // Get list tweets
129
+ const tweets = await client.twitter.lists.getTweets("123456");
130
+
131
+ // Get list members
132
+ const members = await client.twitter.lists.getMembers("123456");
133
+
134
+ // Search for lists
135
+ const lists = await client.twitter.lists.search("tech leaders");
136
+ ```
137
+
138
+ ### Communities
139
+
140
+ ```typescript
141
+ // Get community details
142
+ const community = await client.twitter.communities.getDetail("123456");
143
+ console.log(`${community.name}: ${community.member_count} members`);
144
+
145
+ // Get community tweets
146
+ const tweets = await client.twitter.communities.getTweets("123456", {
147
+ tweetType: "Latest",
148
+ });
149
+
150
+ // Search communities
151
+ const communities = await client.twitter.communities.search("python developers");
152
+ ```
153
+
154
+ ### Trends
155
+
156
+ ```typescript
157
+ // Get trending topics
158
+ const trends = await client.twitter.trends.getTrends();
159
+ for (const trend of trends.data) {
160
+ console.log(`${trend.name}: ${trend.tweet_count || "N/A"} tweets`);
161
+ }
162
+
163
+ // Get trends by category
164
+ const newsTrends = await client.twitter.trends.getTrends({
165
+ category: "news",
166
+ });
167
+
168
+ // Get trends for a specific location
169
+ const usTrends = await client.twitter.trends.getPlaceTrends(23424977); // US WOEID
170
+ console.log(`Trends in ${usTrends.name}:`);
171
+ for (const trend of usTrends.trends) {
172
+ console.log(` - ${trend.name}`);
173
+ }
174
+
175
+ // Get available locations
176
+ const locations = await client.twitter.trends.getAvailableLocations();
177
+ ```
178
+
179
+ ### Geographic Places
180
+
181
+ ```typescript
182
+ // Search for places
183
+ const places = await client.twitter.geo.search({ query: "San Francisco" });
184
+ for (const place of places.data) {
185
+ console.log(`${place.full_name} (${place.place_type})`);
186
+ }
187
+
188
+ // Search by coordinates
189
+ const nearby = await client.twitter.geo.search({
190
+ lat: 37.7749,
191
+ long: -122.4194,
192
+ granularity: "city",
193
+ });
194
+
195
+ // Get place details
196
+ const place = await client.twitter.geo.getDetail("5a110d312052166f");
197
+ ```
198
+
199
+ ## Error Handling
200
+
201
+ ```typescript
202
+ import {
203
+ ScrapeBadger,
204
+ AuthenticationError,
205
+ RateLimitError,
206
+ NotFoundError,
207
+ InsufficientCreditsError,
208
+ } from "scrapebadger";
209
+
210
+ const client = new ScrapeBadger({ apiKey: "your-api-key" });
211
+
212
+ try {
213
+ const tweet = await client.twitter.tweets.getById("1234567890");
214
+ } catch (error) {
215
+ if (error instanceof AuthenticationError) {
216
+ console.error("Invalid API key");
217
+ } else if (error instanceof RateLimitError) {
218
+ console.error(`Rate limited. Retry after: ${error.retryAfter}`);
219
+ } else if (error instanceof NotFoundError) {
220
+ console.error("Tweet not found");
221
+ } else if (error instanceof InsufficientCreditsError) {
222
+ console.error("Out of credits");
223
+ } else {
224
+ throw error;
225
+ }
226
+ }
227
+ ```
228
+
229
+ ## Configuration
230
+
231
+ ```typescript
232
+ const client = new ScrapeBadger({
233
+ // Required: Your API key (or use SCRAPEBADGER_API_KEY env var)
234
+ apiKey: "your-api-key",
235
+
236
+ // Optional: Custom base URL (default: https://api.scrapebadger.com)
237
+ baseUrl: "https://api.scrapebadger.com",
238
+
239
+ // Optional: Request timeout in milliseconds (default: 30000)
240
+ timeout: 30000,
241
+
242
+ // Optional: Maximum retry attempts (default: 3)
243
+ maxRetries: 3,
244
+
245
+ // Optional: Initial retry delay in milliseconds (default: 1000)
246
+ retryDelay: 1000,
247
+ });
248
+ ```
249
+
250
+ ## API Reference
251
+
252
+ ### Client
253
+
254
+ - `ScrapeBadger` - Main client class
255
+
256
+ ### Twitter Module
257
+
258
+ - `client.twitter.tweets` - Tweet operations
259
+ - `client.twitter.users` - User operations
260
+ - `client.twitter.lists` - List operations
261
+ - `client.twitter.communities` - Community operations
262
+ - `client.twitter.trends` - Trend operations
263
+ - `client.twitter.geo` - Geographic place operations
264
+
265
+ ### Utilities
266
+
267
+ - `collectAll(asyncIterator)` - Collect async iterator results into an array
268
+
269
+ ### Exceptions
270
+
271
+ - `ScrapeBadgerError` - Base exception class
272
+ - `AuthenticationError` - Invalid or missing API key
273
+ - `RateLimitError` - Rate limit exceeded
274
+ - `NotFoundError` - Resource not found
275
+ - `ValidationError` - Invalid request
276
+ - `ServerError` - Server error
277
+ - `TimeoutError` - Request timeout
278
+ - `InsufficientCreditsError` - Out of credits
279
+ - `AccountRestrictedError` - Account restricted
280
+
281
+ ## Requirements
282
+
283
+ - Node.js 18+ (for native `fetch` support)
284
+ - TypeScript 5.0+ (for best type inference)
285
+
286
+ ## License
287
+
288
+ MIT License - see [LICENSE](LICENSE) for details.
289
+
290
+ ## Links
291
+
292
+ - [Documentation](https://docs.scrapebadger.com)
293
+ - [API Reference](https://docs.scrapebadger.com/api)
294
+ - [GitHub](https://github.com/scrape-badger/scrapebadger-node)
295
+ - [npm](https://www.npmjs.com/package/scrapebadger)