@flisk/analyze-tracking 0.5.0 → 0.5.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -26,7 +26,7 @@ npx @flisk/analyze-tracking /path/to/project [options]
 ### Key Options:
 - `-g, --generateDescription`: Generate descriptions of fields (default: `false`)
 - `-p, --provider <provider>`: Specify a provider (options: `openai`, `gemini`)
-- `-m, --model <model>`: Specify a model (options: `gpt-4o-mini`, `gemini-2.0-flash-lite-001`)
+- `-m, --model <model>`: Specify a model (ex: `gpt-4.1-nano`, `gpt-4o-mini`, `gemini-2.0-flash-lite-001`)
 - `-o, --output <output_file>`: Name of the output file (default: `tracking-schema.yaml`)
 - `-c, --customFunction <function_name>`: Specify a custom tracking function
 
@@ -73,7 +73,7 @@ events:
 
 Use this to understand where your events live in the code and how they’re being tracked.
 
-[GPT-4o mini](https://platform.openai.com/docs/models/gpt-4o-mini) is used for generating descriptions of events, properties, and implementations.
+Your LLM of choice is used for generating descriptions of events, properties, and implementations.
 
 See [schema.json](schema.json) for a JSON Schema of the output.
 
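For reference, an invocation that exercises the updated `--model` option might look like this (the project path and model name are illustrative; any model your provider accepts should work):

```sh
npx @flisk/analyze-tracking /path/to/project \
  --generateDescription \
  --provider openai \
  --model gpt-4.1-nano
```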
package/bin/cli.js CHANGED
@@ -6,10 +6,7 @@ const commandLineUsage = require('command-line-usage');
 const { run } = require('../src/index');
 const { helpContent } = require('./help');
 
-const SUPPORTED_MODELS = {
-  openai: ['gpt-4o-mini'],
-  gemini: ['gemini-2.0-flash-lite-001'],
-};
+const SUPPORTED_MODEL_PROVIDERS = ['openai', 'gemini'];
 
 // Parse command-line arguments
 const optionDefinitions = [
@@ -100,31 +97,19 @@ if (!targetDir) {
 }
 
 if (generateDescription) {
-  if (!Object.keys(SUPPORTED_MODELS).includes(provider)) {
+  if (!SUPPORTED_MODEL_PROVIDERS.includes(provider)) {
     console.error('Please provide a valid provider. Options: openai, gemini');
     process.exit(1);
   }
 
-  if (provider === 'openai') {
-    if (!SUPPORTED_MODELS.openai.includes(model)) {
-      console.error(`Please provide a valid model for OpenAI. Options: ${SUPPORTED_MODELS.openai.join(', ')}`);
-      process.exit(1);
-    }
-    if (!process.env.OPENAI_API_KEY) {
-      console.error('Please set the `OPENAI_API_KEY` environment variable to use OpenAI for `generateDescription`.');
-      process.exit(1);
-    }
+  if (provider === 'openai' && !process.env.OPENAI_API_KEY) {
+    console.error('Please set the `OPENAI_API_KEY` environment variable to use OpenAI for `generateDescription`.');
+    process.exit(1);
   }
-
-  if (provider === 'gemini') {
-    if (!SUPPORTED_MODELS.gemini.includes(model)) {
-      console.error(`Please provide a valid model for Gemini. Options: ${SUPPORTED_MODELS.gemini.join(', ')}`);
-      process.exit(1);
-    }
-    if (!process.env.GOOGLE_APPLICATION_CREDENTIALS) {
-      console.error('Please set the `GOOGLE_APPLICATION_CREDENTIALS` environment variable to use Gemini for `generateDescription`.');
-      process.exit(1);
-    }
+
+  if (provider === 'gemini' && !process.env.GOOGLE_APPLICATION_CREDENTIALS) {
+    console.error('Please set the `GOOGLE_APPLICATION_CREDENTIALS` environment variable to use Gemini for `generateDescription`.');
+    process.exit(1);
   }
 }
 
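For readability, this is roughly how the `generateDescription` guard in `bin/cli.js` reads once the hunk above is applied (reconstructed from the diff, not copied verbatim): the per-provider model allow-lists are gone, so any model string is forwarded as long as the provider is recognized and its credentials are present.

```js
const SUPPORTED_MODEL_PROVIDERS = ['openai', 'gemini'];

if (generateDescription) {
  // Only the provider is validated; the model string is passed through unchecked.
  if (!SUPPORTED_MODEL_PROVIDERS.includes(provider)) {
    console.error('Please provide a valid provider. Options: openai, gemini');
    process.exit(1);
  }
  // Credential checks are unchanged in spirit, just flattened into single conditions.
  if (provider === 'openai' && !process.env.OPENAI_API_KEY) {
    console.error('Please set the `OPENAI_API_KEY` environment variable to use OpenAI for `generateDescription`.');
    process.exit(1);
  }
  if (provider === 'gemini' && !process.env.GOOGLE_APPLICATION_CREDENTIALS) {
    console.error('Please set the `GOOGLE_APPLICATION_CREDENTIALS` environment variable to use Gemini for `generateDescription`.');
    process.exit(1);
  }
}
```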
package/bin/help.js CHANGED
@@ -57,10 +57,10 @@ const helpContent = [
   {
     name: 'model',
     alias: 'm',
-    description: 'Specify a model (options: {italic gpt-4o-mini}, {italic gemini-2.0-flash-lite-001})',
+    description: 'Specify a model (ex: {italic gpt-4.1-nano, gpt-4o-mini}, {italic gemini-2.0-flash-lite-001})',
     type: String,
-    defaultValue: 'gpt-4o-mini',
-    typeLabel: '{underline gpt-4o-mini}'
+    defaultValue: 'gpt-4.1-nano',
+    typeLabel: '{underline gpt-4.1-nano}'
   },
   {
     name: 'output',
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@flisk/analyze-tracking",
-  "version": "0.5.0",
+  "version": "0.5.2",
   "description": "Analyzes tracking code in a project and generates data schemas",
   "main": "src/index.js",
   "bin": {
@@ -20,9 +20,9 @@
   },
   "homepage": "https://github.com/fliskdata/analyze-tracking#readme",
   "dependencies": {
-    "@langchain/core": "^0.3.49",
-    "@langchain/google-vertexai": "^0.2.5",
-    "@langchain/openai": "^0.5.7",
+    "@langchain/core": "^0.3.56",
+    "@langchain/google-vertexai": "^0.2.9",
+    "@langchain/openai": "^0.5.10",
     "@ruby/prism": "^1.4.0",
     "@typescript-eslint/parser": "^8.1.0",
     "acorn": "^8.12.1",
@@ -34,7 +34,7 @@
     "isomorphic-git": "^1.27.1",
     "js-yaml": "^4.1.0",
     "typescript": "^5.5.4",
-    "zod": "^3.24.3"
+    "zod": "^3.24.4"
   },
   "devDependencies": {
     "jest": "^29.7.0"
package/src/index.js CHANGED
@@ -12,13 +12,13 @@ async function run(targetDir, outputPath, customFunction, customSourceDetails, g
   let llm;
   if (provider === 'openai') {
     llm = new ChatOpenAI({
-      modelName: model,
+      model: model,
       temperature: 0,
     });
   }
   if (provider === 'gemini') {
     llm = new ChatVertexAI({
-      modelName: model,
+      model: model,
       temperature: 0,
     });
   }
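The only change in `src/index.js` is the constructor option rename from `modelName` to `model`. A minimal sketch of the resulting instantiation, assuming `@langchain/openai` ^0.5.x and `OPENAI_API_KEY` set in the environment (the model string here is illustrative; the real value comes from the CLI's `--model` flag):

```js
const { ChatOpenAI } = require('@langchain/openai');

// `model` is the field current LangChain chat-model constructors accept;
// the Gemini path does the same with ChatVertexAI from @langchain/google-vertexai.
const llm = new ChatOpenAI({
  model: 'gpt-4.1-nano',
  temperature: 0,
});
```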