@huggingface/inference 3.5.2 → 3.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/dist/index.cjs +364 -970
  2. package/dist/index.js +366 -981
  3. package/dist/src/index.d.ts.map +1 -1
  4. package/dist/src/lib/makeRequestOptions.d.ts +16 -1
  5. package/dist/src/lib/makeRequestOptions.d.ts.map +1 -1
  6. package/dist/src/providers/novita.d.ts.map +1 -1
  7. package/dist/src/snippets/getInferenceSnippets.d.ts +4 -0
  8. package/dist/src/snippets/getInferenceSnippets.d.ts.map +1 -0
  9. package/dist/src/snippets/index.d.ts +1 -4
  10. package/dist/src/snippets/index.d.ts.map +1 -1
  11. package/dist/src/snippets/templates.exported.d.ts +2 -0
  12. package/dist/src/snippets/templates.exported.d.ts.map +1 -0
  13. package/dist/src/tasks/cv/textToVideo.d.ts.map +1 -1
  14. package/package.json +9 -5
  15. package/src/index.ts +1 -1
  16. package/src/lib/makeRequestOptions.ts +37 -10
  17. package/src/providers/fireworks-ai.ts +1 -1
  18. package/src/providers/hf-inference.ts +1 -1
  19. package/src/providers/nebius.ts +3 -3
  20. package/src/providers/novita.ts +7 -6
  21. package/src/providers/sambanova.ts +1 -1
  22. package/src/providers/together.ts +3 -3
  23. package/src/snippets/getInferenceSnippets.ts +380 -0
  24. package/src/snippets/index.ts +1 -5
  25. package/src/snippets/templates.exported.ts +72 -0
  26. package/src/tasks/cv/textToVideo.ts +25 -5
  27. package/src/vendor/fetch-event-source/LICENSE +21 -0
  28. package/dist/src/snippets/curl.d.ts +0 -17
  29. package/dist/src/snippets/curl.d.ts.map +0 -1
  30. package/dist/src/snippets/js.d.ts +0 -21
  31. package/dist/src/snippets/js.d.ts.map +0 -1
  32. package/dist/src/snippets/python.d.ts +0 -4
  33. package/dist/src/snippets/python.d.ts.map +0 -1
  34. package/src/snippets/curl.ts +0 -177
  35. package/src/snippets/js.ts +0 -475
  36. package/src/snippets/python.ts +0 -563
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAC1F,OAAO,EAAE,oBAAoB,EAAE,MAAM,4BAA4B,CAAC;AAClE,cAAc,SAAS,CAAC;AACxB,cAAc,SAAS,CAAC;AAExB,OAAO,KAAK,QAAQ,MAAM,qBAAqB,CAAC;AAChD,OAAO,EAAE,QAAQ,EAAE,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAC1F,OAAO,EAAE,oBAAoB,EAAE,MAAM,4BAA4B,CAAC;AAClE,cAAc,SAAS,CAAC;AACxB,cAAc,SAAS,CAAC;AACxB,OAAO,KAAK,QAAQ,MAAM,qBAAqB,CAAC;AAEhD,OAAO,EAAE,QAAQ,EAAE,CAAC"}
@@ -1,6 +1,7 @@
1
1
  import type { InferenceTask, Options, RequestArgs } from "../types";
2
2
  /**
3
- * Helper that prepares request arguments
3
+ * Helper that prepares request arguments.
4
+ * This async version handle the model ID resolution step.
4
5
  */
5
6
  export declare function makeRequestOptions(args: RequestArgs & {
6
7
  data?: Blob | ArrayBuffer;
@@ -13,4 +14,18 @@ export declare function makeRequestOptions(args: RequestArgs & {
13
14
  url: string;
14
15
  info: RequestInit;
15
16
  }>;
17
+ /**
18
+ * Helper that prepares request arguments. - for internal use only
19
+ * This sync version skips the model ID resolution step
20
+ */
21
+ export declare function makeRequestOptionsFromResolvedModel(resolvedModel: string, args: RequestArgs & {
22
+ data?: Blob | ArrayBuffer;
23
+ stream?: boolean;
24
+ }, options?: Options & {
25
+ task?: InferenceTask;
26
+ chatCompletion?: boolean;
27
+ }): {
28
+ url: string;
29
+ info: RequestInit;
30
+ };
16
31
  //# sourceMappingURL=makeRequestOptions.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"makeRequestOptions.d.ts","sourceRoot":"","sources":["../../../src/lib/makeRequestOptions.ts"],"names":[],"mappings":"AAcA,OAAO,KAAK,EAAqB,aAAa,EAAE,OAAO,EAAkB,WAAW,EAAE,MAAM,UAAU,CAAC;AAgCvG;;GAEG;AACH,wBAAsB,kBAAkB,CACvC,IAAI,EAAE,WAAW,GAAG;IACnB,IAAI,CAAC,EAAE,IAAI,GAAG,WAAW,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB,EACD,OAAO,CAAC,EAAE,OAAO,GAAG;IACnB,oEAAoE;IACpE,IAAI,CAAC,EAAE,aAAa,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB,GACC,OAAO,CAAC;IAAE,GAAG,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,WAAW,CAAA;CAAE,CAAC,CAsH7C"}
1
+ {"version":3,"file":"makeRequestOptions.d.ts","sourceRoot":"","sources":["../../../src/lib/makeRequestOptions.ts"],"names":[],"mappings":"AAcA,OAAO,KAAK,EAAqB,aAAa,EAAE,OAAO,EAAkB,WAAW,EAAE,MAAM,UAAU,CAAC;AAgCvG;;;GAGG;AACH,wBAAsB,kBAAkB,CACvC,IAAI,EAAE,WAAW,GAAG;IACnB,IAAI,CAAC,EAAE,IAAI,GAAG,WAAW,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB,EACD,OAAO,CAAC,EAAE,OAAO,GAAG;IACnB,oEAAoE;IACpE,IAAI,CAAC,EAAE,aAAa,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB,GACC,OAAO,CAAC;IAAE,GAAG,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,WAAW,CAAA;CAAE,CAAC,CAoC7C;AAED;;;GAGG;AACH,wBAAgB,mCAAmC,CAClD,aAAa,EAAE,MAAM,EACrB,IAAI,EAAE,WAAW,GAAG;IACnB,IAAI,CAAC,EAAE,IAAI,GAAG,WAAW,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB,EACD,OAAO,CAAC,EAAE,OAAO,GAAG;IACnB,IAAI,CAAC,EAAE,aAAa,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB,GACC;IAAE,GAAG,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,WAAW,CAAA;CAAE,CA4FpC"}
@@ -1 +1 @@
1
- {"version":3,"file":"novita.d.ts","sourceRoot":"","sources":["../../../src/providers/novita.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAAE,cAAc,EAAuC,MAAM,UAAU,CAAC;AAyBpF,eAAO,MAAM,aAAa,EAAE,cAK3B,CAAC"}
1
+ {"version":3,"file":"novita.d.ts","sourceRoot":"","sources":["../../../src/providers/novita.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,OAAO,KAAK,EAAE,cAAc,EAAuC,MAAM,UAAU,CAAC;AA0BpF,eAAO,MAAM,aAAa,EAAE,cAK3B,CAAC"}
@@ -0,0 +1,4 @@
1
+ import { type InferenceSnippet, type ModelDataMinimal } from "@huggingface/tasks";
2
+ import type { InferenceProvider } from "../types";
3
+ export declare function getInferenceSnippets(model: ModelDataMinimal, accessToken: string, provider: InferenceProvider, providerModelId?: string, opts?: Record<string, unknown>): InferenceSnippet[];
4
+ //# sourceMappingURL=getInferenceSnippets.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"getInferenceSnippets.d.ts","sourceRoot":"","sources":["../../../src/snippets/getInferenceSnippets.ts"],"names":[],"mappings":"AAEA,OAAO,EACN,KAAK,gBAAgB,EAErB,KAAK,gBAAgB,EAGrB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,KAAK,EAAE,iBAAiB,EAA8B,MAAM,UAAU,CAAC;AAgS9E,wBAAgB,oBAAoB,CACnC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,QAAQ,EAAE,iBAAiB,EAC3B,eAAe,CAAC,EAAE,MAAM,EACxB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAC5B,gBAAgB,EAAE,CAIpB"}
@@ -1,5 +1,2 @@
1
- import * as curl from "./curl.js";
2
- import * as python from "./python.js";
3
- import * as js from "./js.js";
4
- export { curl, python, js };
1
+ export { getInferenceSnippets } from "./getInferenceSnippets.js";
5
2
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/snippets/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,IAAI,MAAM,WAAW,CAAC;AAClC,OAAO,KAAK,MAAM,MAAM,aAAa,CAAC;AACtC,OAAO,KAAK,EAAE,MAAM,SAAS,CAAC;AAE9B,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/snippets/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAE,MAAM,2BAA2B,CAAC"}
@@ -0,0 +1,2 @@
1
+ export declare const templates: Record<string, Record<string, Record<string, string>>>;
2
+ //# sourceMappingURL=templates.exported.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"templates.exported.d.ts","sourceRoot":"","sources":["../../../src/snippets/templates.exported.ts"],"names":[],"mappings":"AACA,eAAO,MAAM,SAAS,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,CAsEnE,CAAC"}
@@ -1 +1 @@
1
- {"version":3,"file":"textToVideo.d.ts","sourceRoot":"","sources":["../../../../src/tasks/cv/textToVideo.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAqB,OAAO,EAAE,MAAM,aAAa,CAAC;AACxE,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAO3D,MAAM,MAAM,eAAe,GAAG,QAAQ,GAAG,gBAAgB,CAAC;AAE1D,MAAM,MAAM,iBAAiB,GAAG,IAAI,CAAC;AAcrC,wBAAsB,WAAW,CAAC,IAAI,EAAE,eAAe,EAAE,OAAO,CAAC,EAAE,OAAO,GAAG,OAAO,CAAC,iBAAiB,CAAC,CA0CtG"}
1
+ {"version":3,"file":"textToVideo.d.ts","sourceRoot":"","sources":["../../../../src/tasks/cv/textToVideo.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAqB,OAAO,EAAE,MAAM,aAAa,CAAC;AACxE,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAO3D,MAAM,MAAM,eAAe,GAAG,QAAQ,GAAG,gBAAgB,CAAC;AAE1D,MAAM,MAAM,iBAAiB,GAAG,IAAI,CAAC;AAoBrC,wBAAsB,WAAW,CAAC,IAAI,EAAE,eAAe,EAAE,OAAO,CAAC,EAAE,OAAO,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAwDtG"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@huggingface/inference",
3
- "version": "3.5.2",
3
+ "version": "3.6.1",
4
4
  "packageManager": "pnpm@8.10.5",
5
5
  "license": "MIT",
6
6
  "author": "Hugging Face and Tim Mikeladze <tim.mikeladze@gmail.com>",
@@ -26,7 +26,8 @@
26
26
  },
27
27
  "files": [
28
28
  "dist",
29
- "src"
29
+ "src",
30
+ "!src/snippets/templates/**/*.jinja"
30
31
  ],
31
32
  "source": "src/index.ts",
32
33
  "types": "./dist/src/index.d.ts",
@@ -39,14 +40,15 @@
39
40
  },
40
41
  "type": "module",
41
42
  "dependencies": {
42
- "@huggingface/tasks": "^0.17.4"
43
+ "@huggingface/tasks": "^0.17.8",
44
+ "@huggingface/jinja": "^0.3.3"
43
45
  },
44
46
  "devDependencies": {
45
47
  "@types/node": "18.13.0"
46
48
  },
47
49
  "resolutions": {},
48
50
  "scripts": {
49
- "build": "tsup src/index.ts --format cjs,esm --clean && tsc --emitDeclarationOnly --declaration",
51
+ "build": "pnpm run export-templates && tsup src/index.ts --format cjs,esm --clean && tsc --emitDeclarationOnly --declaration",
50
52
  "dts": "tsx scripts/generate-dts.ts && tsc --noEmit dist/index.d.ts",
51
53
  "lint": "eslint --quiet --fix --ext .cjs,.ts .",
52
54
  "lint:check": "eslint --ext .cjs,.ts .",
@@ -54,6 +56,8 @@
54
56
  "format:check": "prettier --check .",
55
57
  "test": "vitest run --config vitest.config.mts",
56
58
  "test:browser": "vitest run --browser.name=chrome --browser.headless --config vitest.config.mts",
57
- "check": "tsc"
59
+ "check": "tsc",
60
+ "dev": "pnpm run export-templates && tsup src/index.ts --format cjs,esm --watch",
61
+ "export-templates": "tsx scripts/export-templates.ts"
58
62
  }
59
63
  }
package/src/index.ts CHANGED
@@ -2,6 +2,6 @@ export { InferenceClient, InferenceClientEndpoint, HfInference } from "./Inferen
2
2
  export { InferenceOutputError } from "./lib/InferenceOutputError";
3
3
  export * from "./types";
4
4
  export * from "./tasks";
5
-
6
5
  import * as snippets from "./snippets/index.js";
6
+
7
7
  export { snippets };
@@ -45,7 +45,8 @@ const providerConfigs: Record<InferenceProvider, ProviderConfig> = {
45
45
  };
46
46
 
47
47
  /**
48
- * Helper that prepares request arguments
48
+ * Helper that prepares request arguments.
49
+ * This async version handle the model ID resolution step.
49
50
  */
50
51
  export async function makeRequestOptions(
51
52
  args: RequestArgs & {
@@ -58,13 +59,13 @@ export async function makeRequestOptions(
58
59
  chatCompletion?: boolean;
59
60
  }
60
61
  ): Promise<{ url: string; info: RequestInit }> {
61
- const { accessToken, endpointUrl, provider: maybeProvider, model: maybeModel, ...remainingArgs } = args;
62
+ const { provider: maybeProvider, model: maybeModel } = args;
62
63
  const provider = maybeProvider ?? "hf-inference";
63
64
  const providerConfig = providerConfigs[provider];
65
+ const { task, chatCompletion } = options ?? {};
64
66
 
65
- const { includeCredentials, task, chatCompletion, signal } = options ?? {};
66
-
67
- if (endpointUrl && provider !== "hf-inference") {
67
+ // Validate inputs
68
+ if (args.endpointUrl && provider !== "hf-inference") {
68
69
  throw new Error(`Cannot use endpointUrl with a third-party provider.`);
69
70
  }
70
71
  if (maybeModel && isUrl(maybeModel)) {
@@ -79,18 +80,44 @@ export async function makeRequestOptions(
79
80
  if (providerConfig.clientSideRoutingOnly && !maybeModel) {
80
81
  throw new Error(`Provider ${provider} requires a model ID to be passed directly.`);
81
82
  }
83
+
82
84
  // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
83
85
  const hfModel = maybeModel ?? (await loadDefaultModel(task!));
84
- const model = providerConfig.clientSideRoutingOnly
86
+ const resolvedModel = providerConfig.clientSideRoutingOnly
85
87
  ? // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
86
88
  removeProviderPrefix(maybeModel!, provider)
87
- : // For closed-models API providers, one needs to pass the model ID directly (e.g. "gpt-3.5-turbo")
88
- await getProviderModelId({ model: hfModel, provider }, args, {
89
+ : await getProviderModelId({ model: hfModel, provider }, args, {
89
90
  task,
90
91
  chatCompletion,
91
92
  fetch: options?.fetch,
92
93
  });
93
94
 
95
+ // Use the sync version with the resolved model
96
+ return makeRequestOptionsFromResolvedModel(resolvedModel, args, options);
97
+ }
98
+
99
+ /**
100
+ * Helper that prepares request arguments. - for internal use only
101
+ * This sync version skips the model ID resolution step
102
+ */
103
+ export function makeRequestOptionsFromResolvedModel(
104
+ resolvedModel: string,
105
+ args: RequestArgs & {
106
+ data?: Blob | ArrayBuffer;
107
+ stream?: boolean;
108
+ },
109
+ options?: Options & {
110
+ task?: InferenceTask;
111
+ chatCompletion?: boolean;
112
+ }
113
+ ): { url: string; info: RequestInit } {
114
+ const { accessToken, endpointUrl, provider: maybeProvider, model, ...remainingArgs } = args;
115
+
116
+ const provider = maybeProvider ?? "hf-inference";
117
+ const providerConfig = providerConfigs[provider];
118
+
119
+ const { includeCredentials, task, chatCompletion, signal } = options ?? {};
120
+
94
121
  const authMethod = (() => {
95
122
  if (providerConfig.clientSideRoutingOnly) {
96
123
  // Closed-source providers require an accessToken (cannot be routed).
@@ -119,7 +146,7 @@ export async function makeRequestOptions(
119
146
  authMethod !== "provider-key"
120
147
  ? HF_HUB_INFERENCE_PROXY_TEMPLATE.replace("{{PROVIDER}}", provider)
121
148
  : providerConfig.baseUrl,
122
- model,
149
+ model: resolvedModel,
123
150
  chatCompletion,
124
151
  task,
125
152
  });
@@ -150,7 +177,7 @@ export async function makeRequestOptions(
150
177
  : JSON.stringify(
151
178
  providerConfig.makeBody({
152
179
  args: remainingArgs as Record<string, unknown>,
153
- model,
180
+ model: resolvedModel,
154
181
  task,
155
182
  chatCompletion,
156
183
  })
@@ -30,7 +30,7 @@ const makeHeaders = (params: HeaderParams): Record<string, string> => {
30
30
  };
31
31
 
32
32
  const makeUrl = (params: UrlParams): string => {
33
- if (params.task === "text-generation" && params.chatCompletion) {
33
+ if (params.chatCompletion) {
34
34
  return `${params.baseUrl}/inference/v1/chat/completions`;
35
35
  }
36
36
  return `${params.baseUrl}/inference`;
@@ -29,7 +29,7 @@ const makeUrl = (params: UrlParams): string => {
29
29
  /// when deployed on hf-inference, those two tasks are automatically compatible with one another.
30
30
  return `${params.baseUrl}/pipeline/${params.task}/${params.model}`;
31
31
  }
32
- if (params.task === "text-generation" && params.chatCompletion) {
32
+ if (params.chatCompletion) {
33
33
  return `${params.baseUrl}/models/${params.model}/v1/chat/completions`;
34
34
  }
35
35
  return `${params.baseUrl}/models/${params.model}`;
@@ -33,10 +33,10 @@ const makeUrl = (params: UrlParams): string => {
33
33
  if (params.task === "text-to-image") {
34
34
  return `${params.baseUrl}/v1/images/generations`;
35
35
  }
36
+ if (params.chatCompletion) {
37
+ return `${params.baseUrl}/v1/chat/completions`;
38
+ }
36
39
  if (params.task === "text-generation") {
37
- if (params.chatCompletion) {
38
- return `${params.baseUrl}/v1/chat/completions`;
39
- }
40
40
  return `${params.baseUrl}/v1/completions`;
41
41
  }
42
42
  return params.baseUrl;
@@ -16,7 +16,7 @@
16
16
  */
17
17
  import type { ProviderConfig, UrlParams, HeaderParams, BodyParams } from "../types";
18
18
 
19
- const NOVITA_API_BASE_URL = "https://api.novita.ai/v3/openai";
19
+ const NOVITA_API_BASE_URL = "https://api.novita.ai";
20
20
 
21
21
  const makeBody = (params: BodyParams): Record<string, unknown> => {
22
22
  return {
@@ -30,11 +30,12 @@ const makeHeaders = (params: HeaderParams): Record<string, string> => {
30
30
  };
31
31
 
32
32
  const makeUrl = (params: UrlParams): string => {
33
- if (params.task === "text-generation") {
34
- if (params.chatCompletion) {
35
- return `${params.baseUrl}/chat/completions`;
36
- }
37
- return `${params.baseUrl}/completions`;
33
+ if (params.chatCompletion) {
34
+ return `${params.baseUrl}/v3/openai/chat/completions`;
35
+ } else if (params.task === "text-generation") {
36
+ return `${params.baseUrl}/v3/openai/completions`;
37
+ } else if (params.task === "text-to-video") {
38
+ return `${params.baseUrl}/v3/hf/${params.model}`;
38
39
  }
39
40
  return params.baseUrl;
40
41
  };
@@ -30,7 +30,7 @@ const makeHeaders = (params: HeaderParams): Record<string, string> => {
30
30
  };
31
31
 
32
32
  const makeUrl = (params: UrlParams): string => {
33
- if (params.task === "text-generation" && params.chatCompletion) {
33
+ if (params.chatCompletion) {
34
34
  return `${params.baseUrl}/v1/chat/completions`;
35
35
  }
36
36
  return params.baseUrl;
@@ -33,10 +33,10 @@ const makeUrl = (params: UrlParams): string => {
33
33
  if (params.task === "text-to-image") {
34
34
  return `${params.baseUrl}/v1/images/generations`;
35
35
  }
36
+ if (params.chatCompletion) {
37
+ return `${params.baseUrl}/v1/chat/completions`;
38
+ }
36
39
  if (params.task === "text-generation") {
37
- if (params.chatCompletion) {
38
- return `${params.baseUrl}/v1/chat/completions`;
39
- }
40
40
  return `${params.baseUrl}/v1/completions`;
41
41
  }
42
42
  return params.baseUrl;