kiln-ai 0.19.0__py3-none-any.whl → 0.20.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of kiln-ai might be problematic. Click here for more details.

Files changed (70)
  1. kiln_ai/adapters/__init__.py +2 -2
  2. kiln_ai/adapters/adapter_registry.py +19 -1
  3. kiln_ai/adapters/chat/chat_formatter.py +8 -12
  4. kiln_ai/adapters/chat/test_chat_formatter.py +6 -2
  5. kiln_ai/adapters/docker_model_runner_tools.py +119 -0
  6. kiln_ai/adapters/eval/base_eval.py +2 -2
  7. kiln_ai/adapters/eval/eval_runner.py +3 -1
  8. kiln_ai/adapters/eval/g_eval.py +2 -2
  9. kiln_ai/adapters/eval/test_base_eval.py +1 -1
  10. kiln_ai/adapters/eval/test_g_eval.py +3 -4
  11. kiln_ai/adapters/fine_tune/__init__.py +1 -1
  12. kiln_ai/adapters/fine_tune/openai_finetune.py +14 -4
  13. kiln_ai/adapters/fine_tune/test_openai_finetune.py +108 -111
  14. kiln_ai/adapters/ml_model_list.py +380 -34
  15. kiln_ai/adapters/model_adapters/base_adapter.py +51 -21
  16. kiln_ai/adapters/model_adapters/litellm_adapter.py +383 -79
  17. kiln_ai/adapters/model_adapters/test_base_adapter.py +193 -17
  18. kiln_ai/adapters/model_adapters/test_litellm_adapter.py +406 -1
  19. kiln_ai/adapters/model_adapters/test_litellm_adapter_tools.py +1103 -0
  20. kiln_ai/adapters/model_adapters/test_saving_adapter_results.py +5 -5
  21. kiln_ai/adapters/model_adapters/test_structured_output.py +110 -4
  22. kiln_ai/adapters/parsers/__init__.py +1 -1
  23. kiln_ai/adapters/provider_tools.py +15 -1
  24. kiln_ai/adapters/repair/test_repair_task.py +12 -9
  25. kiln_ai/adapters/run_output.py +3 -0
  26. kiln_ai/adapters/test_adapter_registry.py +80 -1
  27. kiln_ai/adapters/test_docker_model_runner_tools.py +305 -0
  28. kiln_ai/adapters/test_ml_model_list.py +39 -1
  29. kiln_ai/adapters/test_prompt_adaptors.py +13 -6
  30. kiln_ai/adapters/test_provider_tools.py +55 -0
  31. kiln_ai/adapters/test_remote_config.py +98 -0
  32. kiln_ai/datamodel/__init__.py +23 -21
  33. kiln_ai/datamodel/datamodel_enums.py +1 -0
  34. kiln_ai/datamodel/eval.py +1 -1
  35. kiln_ai/datamodel/external_tool_server.py +298 -0
  36. kiln_ai/datamodel/json_schema.py +25 -10
  37. kiln_ai/datamodel/project.py +8 -1
  38. kiln_ai/datamodel/registry.py +0 -15
  39. kiln_ai/datamodel/run_config.py +62 -0
  40. kiln_ai/datamodel/task.py +2 -77
  41. kiln_ai/datamodel/task_output.py +6 -1
  42. kiln_ai/datamodel/task_run.py +41 -0
  43. kiln_ai/datamodel/test_basemodel.py +3 -3
  44. kiln_ai/datamodel/test_example_models.py +175 -0
  45. kiln_ai/datamodel/test_external_tool_server.py +691 -0
  46. kiln_ai/datamodel/test_registry.py +8 -3
  47. kiln_ai/datamodel/test_task.py +15 -47
  48. kiln_ai/datamodel/test_tool_id.py +239 -0
  49. kiln_ai/datamodel/tool_id.py +83 -0
  50. kiln_ai/tools/__init__.py +8 -0
  51. kiln_ai/tools/base_tool.py +82 -0
  52. kiln_ai/tools/built_in_tools/__init__.py +13 -0
  53. kiln_ai/tools/built_in_tools/math_tools.py +124 -0
  54. kiln_ai/tools/built_in_tools/test_math_tools.py +204 -0
  55. kiln_ai/tools/mcp_server_tool.py +95 -0
  56. kiln_ai/tools/mcp_session_manager.py +243 -0
  57. kiln_ai/tools/test_base_tools.py +199 -0
  58. kiln_ai/tools/test_mcp_server_tool.py +457 -0
  59. kiln_ai/tools/test_mcp_session_manager.py +1585 -0
  60. kiln_ai/tools/test_tool_registry.py +473 -0
  61. kiln_ai/tools/tool_registry.py +64 -0
  62. kiln_ai/utils/config.py +22 -0
  63. kiln_ai/utils/open_ai_types.py +94 -0
  64. kiln_ai/utils/project_utils.py +17 -0
  65. kiln_ai/utils/test_config.py +138 -1
  66. kiln_ai/utils/test_open_ai_types.py +131 -0
  67. {kiln_ai-0.19.0.dist-info → kiln_ai-0.20.1.dist-info}/METADATA +6 -5
  68. {kiln_ai-0.19.0.dist-info → kiln_ai-0.20.1.dist-info}/RECORD +70 -47
  69. {kiln_ai-0.19.0.dist-info → kiln_ai-0.20.1.dist-info}/WHEEL +0 -0
  70. {kiln_ai-0.19.0.dist-info → kiln_ai-0.20.1.dist-info}/licenses/LICENSE.txt +0 -0
@@ -93,6 +93,7 @@ class ModelName(str, Enum):
93
93
  gemma_2_2b = "gemma_2_2b"
94
94
  gemma_2_9b = "gemma_2_9b"
95
95
  gemma_2_27b = "gemma_2_27b"
96
+ gemma_3_0p27b = "gemma_3_0p27b"
96
97
  gemma_3_1b = "gemma_3_1b"
97
98
  gemma_3_4b = "gemma_3_4b"
98
99
  gemma_3_12b = "gemma_3_12b"
@@ -119,6 +120,7 @@ class ModelName(str, Enum):
119
120
  qwen_2p5_14b = "qwen_2p5_14b"
120
121
  qwen_2p5_72b = "qwen_2p5_72b"
121
122
  qwq_32b = "qwq_32b"
123
+ deepseek_3_1 = "deepseek_3_1"
122
124
  deepseek_3 = "deepseek_3"
123
125
  deepseek_r1 = "deepseek_r1"
124
126
  deepseek_r1_0528 = "deepseek_r1_0528"
@@ -157,6 +159,8 @@ class ModelName(str, Enum):
157
159
  qwen_long_l1_32b = "qwen_long_l1_32b"
158
160
  kimi_k2 = "kimi_k2"
159
161
  kimi_dev_72b = "kimi_dev_72b"
162
+ glm_4_5 = "glm_4_5"
163
+ glm_4_5_air = "glm_4_5_air"
160
164
  glm_4_1v_9b_thinking = "glm_4_1v_9b_thinking"
161
165
  glm_z1_32b_0414 = "glm_z1_32b_0414"
162
166
  glm_z1_9b_0414 = "glm_z1_9b_0414"
@@ -213,6 +217,7 @@ class KilnModelProvider(BaseModel):
213
217
  reasoning_capable: bool = False
214
218
  supports_logprobs: bool = False
215
219
  suggested_for_evals: bool = False
220
+ supports_function_calling: bool = True
216
221
  uncensored: bool = False
217
222
  suggested_for_uncensored_data_gen: bool = False
218
223
  tuned_chat_strategy: ChatStrategy | None = None
@@ -271,6 +276,13 @@ built_in_models: List[KilnModel] = [
271
276
  suggested_for_data_gen=True,
272
277
  suggested_for_evals=True,
273
278
  ),
279
+ KilnModelProvider(
280
+ name=ModelProviderName.openrouter,
281
+ model_id="openai/gpt-5",
282
+ structured_output_mode=StructuredOutputMode.json_schema,
283
+ suggested_for_data_gen=True,
284
+ suggested_for_evals=True,
285
+ ),
274
286
  ],
275
287
  ),
276
288
  # GPT 5 Mini
@@ -286,6 +298,13 @@ built_in_models: List[KilnModel] = [
286
298
  suggested_for_evals=True,
287
299
  suggested_for_data_gen=True,
288
300
  ),
301
+ KilnModelProvider(
302
+ name=ModelProviderName.openrouter,
303
+ model_id="openai/gpt-5-mini",
304
+ structured_output_mode=StructuredOutputMode.json_schema,
305
+ suggested_for_evals=True,
306
+ suggested_for_data_gen=True,
307
+ ),
289
308
  ],
290
309
  ),
291
310
  # GPT 5 Nano
@@ -299,6 +318,11 @@ built_in_models: List[KilnModel] = [
299
318
  model_id="gpt-5-nano",
300
319
  structured_output_mode=StructuredOutputMode.json_schema,
301
320
  ),
321
+ KilnModelProvider(
322
+ name=ModelProviderName.openrouter,
323
+ model_id="openai/gpt-5-nano",
324
+ structured_output_mode=StructuredOutputMode.json_schema,
325
+ ),
302
326
  ],
303
327
  ),
304
328
  # GPT 5 Chat
@@ -312,6 +336,7 @@ built_in_models: List[KilnModel] = [
312
336
  model_id="gpt-5-chat-latest",
313
337
  # Oddly no json_schema support for this model.
314
338
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
339
+ supports_function_calling=False,
315
340
  ),
316
341
  ],
317
342
  ),
@@ -328,6 +353,7 @@ built_in_models: List[KilnModel] = [
328
353
  structured_output_mode=StructuredOutputMode.json_schema,
329
354
  supports_logprobs=True,
330
355
  suggested_for_evals=True,
356
+ suggested_for_data_gen=True,
331
357
  ),
332
358
  KilnModelProvider(
333
359
  name=ModelProviderName.openrouter,
@@ -335,11 +361,13 @@ built_in_models: List[KilnModel] = [
335
361
  structured_output_mode=StructuredOutputMode.json_schema,
336
362
  supports_logprobs=True,
337
363
  suggested_for_evals=True,
364
+ suggested_for_data_gen=True,
338
365
  ),
339
366
  KilnModelProvider(
340
367
  name=ModelProviderName.azure_openai,
341
368
  model_id="gpt-4.1",
342
369
  suggested_for_evals=True,
370
+ suggested_for_data_gen=True,
343
371
  ),
344
372
  ],
345
373
  ),
@@ -788,26 +816,25 @@ built_in_models: List[KilnModel] = [
788
816
  ),
789
817
  ],
790
818
  ),
791
- # Claude 3.5 Sonnet
819
+ # Claude Sonnet 4
792
820
  KilnModel(
793
821
  family=ModelFamily.claude,
794
- name=ModelName.claude_3_5_sonnet,
795
- friendly_name="Claude 3.5 Sonnet",
822
+ name=ModelName.claude_sonnet_4,
823
+ friendly_name="Claude 4 Sonnet",
796
824
  providers=[
797
825
  KilnModelProvider(
798
826
  name=ModelProviderName.openrouter,
827
+ model_id="anthropic/claude-sonnet-4",
799
828
  structured_output_mode=StructuredOutputMode.function_calling,
800
- model_id="anthropic/claude-3.5-sonnet",
829
+ suggested_for_data_gen=True,
830
+ suggested_for_evals=True,
801
831
  ),
802
832
  KilnModelProvider(
803
833
  name=ModelProviderName.anthropic,
804
- model_id="claude-3-5-sonnet-20241022",
834
+ model_id="claude-sonnet-4-20250514",
805
835
  structured_output_mode=StructuredOutputMode.function_calling,
806
- ),
807
- KilnModelProvider(
808
- name=ModelProviderName.vertex,
809
- model_id="claude-3-5-sonnet",
810
- structured_output_mode=StructuredOutputMode.function_calling_weak,
836
+ suggested_for_data_gen=True,
837
+ suggested_for_evals=True,
811
838
  ),
812
839
  ],
813
840
  ),
@@ -852,25 +879,26 @@ built_in_models: List[KilnModel] = [
852
879
  ),
853
880
  ],
854
881
  ),
855
- # Claude Sonnet 4
882
+ # Claude 3.5 Sonnet
856
883
  KilnModel(
857
884
  family=ModelFamily.claude,
858
- name=ModelName.claude_sonnet_4,
859
- friendly_name="Claude Sonnet 4",
885
+ name=ModelName.claude_3_5_sonnet,
886
+ friendly_name="Claude 3.5 Sonnet",
860
887
  providers=[
861
888
  KilnModelProvider(
862
889
  name=ModelProviderName.openrouter,
863
- model_id="anthropic/claude-sonnet-4",
864
890
  structured_output_mode=StructuredOutputMode.function_calling,
865
- suggested_for_data_gen=True,
866
- suggested_for_evals=True,
891
+ model_id="anthropic/claude-3.5-sonnet",
867
892
  ),
868
893
  KilnModelProvider(
869
894
  name=ModelProviderName.anthropic,
870
- model_id="claude-sonnet-4-20250514",
895
+ model_id="claude-3-5-sonnet-20241022",
871
896
  structured_output_mode=StructuredOutputMode.function_calling,
872
- suggested_for_data_gen=True,
873
- suggested_for_evals=True,
897
+ ),
898
+ KilnModelProvider(
899
+ name=ModelProviderName.vertex,
900
+ model_id="claude-3-5-sonnet",
901
+ structured_output_mode=StructuredOutputMode.function_calling_weak,
874
902
  ),
875
903
  ],
876
904
  ),
@@ -1084,6 +1112,7 @@ built_in_models: List[KilnModel] = [
1084
1112
  supports_structured_output=False,
1085
1113
  supports_data_gen=False,
1086
1114
  model_id="nvidia/llama-3.1-nemotron-70b-instruct",
1115
+ supports_function_calling=False,
1087
1116
  ),
1088
1117
  ],
1089
1118
  ),
@@ -1097,16 +1126,19 @@ built_in_models: List[KilnModel] = [
1097
1126
  name=ModelProviderName.openrouter,
1098
1127
  model_id="meta-llama/llama-4-maverick",
1099
1128
  structured_output_mode=StructuredOutputMode.json_schema,
1129
+ supports_function_calling=False,
1100
1130
  ),
1101
1131
  KilnModelProvider(
1102
1132
  name=ModelProviderName.fireworks_ai,
1103
1133
  model_id="accounts/fireworks/models/llama4-maverick-instruct-basic",
1104
1134
  structured_output_mode=StructuredOutputMode.json_schema,
1135
+ supports_function_calling=False,
1105
1136
  ),
1106
1137
  KilnModelProvider(
1107
1138
  name=ModelProviderName.together_ai,
1108
1139
  model_id="meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
1109
1140
  structured_output_mode=StructuredOutputMode.json_schema,
1141
+ supports_function_calling=False,
1110
1142
  ),
1111
1143
  KilnModelProvider(
1112
1144
  name=ModelProviderName.cerebras,
@@ -1125,16 +1157,19 @@ built_in_models: List[KilnModel] = [
1125
1157
  name=ModelProviderName.openrouter,
1126
1158
  model_id="meta-llama/llama-4-scout",
1127
1159
  structured_output_mode=StructuredOutputMode.json_schema,
1160
+ supports_function_calling=False,
1128
1161
  ),
1129
1162
  KilnModelProvider(
1130
1163
  name=ModelProviderName.fireworks_ai,
1131
1164
  model_id="accounts/fireworks/models/llama4-scout-instruct-basic",
1132
1165
  structured_output_mode=StructuredOutputMode.json_schema,
1166
+ supports_function_calling=False,
1133
1167
  ),
1134
1168
  KilnModelProvider(
1135
1169
  name=ModelProviderName.together_ai,
1136
1170
  model_id="meta-llama/Llama-4-Scout-17B-16E-Instruct",
1137
1171
  structured_output_mode=StructuredOutputMode.json_schema,
1172
+ supports_function_calling=False,
1138
1173
  ),
1139
1174
  KilnModelProvider(
1140
1175
  name=ModelProviderName.cerebras,
@@ -1152,24 +1187,28 @@ built_in_models: List[KilnModel] = [
1152
1187
  KilnModelProvider(
1153
1188
  name=ModelProviderName.groq,
1154
1189
  model_id="llama-3.1-8b-instant",
1190
+ supports_function_calling=False,
1155
1191
  ),
1156
1192
  KilnModelProvider(
1157
1193
  name=ModelProviderName.amazon_bedrock,
1158
1194
  structured_output_mode=StructuredOutputMode.json_schema,
1159
1195
  supports_structured_output=False,
1160
1196
  model_id="meta.llama3-1-8b-instruct-v1:0",
1197
+ supports_function_calling=False,
1161
1198
  ),
1162
1199
  KilnModelProvider(
1163
1200
  name=ModelProviderName.ollama,
1164
1201
  structured_output_mode=StructuredOutputMode.json_schema,
1165
1202
  model_id="llama3.1:8b",
1166
1203
  ollama_model_aliases=["llama3.1"], # 8b is default
1204
+ supports_function_calling=False,
1167
1205
  ),
1168
1206
  KilnModelProvider(
1169
1207
  name=ModelProviderName.openrouter,
1170
1208
  supports_data_gen=False,
1171
1209
  structured_output_mode=StructuredOutputMode.function_calling,
1172
1210
  model_id="meta-llama/llama-3.1-8b-instruct",
1211
+ supports_function_calling=False,
1173
1212
  ),
1174
1213
  KilnModelProvider(
1175
1214
  name=ModelProviderName.fireworks_ai,
@@ -1177,6 +1216,7 @@ built_in_models: List[KilnModel] = [
1177
1216
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
1178
1217
  supports_data_gen=False,
1179
1218
  model_id="accounts/fireworks/models/llama-v3p1-8b-instruct",
1219
+ supports_function_calling=False,
1180
1220
  ),
1181
1221
  KilnModelProvider(
1182
1222
  name=ModelProviderName.together_ai,
@@ -1184,6 +1224,7 @@ built_in_models: List[KilnModel] = [
1184
1224
  supports_data_gen=False,
1185
1225
  structured_output_mode=StructuredOutputMode.function_calling_weak,
1186
1226
  provider_finetune_id="meta-llama/Meta-Llama-3.1-8B-Instruct-Reference",
1227
+ # Constrained decode? They make function calling work when no one else does!
1187
1228
  ),
1188
1229
  KilnModelProvider(
1189
1230
  name=ModelProviderName.cerebras,
@@ -1191,6 +1232,13 @@ built_in_models: List[KilnModel] = [
1191
1232
  structured_output_mode=StructuredOutputMode.function_calling,
1192
1233
  supports_data_gen=False,
1193
1234
  suggested_for_evals=False,
1235
+ supports_function_calling=False,
1236
+ ),
1237
+ KilnModelProvider(
1238
+ name=ModelProviderName.docker_model_runner,
1239
+ structured_output_mode=StructuredOutputMode.json_schema,
1240
+ model_id="ai/llama3.1:8B-Q4_K_M",
1241
+ supports_function_calling=False,
1194
1242
  ),
1195
1243
  ],
1196
1244
  ),
@@ -1205,6 +1253,7 @@ built_in_models: List[KilnModel] = [
1205
1253
  structured_output_mode=StructuredOutputMode.json_schema,
1206
1254
  supports_data_gen=False,
1207
1255
  model_id="meta.llama3-1-70b-instruct-v1:0",
1256
+ supports_function_calling=False,
1208
1257
  ),
1209
1258
  KilnModelProvider(
1210
1259
  name=ModelProviderName.openrouter,
@@ -1213,11 +1262,13 @@ built_in_models: List[KilnModel] = [
1213
1262
  model_id="meta-llama/llama-3.1-70b-instruct",
1214
1263
  supports_logprobs=True,
1215
1264
  logprobs_openrouter_options=True,
1265
+ supports_function_calling=False,
1216
1266
  ),
1217
1267
  KilnModelProvider(
1218
1268
  name=ModelProviderName.ollama,
1219
1269
  structured_output_mode=StructuredOutputMode.json_schema,
1220
1270
  model_id="llama3.1:70b",
1271
+ supports_function_calling=False,
1221
1272
  ),
1222
1273
  KilnModelProvider(
1223
1274
  name=ModelProviderName.fireworks_ai,
@@ -1255,6 +1306,7 @@ built_in_models: List[KilnModel] = [
1255
1306
  name=ModelProviderName.openrouter,
1256
1307
  structured_output_mode=StructuredOutputMode.function_calling,
1257
1308
  model_id="meta-llama/llama-3.1-405b-instruct",
1309
+ supports_function_calling=False, # Not reliable
1258
1310
  ),
1259
1311
  KilnModelProvider(
1260
1312
  name=ModelProviderName.fireworks_ai,
@@ -1307,6 +1359,12 @@ built_in_models: List[KilnModel] = [
1307
1359
  name=ModelProviderName.openrouter,
1308
1360
  model_id="mistralai/mistral-nemo",
1309
1361
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
1362
+ supports_function_calling=False, # Not reliable
1363
+ ),
1364
+ KilnModelProvider(
1365
+ name=ModelProviderName.docker_model_runner,
1366
+ model_id="ai/mistral-nemo:12B-Q4_K_M",
1367
+ structured_output_mode=StructuredOutputMode.json_schema,
1310
1368
  ),
1311
1369
  ],
1312
1370
  ),
@@ -1351,12 +1409,21 @@ built_in_models: List[KilnModel] = [
1351
1409
  supports_data_gen=False,
1352
1410
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
1353
1411
  model_id="meta-llama/llama-3.2-1b-instruct",
1412
+ supports_function_calling=False,
1354
1413
  ),
1355
1414
  KilnModelProvider(
1356
1415
  name=ModelProviderName.ollama,
1357
1416
  supports_structured_output=False,
1358
1417
  supports_data_gen=False,
1359
1418
  model_id="llama3.2:1b",
1419
+ supports_function_calling=False,
1420
+ ),
1421
+ KilnModelProvider(
1422
+ name=ModelProviderName.docker_model_runner,
1423
+ supports_structured_output=False,
1424
+ supports_data_gen=False,
1425
+ model_id="ai/llama3.2:1B-F16",
1426
+ supports_function_calling=False,
1360
1427
  ),
1361
1428
  ],
1362
1429
  ),
@@ -1372,11 +1439,13 @@ built_in_models: List[KilnModel] = [
1372
1439
  supports_data_gen=False,
1373
1440
  structured_output_mode=StructuredOutputMode.json_schema,
1374
1441
  model_id="meta-llama/llama-3.2-3b-instruct",
1442
+ supports_function_calling=False,
1375
1443
  ),
1376
1444
  KilnModelProvider(
1377
1445
  name=ModelProviderName.ollama,
1378
1446
  supports_data_gen=False,
1379
1447
  model_id="llama3.2",
1448
+ supports_function_calling=False,
1380
1449
  ),
1381
1450
  KilnModelProvider(
1382
1451
  name=ModelProviderName.together_ai,
@@ -1384,6 +1453,13 @@ built_in_models: List[KilnModel] = [
1384
1453
  supports_structured_output=False,
1385
1454
  supports_data_gen=False,
1386
1455
  ),
1456
+ KilnModelProvider(
1457
+ name=ModelProviderName.docker_model_runner,
1458
+ model_id="ai/llama3.2:3B-Q4_K_M",
1459
+ structured_output_mode=StructuredOutputMode.json_schema,
1460
+ supports_data_gen=False,
1461
+ supports_function_calling=False,
1462
+ ),
1387
1463
  ],
1388
1464
  ),
1389
1465
  # Llama 3.2 11B
@@ -1399,17 +1475,20 @@ built_in_models: List[KilnModel] = [
1399
1475
  supports_structured_output=False,
1400
1476
  supports_data_gen=False,
1401
1477
  model_id="meta-llama/llama-3.2-11b-vision-instruct",
1478
+ supports_function_calling=False,
1402
1479
  ),
1403
1480
  KilnModelProvider(
1404
1481
  name=ModelProviderName.ollama,
1405
1482
  structured_output_mode=StructuredOutputMode.json_schema,
1406
1483
  model_id="llama3.2-vision",
1484
+ supports_function_calling=False,
1407
1485
  ),
1408
1486
  KilnModelProvider(
1409
1487
  name=ModelProviderName.together_ai,
1410
1488
  model_id="meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo",
1411
1489
  supports_structured_output=False,
1412
1490
  supports_data_gen=False,
1491
+ supports_function_calling=False, # weird 3b works and 11b doesn't but... vision?
1413
1492
  ),
1414
1493
  ],
1415
1494
  ),
@@ -1423,17 +1502,20 @@ built_in_models: List[KilnModel] = [
1423
1502
  name=ModelProviderName.openrouter,
1424
1503
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
1425
1504
  model_id="meta-llama/llama-3.2-90b-vision-instruct",
1505
+ supports_function_calling=False,
1426
1506
  ),
1427
1507
  KilnModelProvider(
1428
1508
  name=ModelProviderName.ollama,
1429
1509
  structured_output_mode=StructuredOutputMode.json_schema,
1430
1510
  model_id="llama3.2-vision:90b",
1511
+ supports_function_calling=False,
1431
1512
  ),
1432
1513
  KilnModelProvider(
1433
1514
  name=ModelProviderName.together_ai,
1434
1515
  model_id="meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo",
1435
1516
  supports_structured_output=False,
1436
1517
  supports_data_gen=False,
1518
+ supports_function_calling=False,
1437
1519
  ),
1438
1520
  ],
1439
1521
  ),
@@ -1450,23 +1532,27 @@ built_in_models: List[KilnModel] = [
1450
1532
  # Openrouter not working with json_schema or tools. JSON_schema sometimes works so force that, but not consistently so still not recommended.
1451
1533
  supports_structured_output=False,
1452
1534
  supports_data_gen=False,
1535
+ supports_function_calling=False,
1453
1536
  ),
1454
1537
  KilnModelProvider(
1455
1538
  name=ModelProviderName.groq,
1456
1539
  supports_structured_output=True,
1457
1540
  supports_data_gen=True,
1458
1541
  model_id="llama-3.3-70b-versatile",
1542
+ supports_function_calling=False,
1459
1543
  ),
1460
1544
  KilnModelProvider(
1461
1545
  name=ModelProviderName.ollama,
1462
1546
  structured_output_mode=StructuredOutputMode.json_schema,
1463
1547
  model_id="llama3.3",
1548
+ supports_function_calling=False,
1464
1549
  ),
1465
1550
  KilnModelProvider(
1466
1551
  name=ModelProviderName.fireworks_ai,
1467
1552
  # Tool calling forces schema -- fireworks doesn't support json_schema, just json_mode
1468
1553
  structured_output_mode=StructuredOutputMode.function_calling_weak,
1469
1554
  model_id="accounts/fireworks/models/llama-v3p3-70b-instruct",
1555
+ supports_function_calling=False,
1470
1556
  ),
1471
1557
  KilnModelProvider(
1472
1558
  name=ModelProviderName.vertex,
@@ -1474,11 +1560,19 @@ built_in_models: List[KilnModel] = [
1474
1560
  # Doesn't work yet; needs debugging
1475
1561
  supports_structured_output=False,
1476
1562
  supports_data_gen=False,
1563
+ supports_function_calling=False,
1477
1564
  ),
1478
1565
  KilnModelProvider(
1479
1566
  name=ModelProviderName.together_ai,
1480
1567
  model_id="meta-llama/Llama-3.3-70B-Instruct-Turbo",
1481
1568
  structured_output_mode=StructuredOutputMode.function_calling_weak,
1569
+ # Tools work. Probably constrained decode? Nice
1570
+ ),
1571
+ KilnModelProvider(
1572
+ name=ModelProviderName.docker_model_runner,
1573
+ structured_output_mode=StructuredOutputMode.json_schema,
1574
+ model_id="ai/llama3.3:70B-Q4_K_M",
1575
+ supports_function_calling=False,
1482
1576
  ),
1483
1577
  ],
1484
1578
  ),
@@ -1494,6 +1588,7 @@ built_in_models: List[KilnModel] = [
1494
1588
  supports_structured_output=False,
1495
1589
  supports_data_gen=False,
1496
1590
  model_id="phi3.5",
1591
+ supports_function_calling=False,
1497
1592
  ),
1498
1593
  KilnModelProvider(
1499
1594
  name=ModelProviderName.openrouter,
@@ -1501,6 +1596,7 @@ built_in_models: List[KilnModel] = [
1501
1596
  supports_data_gen=False,
1502
1597
  model_id="microsoft/phi-3.5-mini-128k-instruct",
1503
1598
  structured_output_mode=StructuredOutputMode.json_schema,
1599
+ supports_function_calling=False,
1504
1600
  ),
1505
1601
  ],
1506
1602
  ),
@@ -1514,6 +1610,7 @@ built_in_models: List[KilnModel] = [
1514
1610
  name=ModelProviderName.ollama,
1515
1611
  structured_output_mode=StructuredOutputMode.json_schema,
1516
1612
  model_id="phi4",
1613
+ supports_function_calling=False,
1517
1614
  ),
1518
1615
  KilnModelProvider(
1519
1616
  name=ModelProviderName.openrouter,
@@ -1521,6 +1618,13 @@ built_in_models: List[KilnModel] = [
1521
1618
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
1522
1619
  supports_data_gen=False,
1523
1620
  model_id="microsoft/phi-4",
1621
+ supports_function_calling=False,
1622
+ ),
1623
+ KilnModelProvider(
1624
+ name=ModelProviderName.docker_model_runner,
1625
+ structured_output_mode=StructuredOutputMode.json_schema,
1626
+ model_id="ai/phi4:14B-Q4_K_M",
1627
+ supports_function_calling=False,
1524
1628
  ),
1525
1629
  ],
1526
1630
  ),
@@ -1535,6 +1639,7 @@ built_in_models: List[KilnModel] = [
1535
1639
  model_id="microsoft/phi-4-multimodal-instruct",
1536
1640
  supports_structured_output=False,
1537
1641
  supports_data_gen=False,
1642
+ supports_function_calling=False,
1538
1643
  ),
1539
1644
  ],
1540
1645
  ),
@@ -1547,6 +1652,7 @@ built_in_models: List[KilnModel] = [
1547
1652
  KilnModelProvider(
1548
1653
  name=ModelProviderName.ollama,
1549
1654
  model_id="phi4-mini",
1655
+ supports_function_calling=False,
1550
1656
  ),
1551
1657
  ],
1552
1658
  ),
@@ -1560,6 +1666,7 @@ built_in_models: List[KilnModel] = [
1560
1666
  name=ModelProviderName.ollama,
1561
1667
  supports_data_gen=False,
1562
1668
  model_id="gemma2:2b",
1669
+ supports_function_calling=False,
1563
1670
  ),
1564
1671
  ],
1565
1672
  ),
@@ -1573,6 +1680,7 @@ built_in_models: List[KilnModel] = [
1573
1680
  name=ModelProviderName.ollama,
1574
1681
  supports_data_gen=False,
1575
1682
  model_id="gemma2:9b",
1683
+ supports_function_calling=False,
1576
1684
  ),
1577
1685
  KilnModelProvider(
1578
1686
  name=ModelProviderName.openrouter,
@@ -1581,6 +1689,7 @@ built_in_models: List[KilnModel] = [
1581
1689
  supports_structured_output=False,
1582
1690
  supports_data_gen=False,
1583
1691
  model_id="google/gemma-2-9b-it",
1692
+ supports_function_calling=False,
1584
1693
  ),
1585
1694
  # fireworks AI errors - not allowing system role. Exclude until resolved.
1586
1695
  ],
@@ -1595,15 +1704,32 @@ built_in_models: List[KilnModel] = [
1595
1704
  name=ModelProviderName.ollama,
1596
1705
  supports_data_gen=False,
1597
1706
  model_id="gemma2:27b",
1707
+ supports_function_calling=False,
1598
1708
  ),
1599
1709
  KilnModelProvider(
1600
1710
  name=ModelProviderName.openrouter,
1601
1711
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
1602
1712
  supports_data_gen=False,
1603
1713
  model_id="google/gemma-2-27b-it",
1714
+ supports_function_calling=False,
1604
1715
  ),
1605
1716
  ],
1606
1717
  ),
1718
+ # Gemma 3 270M
1719
+ KilnModel(
1720
+ family=ModelFamily.gemma,
1721
+ name=ModelName.gemma_3_0p27b,
1722
+ friendly_name="Gemma 3 270M",
1723
+ providers=[
1724
+ KilnModelProvider(
1725
+ name=ModelProviderName.docker_model_runner,
1726
+ model_id="ai/gemma3:270M-F16",
1727
+ supports_structured_output=False,
1728
+ supports_data_gen=False,
1729
+ supports_function_calling=False,
1730
+ )
1731
+ ],
1732
+ ),
1607
1733
  # Gemma 3 1B
1608
1734
  KilnModel(
1609
1735
  family=ModelFamily.gemma,
@@ -1615,6 +1741,14 @@ built_in_models: List[KilnModel] = [
1615
1741
  model_id="gemma3:1b",
1616
1742
  supports_structured_output=False,
1617
1743
  supports_data_gen=False,
1744
+ supports_function_calling=False,
1745
+ ),
1746
+ KilnModelProvider(
1747
+ name=ModelProviderName.docker_model_runner,
1748
+ model_id="ai/gemma3:1B-F16",
1749
+ supports_structured_output=False,
1750
+ supports_data_gen=False,
1751
+ supports_function_calling=False,
1618
1752
  ),
1619
1753
  ],
1620
1754
  ),
@@ -1628,11 +1762,18 @@ built_in_models: List[KilnModel] = [
1628
1762
  name=ModelProviderName.ollama,
1629
1763
  model_id="gemma3:4b",
1630
1764
  ollama_model_aliases=["gemma3"],
1765
+ supports_function_calling=False,
1631
1766
  ),
1632
1767
  KilnModelProvider(
1633
1768
  name=ModelProviderName.openrouter,
1634
1769
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
1635
1770
  model_id="google/gemma-3-4b-it",
1771
+ supports_function_calling=False,
1772
+ ),
1773
+ KilnModelProvider(
1774
+ name=ModelProviderName.docker_model_runner,
1775
+ model_id="ai/gemma3:4B-Q4_K_M",
1776
+ supports_function_calling=False,
1636
1777
  ),
1637
1778
  ],
1638
1779
  ),
@@ -1645,11 +1786,13 @@ built_in_models: List[KilnModel] = [
1645
1786
  KilnModelProvider(
1646
1787
  name=ModelProviderName.ollama,
1647
1788
  model_id="gemma3:12b",
1789
+ supports_function_calling=False,
1648
1790
  ),
1649
1791
  KilnModelProvider(
1650
1792
  name=ModelProviderName.openrouter,
1651
1793
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
1652
1794
  model_id="google/gemma-3-12b-it",
1795
+ supports_function_calling=False,
1653
1796
  ),
1654
1797
  ],
1655
1798
  ),
@@ -1662,11 +1805,13 @@ built_in_models: List[KilnModel] = [
1662
1805
  KilnModelProvider(
1663
1806
  name=ModelProviderName.ollama,
1664
1807
  model_id="gemma3:27b",
1808
+ supports_function_calling=False,
1665
1809
  ),
1666
1810
  KilnModelProvider(
1667
1811
  name=ModelProviderName.openrouter,
1668
1812
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
1669
1813
  model_id="google/gemma-3-27b-it",
1814
+ supports_function_calling=False,
1670
1815
  ),
1671
1816
  ],
1672
1817
  ),
@@ -1681,12 +1826,14 @@ built_in_models: List[KilnModel] = [
1681
1826
  model_id="gemma3n:e2b",
1682
1827
  structured_output_mode=StructuredOutputMode.json_schema,
1683
1828
  supports_data_gen=False,
1829
+ supports_function_calling=False,
1684
1830
  ),
1685
1831
  KilnModelProvider(
1686
1832
  name=ModelProviderName.gemini_api,
1687
1833
  model_id="gemma-3n-e2b-it",
1688
1834
  supports_structured_output=False,
1689
1835
  supports_data_gen=False,
1836
+ supports_function_calling=False,
1690
1837
  ),
1691
1838
  ],
1692
1839
  ),
@@ -1701,18 +1848,28 @@ built_in_models: List[KilnModel] = [
1701
1848
  model_id="google/gemma-3n-e4b-it",
1702
1849
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
1703
1850
  supports_data_gen=False,
1851
+ supports_function_calling=False,
1704
1852
  ),
1705
1853
  KilnModelProvider(
1706
1854
  name=ModelProviderName.ollama,
1707
1855
  model_id="gemma3n:e4b",
1708
1856
  supports_data_gen=False,
1709
1857
  structured_output_mode=StructuredOutputMode.json_schema,
1858
+ supports_function_calling=False,
1710
1859
  ),
1711
1860
  KilnModelProvider(
1712
1861
  name=ModelProviderName.gemini_api,
1713
1862
  model_id="gemma-3n-e4b-it",
1714
1863
  structured_output_mode=StructuredOutputMode.json_instructions,
1715
1864
  supports_data_gen=False,
1865
+ supports_function_calling=False,
1866
+ ),
1867
+ KilnModelProvider(
1868
+ name=ModelProviderName.docker_model_runner,
1869
+ model_id="ai/gemma3n:4B-Q4_K_M",
1870
+ supports_data_gen=False,
1871
+ structured_output_mode=StructuredOutputMode.json_schema,
1872
+ supports_function_calling=False,
1716
1873
  ),
1717
1874
  ],
1718
1875
  ),
@@ -1727,10 +1884,12 @@ built_in_models: List[KilnModel] = [
1727
1884
  model_id="mistralai/mixtral-8x7b-instruct",
1728
1885
  supports_data_gen=False,
1729
1886
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
1887
+ supports_function_calling=False,
1730
1888
  ),
1731
1889
  KilnModelProvider(
1732
1890
  name=ModelProviderName.ollama,
1733
1891
  model_id="mixtral",
1892
+ supports_function_calling=False,
1734
1893
  ),
1735
1894
  ],
1736
1895
  ),
@@ -1748,13 +1907,7 @@ built_in_models: List[KilnModel] = [
1748
1907
  r1_openrouter_options=True,
1749
1908
  structured_output_mode=StructuredOutputMode.json_instructions,
1750
1909
  parser=ModelParserID.r1_thinking,
1751
- ),
1752
- KilnModelProvider(
1753
- name=ModelProviderName.fireworks_ai,
1754
- model_id="accounts/fireworks/models/qwq-32b",
1755
- reasoning_capable=True,
1756
- parser=ModelParserID.r1_thinking,
1757
- structured_output_mode=StructuredOutputMode.json_instructions,
1910
+ supports_function_calling=False,
1758
1911
  ),
1759
1912
  KilnModelProvider(
1760
1913
  name=ModelProviderName.ollama,
@@ -1762,6 +1915,7 @@ built_in_models: List[KilnModel] = [
1762
1915
  reasoning_capable=True,
1763
1916
  parser=ModelParserID.r1_thinking,
1764
1917
  structured_output_mode=StructuredOutputMode.json_instructions,
1918
+ supports_function_calling=False,
1765
1919
  ),
1766
1920
  KilnModelProvider(
1767
1921
  name=ModelProviderName.together_ai,
@@ -1769,6 +1923,7 @@ built_in_models: List[KilnModel] = [
1769
1923
  structured_output_mode=StructuredOutputMode.json_instructions,
1770
1924
  parser=ModelParserID.r1_thinking,
1771
1925
  reasoning_capable=True,
1926
+ supports_function_calling=False,
1772
1927
  ),
1773
1928
  KilnModelProvider(
1774
1929
  name=ModelProviderName.siliconflow_cn,
@@ -1776,6 +1931,14 @@ built_in_models: List[KilnModel] = [
1776
1931
  structured_output_mode=StructuredOutputMode.json_instructions,
1777
1932
  reasoning_capable=True,
1778
1933
  ),
1934
+ KilnModelProvider(
1935
+ name=ModelProviderName.docker_model_runner,
1936
+ model_id="ai/qwq:32B-Q4_K_M",
1937
+ reasoning_capable=True,
1938
+ parser=ModelParserID.r1_thinking,
1939
+ structured_output_mode=StructuredOutputMode.json_instructions,
1940
+ supports_function_calling=False,
1941
+ ),
1779
1942
  ],
1780
1943
  ),
1781
1944
  # Qwen 2.5 7B
@@ -1788,10 +1951,17 @@ built_in_models: List[KilnModel] = [
1788
1951
  name=ModelProviderName.openrouter,
1789
1952
  model_id="qwen/qwen-2.5-7b-instruct",
1790
1953
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
1954
+ supports_function_calling=False,
1791
1955
  ),
1792
1956
  KilnModelProvider(
1793
1957
  name=ModelProviderName.ollama,
1794
1958
  model_id="qwen2.5",
1959
+ supports_function_calling=False,
1960
+ ),
1961
+ KilnModelProvider(
1962
+ name=ModelProviderName.docker_model_runner,
1963
+ model_id="ai/qwen2.5:7B-Q4_K_M",
1964
+ supports_function_calling=False,
1795
1965
  ),
1796
1966
  ],
1797
1967
  ),
@@ -1809,6 +1979,7 @@ built_in_models: List[KilnModel] = [
1809
1979
  name=ModelProviderName.ollama,
1810
1980
  model_id="qwen2.5:14b",
1811
1981
  supports_data_gen=False,
1982
+ supports_function_calling=False,
1812
1983
  ),
1813
1984
  ],
1814
1985
  ),
@@ -1830,12 +2001,6 @@ built_in_models: List[KilnModel] = [
1830
2001
  name=ModelProviderName.ollama,
1831
2002
  model_id="qwen2.5:72b",
1832
2003
  ),
1833
- KilnModelProvider(
1834
- name=ModelProviderName.fireworks_ai,
1835
- model_id="accounts/fireworks/models/qwen2p5-72b-instruct",
1836
- # Tool calling forces schema -- fireworks doesn't support json_schema, just json_mode
1837
- structured_output_mode=StructuredOutputMode.function_calling_weak,
1838
- ),
1839
2004
  KilnModelProvider(
1840
2005
  name=ModelProviderName.together_ai,
1841
2006
  provider_finetune_id="Qwen/Qwen2.5-72B-Instruct",
@@ -1853,11 +2018,13 @@ built_in_models: List[KilnModel] = [
1853
2018
  structured_output_mode=StructuredOutputMode.json_instruction_and_object,
1854
2019
  model_id="mistralai/mistral-small-24b-instruct-2501",
1855
2020
  uncensored=True,
2021
+ supports_function_calling=False,
1856
2022
  ),
1857
2023
  KilnModelProvider(
1858
2024
  name=ModelProviderName.ollama,
1859
2025
  model_id="mistral-small:24b",
1860
2026
  uncensored=True,
2027
+ supports_function_calling=False,
1861
2028
  ),
1862
2029
  ],
1863
2030
  ),
@@ -1892,6 +2059,7 @@ built_in_models: List[KilnModel] = [
1892
2059
  parser=ModelParserID.r1_thinking,
1893
2060
  reasoning_capable=True,
1894
2061
  supports_data_gen=True,
2062
+ supports_function_calling=False,
1895
2063
  ),
1896
2064
  KilnModelProvider(
1897
2065
  name=ModelProviderName.siliconflow_cn,
@@ -1900,6 +2068,7 @@ built_in_models: List[KilnModel] = [
1900
2068
  structured_output_mode=StructuredOutputMode.json_instructions,
1901
2069
  reasoning_capable=True,
1902
2070
  supports_data_gen=True,
2071
+ supports_function_calling=False,
1903
2072
  ),
1904
2073
  ],
1905
2074
  ),
@@ -1916,6 +2085,7 @@ built_in_models: List[KilnModel] = [
1916
2085
  reasoning_capable=True,
1917
2086
  r1_openrouter_options=True,
1918
2087
  require_openrouter_reasoning=True,
2088
+ supports_function_calling=False,
1919
2089
  ),
1920
2090
  KilnModelProvider(
1921
2091
  name=ModelProviderName.siliconflow_cn,
@@ -1924,6 +2094,27 @@ built_in_models: List[KilnModel] = [
1924
2094
  reasoning_capable=True,
1925
2095
  reasoning_optional_for_structured_output=True,
1926
2096
  supports_data_gen=False,
2097
+ supports_function_calling=False,
2098
+ ),
2099
+ ],
2100
+ ),
2101
+ # DeepSeek 3.1
2102
+ KilnModel(
2103
+ family=ModelFamily.deepseek,
2104
+ name=ModelName.deepseek_3_1,
2105
+ friendly_name="DeepSeek 3.1",
2106
+ providers=[
2107
+ KilnModelProvider(
2108
+ name=ModelProviderName.openrouter,
2109
+ model_id="deepseek/deepseek-chat-v3.1",
2110
+ structured_output_mode=StructuredOutputMode.json_instruction_and_object,
2111
+ supports_data_gen=True,
2112
+ ),
2113
+ KilnModelProvider(
2114
+ name=ModelProviderName.fireworks_ai,
2115
+ model_id="accounts/fireworks/models/deepseek-v3p1",
2116
+ structured_output_mode=StructuredOutputMode.json_instruction_and_object,
2117
+ supports_data_gen=True,
1927
2118
  ),
1928
2119
  ],
1929
2120
  ),
@@ -1978,6 +2169,7 @@ built_in_models: List[KilnModel] = [
1978
2169
  parser=ModelParserID.r1_thinking,
1979
2170
  structured_output_mode=StructuredOutputMode.json_instructions,
1980
2171
  reasoning_capable=True,
2172
+ supports_function_calling=False,
1981
2173
  ),
1982
2174
  KilnModelProvider(
1983
2175
  # I want your RAM
@@ -2003,6 +2195,7 @@ built_in_models: List[KilnModel] = [
2003
2195
  r1_openrouter_options=True,
2004
2196
  parser=ModelParserID.r1_thinking,
2005
2197
  require_openrouter_reasoning=True,
2198
+ supports_function_calling=False,
2006
2199
  ),
2007
2200
  KilnModelProvider(
2008
2201
  name=ModelProviderName.ollama,
@@ -2010,12 +2203,14 @@ built_in_models: List[KilnModel] = [
2010
2203
  reasoning_capable=True,
2011
2204
  structured_output_mode=StructuredOutputMode.json_instructions,
2012
2205
  model_id="deepseek-r1:32b",
2206
+ supports_function_calling=False,
2013
2207
  ),
2014
2208
  KilnModelProvider(
2015
2209
  name=ModelProviderName.siliconflow_cn,
2016
2210
  model_id="deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
2017
2211
  structured_output_mode=StructuredOutputMode.json_schema,
2018
2212
  reasoning_capable=True,
2213
+ supports_function_calling=False,
2019
2214
  reasoning_optional_for_structured_output=True,
2020
2215
  ),
2021
2216
  ],
@@ -2034,6 +2229,7 @@ built_in_models: List[KilnModel] = [
2034
2229
  r1_openrouter_options=True,
2035
2230
  require_openrouter_reasoning=True,
2036
2231
  parser=ModelParserID.r1_thinking,
2232
+ supports_function_calling=False,
2037
2233
  ),
2038
2234
  KilnModelProvider(
2039
2235
  name=ModelProviderName.ollama,
@@ -2042,12 +2238,23 @@ built_in_models: List[KilnModel] = [
2042
2238
  reasoning_capable=True,
2043
2239
  structured_output_mode=StructuredOutputMode.json_instructions,
2044
2240
  model_id="deepseek-r1:70b",
2241
+ supports_function_calling=False,
2045
2242
  ),
2046
2243
  KilnModelProvider(
2047
2244
  name=ModelProviderName.together_ai,
2048
2245
  model_id="deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
2049
2246
  structured_output_mode=StructuredOutputMode.json_instructions,
2050
2247
  parser=ModelParserID.r1_thinking,
2248
+ supports_function_calling=False,
2249
+ ),
2250
+ KilnModelProvider(
2251
+ name=ModelProviderName.docker_model_runner,
2252
+ supports_data_gen=False,
2253
+ parser=ModelParserID.r1_thinking,
2254
+ reasoning_capable=True,
2255
+ structured_output_mode=StructuredOutputMode.json_instructions,
2256
+ model_id="ai/deepseek-r1-distill-llama:70B-Q4_K_M",
2257
+ supports_function_calling=False,
2051
2258
  ),
2052
2259
  ],
2053
2260
  ),
@@ -2067,6 +2274,7 @@ built_in_models: List[KilnModel] = [
2067
2274
  require_openrouter_reasoning=True,
2068
2275
  openrouter_skip_required_parameters=True,
2069
2276
  parser=ModelParserID.r1_thinking,
2277
+ supports_function_calling=False,
2070
2278
  ),
2071
2279
  KilnModelProvider(
2072
2280
  name=ModelProviderName.ollama,
@@ -2075,12 +2283,14 @@ built_in_models: List[KilnModel] = [
2075
2283
  reasoning_capable=True,
2076
2284
  structured_output_mode=StructuredOutputMode.json_instructions,
2077
2285
  model_id="deepseek-r1:14b",
2286
+ supports_function_calling=False,
2078
2287
  ),
2079
2288
  KilnModelProvider(
2080
2289
  name=ModelProviderName.together_ai,
2081
2290
  model_id="deepseek-ai/DeepSeek-R1-Distill-Qwen-14B",
2082
2291
  structured_output_mode=StructuredOutputMode.json_instructions,
2083
2292
  parser=ModelParserID.r1_thinking,
2293
+ supports_function_calling=False,
2084
2294
  ),
2085
2295
  KilnModelProvider(
2086
2296
  name=ModelProviderName.siliconflow_cn,
@@ -2089,6 +2299,7 @@ built_in_models: List[KilnModel] = [
2089
2299
  reasoning_capable=True,
2090
2300
  reasoning_optional_for_structured_output=True,
2091
2301
  supports_data_gen=False,
2302
+ supports_function_calling=False,
2092
2303
  ),
2093
2304
  ],
2094
2305
  ),
@@ -2110,6 +2321,7 @@ built_in_models: List[KilnModel] = [
2110
2321
  require_openrouter_reasoning=True,
2111
2322
  openrouter_skip_required_parameters=True,
2112
2323
  parser=ModelParserID.r1_thinking,
2324
+ supports_function_calling=False,
2113
2325
  ),
2114
2326
  KilnModelProvider(
2115
2327
  name=ModelProviderName.ollama,
@@ -2120,6 +2332,18 @@ built_in_models: List[KilnModel] = [
2120
2332
  # Best mode, but fails to often to enable without warning
2121
2333
  structured_output_mode=StructuredOutputMode.json_instructions,
2122
2334
  model_id="deepseek-r1:8b",
2335
+ supports_function_calling=False,
2336
+ ),
2337
+ KilnModelProvider(
2338
+ name=ModelProviderName.docker_model_runner,
2339
+ supports_structured_output=False,
2340
+ supports_data_gen=False,
2341
+ parser=ModelParserID.r1_thinking,
2342
+ reasoning_capable=True,
2343
+ # Best mode, but fails to often to enable without warning
2344
+ structured_output_mode=StructuredOutputMode.json_instructions,
2345
+ model_id="ai/deepseek-r1-distill-llama:8B-Q4_K_M",
2346
+ supports_function_calling=False,
2123
2347
  ),
2124
2348
  ],
2125
2349
  ),
@@ -2139,6 +2363,7 @@ built_in_models: List[KilnModel] = [
2139
2363
  reasoning_capable=True,
2140
2364
  r1_openrouter_options=True,
2141
2365
  require_openrouter_reasoning=True,
2366
+ supports_function_calling=False,
2142
2367
  ),
2143
2368
  KilnModelProvider(
2144
2369
  name=ModelProviderName.ollama,
@@ -2149,6 +2374,7 @@ built_in_models: List[KilnModel] = [
2149
2374
  reasoning_capable=True,
2150
2375
  structured_output_mode=StructuredOutputMode.json_instructions,
2151
2376
  model_id="deepseek-r1:7b",
2377
+ supports_function_calling=False,
2152
2378
  ),
2153
2379
  KilnModelProvider(
2154
2380
  name=ModelProviderName.siliconflow_cn,
@@ -2159,6 +2385,7 @@ built_in_models: List[KilnModel] = [
2159
2385
  structured_output_mode=StructuredOutputMode.json_instructions,
2160
2386
  reasoning_capable=True,
2161
2387
  reasoning_optional_for_structured_output=True,
2388
+ supports_function_calling=False,
2162
2389
  ),
2163
2390
  ],
2164
2391
  ),
@@ -2179,6 +2406,7 @@ built_in_models: List[KilnModel] = [
2179
2406
  require_openrouter_reasoning=True,
2180
2407
  openrouter_skip_required_parameters=True,
2181
2408
  parser=ModelParserID.r1_thinking,
2409
+ supports_function_calling=False,
2182
2410
  ),
2183
2411
  KilnModelProvider(
2184
2412
  name=ModelProviderName.ollama,
@@ -2188,6 +2416,7 @@ built_in_models: List[KilnModel] = [
2188
2416
  reasoning_capable=True,
2189
2417
  structured_output_mode=StructuredOutputMode.json_instructions,
2190
2418
  model_id="deepseek-r1:1.5b",
2419
+ supports_function_calling=False,
2191
2420
  ),
2192
2421
  KilnModelProvider(
2193
2422
  name=ModelProviderName.together_ai,
@@ -2196,6 +2425,7 @@ built_in_models: List[KilnModel] = [
2196
2425
  parser=ModelParserID.r1_thinking,
2197
2426
  supports_structured_output=False,
2198
2427
  supports_data_gen=False,
2428
+ supports_function_calling=False,
2199
2429
  ),
2200
2430
  ],
2201
2431
  ),
@@ -2212,6 +2442,7 @@ built_in_models: List[KilnModel] = [
2212
2442
  model_id="dolphin-mixtral:8x22b",
2213
2443
  uncensored=True,
2214
2444
  suggested_for_uncensored_data_gen=True,
2445
+ supports_function_calling=False,
2215
2446
  ),
2216
2447
  KilnModelProvider(
2217
2448
  name=ModelProviderName.openrouter,
@@ -2220,6 +2451,7 @@ built_in_models: List[KilnModel] = [
2220
2451
  model_id="cognitivecomputations/dolphin-mixtral-8x22b",
2221
2452
  uncensored=True,
2222
2453
  suggested_for_uncensored_data_gen=True,
2454
+ supports_function_calling=False,
2223
2455
  ),
2224
2456
  ],
2225
2457
  ),
@@ -2304,6 +2536,7 @@ built_in_models: List[KilnModel] = [
2304
2536
  r1_openrouter_options=True,
2305
2537
  parser=ModelParserID.r1_thinking,
2306
2538
  supports_data_gen=False,
2539
+ supports_function_calling=False,
2307
2540
  ),
2308
2541
  KilnModelProvider(
2309
2542
  name=ModelProviderName.ollama,
@@ -2311,6 +2544,15 @@ built_in_models: List[KilnModel] = [
2311
2544
  supports_data_gen=False,
2312
2545
  reasoning_capable=True,
2313
2546
  structured_output_mode=StructuredOutputMode.json_schema,
2547
+ supports_function_calling=False,
2548
+ ),
2549
+ KilnModelProvider(
2550
+ name=ModelProviderName.docker_model_runner,
2551
+ model_id="ai/qwen3:0.6B-F16",
2552
+ supports_data_gen=False,
2553
+ reasoning_capable=True,
2554
+ structured_output_mode=StructuredOutputMode.json_schema,
2555
+ supports_function_calling=False,
2314
2556
  ),
2315
2557
  ],
2316
2558
  ),
@@ -2330,6 +2572,7 @@ built_in_models: List[KilnModel] = [
2330
2572
  r1_openrouter_options=True,
2331
2573
  parser=ModelParserID.r1_thinking,
2332
2574
  supports_data_gen=False,
2575
+ supports_function_calling=False,
2333
2576
  ),
2334
2577
  KilnModelProvider(
2335
2578
  name=ModelProviderName.ollama,
@@ -2337,6 +2580,7 @@ built_in_models: List[KilnModel] = [
2337
2580
  supports_data_gen=False,
2338
2581
  reasoning_capable=True,
2339
2582
  structured_output_mode=StructuredOutputMode.json_schema,
2583
+ supports_function_calling=False,
2340
2584
  ),
2341
2585
  ],
2342
2586
  ),
@@ -2353,6 +2597,7 @@ built_in_models: List[KilnModel] = [
2353
2597
  formatter=ModelFormatterID.qwen3_style_no_think,
2354
2598
  supports_data_gen=False,
2355
2599
  parser=ModelParserID.optional_r1_thinking,
2600
+ supports_function_calling=False,
2356
2601
  ),
2357
2602
  KilnModelProvider(
2358
2603
  name=ModelProviderName.ollama,
@@ -2360,6 +2605,7 @@ built_in_models: List[KilnModel] = [
2360
2605
  formatter=ModelFormatterID.qwen3_style_no_think,
2361
2606
  supports_data_gen=False,
2362
2607
  structured_output_mode=StructuredOutputMode.json_schema,
2608
+ supports_function_calling=False,
2363
2609
  ),
2364
2610
  ],
2365
2611
  ),
@@ -2378,6 +2624,7 @@ built_in_models: List[KilnModel] = [
2378
2624
  r1_openrouter_options=True,
2379
2625
  parser=ModelParserID.r1_thinking,
2380
2626
  supports_data_gen=False,
2627
+ supports_function_calling=False,
2381
2628
  ),
2382
2629
  KilnModelProvider(
2383
2630
  name=ModelProviderName.ollama,
@@ -2385,6 +2632,7 @@ built_in_models: List[KilnModel] = [
2385
2632
  supports_data_gen=False,
2386
2633
  reasoning_capable=True,
2387
2634
  structured_output_mode=StructuredOutputMode.json_schema,
2635
+ supports_function_calling=False,
2388
2636
  ),
2389
2637
  ],
2390
2638
  ),
@@ -2401,6 +2649,7 @@ built_in_models: List[KilnModel] = [
2401
2649
  formatter=ModelFormatterID.qwen3_style_no_think,
2402
2650
  supports_data_gen=False,
2403
2651
  parser=ModelParserID.optional_r1_thinking,
2652
+ supports_function_calling=False,
2404
2653
  ),
2405
2654
  KilnModelProvider(
2406
2655
  name=ModelProviderName.ollama,
@@ -2408,6 +2657,7 @@ built_in_models: List[KilnModel] = [
2408
2657
  structured_output_mode=StructuredOutputMode.json_schema,
2409
2658
  formatter=ModelFormatterID.qwen3_style_no_think,
2410
2659
  supports_data_gen=False,
2660
+ supports_function_calling=False,
2411
2661
  ),
2412
2662
  ],
2413
2663
  ),
@@ -2427,6 +2677,7 @@ built_in_models: List[KilnModel] = [
2427
2677
  r1_openrouter_options=True,
2428
2678
  parser=ModelParserID.r1_thinking,
2429
2679
  supports_data_gen=False,
2680
+ supports_function_calling=False,
2430
2681
  ),
2431
2682
  KilnModelProvider(
2432
2683
  name=ModelProviderName.ollama,
@@ -2434,6 +2685,7 @@ built_in_models: List[KilnModel] = [
2434
2685
  supports_data_gen=False,
2435
2686
  reasoning_capable=True,
2436
2687
  structured_output_mode=StructuredOutputMode.json_schema,
2688
+ supports_function_calling=False,
2437
2689
  ),
2438
2690
  KilnModelProvider(
2439
2691
  name=ModelProviderName.siliconflow_cn,
@@ -2443,6 +2695,15 @@ built_in_models: List[KilnModel] = [
2443
2695
  siliconflow_enable_thinking=True,
2444
2696
  reasoning_optional_for_structured_output=True,
2445
2697
  supports_data_gen=False,
2698
+ supports_function_calling=False,
2699
+ ),
2700
+ KilnModelProvider(
2701
+ name=ModelProviderName.docker_model_runner,
2702
+ model_id="ai/qwen3:8B-Q4_K_M",
2703
+ supports_data_gen=False,
2704
+ reasoning_capable=True,
2705
+ structured_output_mode=StructuredOutputMode.json_schema,
2706
+ supports_function_calling=False,
2446
2707
  ),
2447
2708
  ],
2448
2709
  ),
@@ -2459,6 +2720,7 @@ built_in_models: List[KilnModel] = [
2459
2720
  formatter=ModelFormatterID.qwen3_style_no_think,
2460
2721
  supports_data_gen=False,
2461
2722
  parser=ModelParserID.optional_r1_thinking,
2723
+ supports_function_calling=False,
2462
2724
  ),
2463
2725
  KilnModelProvider(
2464
2726
  name=ModelProviderName.ollama,
@@ -2466,6 +2728,7 @@ built_in_models: List[KilnModel] = [
2466
2728
  structured_output_mode=StructuredOutputMode.json_schema,
2467
2729
  formatter=ModelFormatterID.qwen3_style_no_think,
2468
2730
  supports_data_gen=False,
2731
+ supports_function_calling=False,
2469
2732
  ),
2470
2733
  KilnModelProvider(
2471
2734
  name=ModelProviderName.siliconflow_cn,
@@ -2473,6 +2736,7 @@ built_in_models: List[KilnModel] = [
2473
2736
  structured_output_mode=StructuredOutputMode.json_schema,
2474
2737
  siliconflow_enable_thinking=False,
2475
2738
  supports_data_gen=False,
2739
+ supports_function_calling=False,
2476
2740
  ),
2477
2741
  ],
2478
2742
  ),
@@ -2491,6 +2755,7 @@ built_in_models: List[KilnModel] = [
2491
2755
  r1_openrouter_options=True,
2492
2756
  parser=ModelParserID.r1_thinking,
2493
2757
  supports_data_gen=True,
2758
+ supports_function_calling=False,
2494
2759
  ),
2495
2760
  KilnModelProvider(
2496
2761
  name=ModelProviderName.ollama,
@@ -2498,6 +2763,7 @@ built_in_models: List[KilnModel] = [
2498
2763
  supports_data_gen=True,
2499
2764
  reasoning_capable=True,
2500
2765
  structured_output_mode=StructuredOutputMode.json_schema,
2766
+ supports_function_calling=False,
2501
2767
  ),
2502
2768
  KilnModelProvider(
2503
2769
  name=ModelProviderName.siliconflow_cn,
@@ -2507,6 +2773,15 @@ built_in_models: List[KilnModel] = [
2507
2773
  reasoning_capable=True,
2508
2774
  siliconflow_enable_thinking=True,
2509
2775
  reasoning_optional_for_structured_output=True,
2776
+ supports_function_calling=False,
2777
+ ),
2778
+ KilnModelProvider(
2779
+ name=ModelProviderName.docker_model_runner,
2780
+ model_id="ai/qwen3:14B-Q6_K",
2781
+ supports_data_gen=True,
2782
+ reasoning_capable=True,
2783
+ structured_output_mode=StructuredOutputMode.json_schema,
2784
+ supports_function_calling=False,
2510
2785
  ),
2511
2786
  ],
2512
2787
  ),
@@ -2553,6 +2828,12 @@ built_in_models: List[KilnModel] = [
2553
2828
  reasoning_capable=True,
2554
2829
  structured_output_mode=StructuredOutputMode.json_schema,
2555
2830
  ),
2831
+ KilnModelProvider(
2832
+ name=ModelProviderName.docker_model_runner,
2833
+ model_id="ai/qwen3:30B-A3B-Q4_K_M",
2834
+ reasoning_capable=True,
2835
+ structured_output_mode=StructuredOutputMode.json_schema,
2836
+ ),
2556
2837
  ],
2557
2838
  ),
2558
2839
  # Qwen 3 30B (3B Active)
@@ -2658,7 +2939,8 @@ built_in_models: List[KilnModel] = [
2658
2939
  supports_data_gen=True,
2659
2940
  reasoning_capable=True,
2660
2941
  structured_output_mode=StructuredOutputMode.json_instructions,
2661
- parser=ModelParserID.r1_thinking,
2942
+ # This model doesn't return reasoning content after a tool call so we need to allow optional reasoning.
2943
+ parser=ModelParserID.optional_r1_thinking,
2662
2944
  ),
2663
2945
  KilnModelProvider(
2664
2946
  name=ModelProviderName.openrouter,
@@ -2669,6 +2951,8 @@ built_in_models: List[KilnModel] = [
2669
2951
  structured_output_mode=StructuredOutputMode.json_instructions,
2670
2952
  parser=ModelParserID.r1_thinking,
2671
2953
  supports_data_gen=True,
2954
+ # Not reliable, even for simple functions
2955
+ supports_function_calling=False,
2672
2956
  ),
2673
2957
  KilnModelProvider(
2674
2958
  name=ModelProviderName.ollama,
@@ -2691,7 +2975,8 @@ built_in_models: List[KilnModel] = [
2691
2975
  structured_output_mode=StructuredOutputMode.json_instructions,
2692
2976
  supports_data_gen=True,
2693
2977
  reasoning_capable=True,
2694
- parser=ModelParserID.r1_thinking,
2978
+ # This model doesn't return reasoning content after a tool call so we need to allow optional reasoning.
2979
+ parser=ModelParserID.optional_r1_thinking,
2695
2980
  ),
2696
2981
  ],
2697
2982
  ),
@@ -2765,6 +3050,7 @@ built_in_models: List[KilnModel] = [
2765
3050
  reasoning_capable=True,
2766
3051
  structured_output_mode=StructuredOutputMode.json_instructions,
2767
3052
  parser=ModelParserID.r1_thinking,
3053
+ supports_function_calling=False,
2768
3054
  ),
2769
3055
  ],
2770
3056
  ),
@@ -2806,6 +3092,7 @@ built_in_models: List[KilnModel] = [
2806
3092
  reasoning_capable=True,
2807
3093
  structured_output_mode=StructuredOutputMode.json_instructions,
2808
3094
  parser=ModelParserID.r1_thinking,
3095
+ supports_function_calling=False,
2809
3096
  ),
2810
3097
  KilnModelProvider(
2811
3098
  name=ModelProviderName.siliconflow_cn,
@@ -2849,6 +3136,7 @@ built_in_models: List[KilnModel] = [
2849
3136
  model_id="Qwen/Qwen3-235B-A22B-Instruct-2507-tput",
2850
3137
  supports_data_gen=True,
2851
3138
  structured_output_mode=StructuredOutputMode.json_instructions,
3139
+ supports_function_calling=False,
2852
3140
  ),
2853
3141
  ],
2854
3142
  ),
@@ -2889,6 +3177,7 @@ built_in_models: List[KilnModel] = [
2889
3177
  formatter=ModelFormatterID.qwen3_style_no_think,
2890
3178
  structured_output_mode=StructuredOutputMode.json_instructions,
2891
3179
  parser=ModelParserID.optional_r1_thinking,
3180
+ supports_function_calling=False,
2892
3181
  ),
2893
3182
  KilnModelProvider(
2894
3183
  name=ModelProviderName.siliconflow_cn,
@@ -2911,6 +3200,54 @@ built_in_models: List[KilnModel] = [
2911
3200
  structured_output_mode=StructuredOutputMode.json_schema,
2912
3201
  reasoning_capable=True,
2913
3202
  reasoning_optional_for_structured_output=True,
3203
+ supports_function_calling=False,
3204
+ ),
3205
+ ],
3206
+ ),
3207
+ # GLM 4.5
3208
+ KilnModel(
3209
+ family=ModelFamily.glm,
3210
+ name=ModelName.glm_4_5,
3211
+ friendly_name="GLM 4.5",
3212
+ providers=[
3213
+ KilnModelProvider(
3214
+ name=ModelProviderName.openrouter,
3215
+ model_id="z-ai/glm-4.5",
3216
+ structured_output_mode=StructuredOutputMode.json_instructions,
3217
+ reasoning_capable=True,
3218
+ ),
3219
+ KilnModelProvider(
3220
+ name=ModelProviderName.fireworks_ai,
3221
+ model_id="accounts/fireworks/models/glm-4p5",
3222
+ structured_output_mode=StructuredOutputMode.json_instructions,
3223
+ reasoning_capable=True,
3224
+ ),
3225
+ ],
3226
+ ),
3227
+ # GLM 4.5 AIR
3228
+ KilnModel(
3229
+ family=ModelFamily.glm,
3230
+ name=ModelName.glm_4_5_air,
3231
+ friendly_name="GLM 4.5 AIR",
3232
+ providers=[
3233
+ KilnModelProvider(
3234
+ name=ModelProviderName.openrouter,
3235
+ model_id="z-ai/glm-4.5-air",
3236
+ structured_output_mode=StructuredOutputMode.json_instructions,
3237
+ reasoning_capable=True,
3238
+ ),
3239
+ KilnModelProvider(
3240
+ name=ModelProviderName.fireworks_ai,
3241
+ model_id="accounts/fireworks/models/glm-4p5-air",
3242
+ structured_output_mode=StructuredOutputMode.json_instructions,
3243
+ reasoning_capable=True,
3244
+ ),
3245
+ KilnModelProvider(
3246
+ name=ModelProviderName.together_ai,
3247
+ model_id="zai-org/GLM-4.5-Air-FP8",
3248
+ structured_output_mode=StructuredOutputMode.json_instructions,
3249
+ reasoning_capable=True,
3250
+ parser=ModelParserID.r1_thinking,
2914
3251
  ),
2915
3252
  ],
2916
3253
  ),
@@ -2967,6 +3304,7 @@ built_in_models: List[KilnModel] = [
2967
3304
  structured_output_mode=StructuredOutputMode.json_schema,
2968
3305
  reasoning_capable=True,
2969
3306
  reasoning_optional_for_structured_output=True,
3307
+ supports_function_calling=False,
2970
3308
  ),
2971
3309
  ],
2972
3310
  ),
@@ -2982,6 +3320,7 @@ built_in_models: List[KilnModel] = [
2982
3320
  structured_output_mode=StructuredOutputMode.json_instructions,
2983
3321
  reasoning_capable=True,
2984
3322
  supports_data_gen=False,
3323
+ supports_function_calling=False,
2985
3324
  ),
2986
3325
  ],
2987
3326
  ),
@@ -2998,6 +3337,7 @@ built_in_models: List[KilnModel] = [
2998
3337
  reasoning_capable=True,
2999
3338
  reasoning_optional_for_structured_output=True,
3000
3339
  supports_data_gen=False,
3340
+ supports_function_calling=False,
3001
3341
  ),
3002
3342
  ],
3003
3343
  ),
@@ -3014,6 +3354,7 @@ built_in_models: List[KilnModel] = [
3014
3354
  reasoning_capable=True,
3015
3355
  reasoning_optional_for_structured_output=True,
3016
3356
  supports_data_gen=False,
3357
+ supports_function_calling=False,
3017
3358
  ),
3018
3359
  ],
3019
3360
  ),
@@ -3029,12 +3370,14 @@ built_in_models: List[KilnModel] = [
3029
3370
  structured_output_mode=StructuredOutputMode.json_instructions,
3030
3371
  supports_data_gen=True,
3031
3372
  r1_openrouter_options=True,
3373
+ supports_function_calling=False,
3032
3374
  ),
3033
3375
  KilnModelProvider(
3034
3376
  name=ModelProviderName.siliconflow_cn,
3035
3377
  model_id="baidu/ERNIE-4.5-300B-A47B",
3036
3378
  structured_output_mode=StructuredOutputMode.json_schema,
3037
3379
  supports_data_gen=True,
3380
+ supports_function_calling=False,
3038
3381
  ),
3039
3382
  ],
3040
3383
  ),
@@ -3055,6 +3398,7 @@ built_in_models: List[KilnModel] = [
3055
3398
  siliconflow_enable_thinking=True,
3056
3399
  reasoning_optional_for_structured_output=True,
3057
3400
  supports_data_gen=False,
3401
+ supports_function_calling=False,
3058
3402
  ),
3059
3403
  ],
3060
3404
  ),
@@ -3078,6 +3422,7 @@ built_in_models: List[KilnModel] = [
3078
3422
  structured_output_mode=StructuredOutputMode.json_instructions,
3079
3423
  reasoning_capable=True,
3080
3424
  supports_data_gen=True,
3425
+ supports_function_calling=False,
3081
3426
  ),
3082
3427
  ],
3083
3428
  ),
@@ -3093,6 +3438,7 @@ built_in_models: List[KilnModel] = [
3093
3438
  structured_output_mode=StructuredOutputMode.json_instructions,
3094
3439
  reasoning_capable=True,
3095
3440
  supports_data_gen=True,
3441
+ supports_function_calling=False,
3096
3442
  ),
3097
3443
  ],
3098
3444
  ),