duckdb 0.7.2-dev2820.0 → 0.7.2-dev2867.0

This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (30)
  1. package/package.json +1 -1
  2. package/src/duckdb/extension/icu/icu-datepart.cpp +51 -1
  3. package/src/duckdb/extension/parquet/parquet-extension.cpp +5 -4
  4. package/src/duckdb/src/common/local_file_system.cpp +1 -3
  5. package/src/duckdb/src/common/multi_file_reader.cpp +11 -8
  6. package/src/duckdb/src/execution/operator/schema/physical_create_type.cpp +11 -2
  7. package/src/duckdb/src/function/table/read_csv.cpp +7 -4
  8. package/src/duckdb/src/function/table/version/pragma_version.cpp +2 -2
  9. package/src/duckdb/src/include/duckdb/common/multi_file_reader.hpp +5 -4
  10. package/src/duckdb/src/include/duckdb/execution/operator/schema/physical_create_type.hpp +4 -0
  11. package/src/duckdb/src/include/duckdb/main/database.hpp +1 -0
  12. package/src/duckdb/src/include/duckdb/main/database_manager.hpp +3 -0
  13. package/src/duckdb/src/include/duckdb/main/extension_helper.hpp +0 -2
  14. package/src/duckdb/src/include/duckdb/parser/tableref/pivotref.hpp +3 -0
  15. package/src/duckdb/src/include/duckdb/parser/transformer.hpp +5 -1
  16. package/src/duckdb/src/main/attached_database.cpp +5 -3
  17. package/src/duckdb/src/main/database.cpp +34 -37
  18. package/src/duckdb/src/main/extension/extension_load.cpp +13 -34
  19. package/src/duckdb/src/parser/transform/statement/transform_create_function.cpp +1 -4
  20. package/src/duckdb/src/parser/transform/statement/transform_create_view.cpp +2 -4
  21. package/src/duckdb/src/parser/transform/statement/transform_pivot_stmt.cpp +43 -24
  22. package/src/duckdb/src/parser/transform/tableref/transform_pivot.cpp +3 -0
  23. package/src/duckdb/src/planner/binder/statement/bind_create.cpp +17 -28
  24. package/src/duckdb/third_party/fmt/format.cc +0 -5
  25. package/src/duckdb/third_party/fmt/include/fmt/core.h +10 -12
  26. package/src/duckdb/third_party/fmt/include/fmt/format-inl.h +2 -33
  27. package/src/duckdb/third_party/fmt/include/fmt/format.h +61 -24
  28. package/src/duckdb/third_party/fmt/include/fmt/printf.h +15 -1
  29. package/src/duckdb/third_party/libpg_query/include/nodes/parsenodes.hpp +1 -0
  30. package/src/duckdb/third_party/libpg_query/src_backend_parser_gram.cpp +10735 -10674
package/package.json CHANGED
@@ -2,7 +2,7 @@
  "name": "duckdb",
  "main": "./lib/duckdb.js",
  "types": "./lib/duckdb.d.ts",
- "version": "0.7.2-dev2820.0",
+ "version": "0.7.2-dev2867.0",
  "description": "DuckDB node.js API",
  "gypfile": true,
  "dependencies": {
package/src/duckdb/extension/icu/icu-datepart.cpp CHANGED
@@ -203,6 +203,14 @@ struct ICUDatePart : public ICUDateFunc {
  return Date::EpochToDate(ExtractEpoch(calendar, 0));
  }
 
+ static string_t MonthName(icu::Calendar *calendar, const uint64_t micros) {
+ return Date::MONTH_NAMES[ExtractMonth(calendar, micros) - 1];
+ }
+
+ static string_t DayName(icu::Calendar *calendar, const uint64_t micros) {
+ return Date::DAY_NAMES[ExtractDayOfWeek(calendar, micros)];
+ }
+
  template <typename RESULT_TYPE>
  struct BindAdapterData : public BindData {
  using result_t = RESULT_TYPE;
@@ -246,7 +254,7 @@ struct ICUDatePart : public ICUDateFunc {
  return info.adapters[0](calendar, micros);
  } else {
  mask.SetInvalid(idx);
- return RESULT_TYPE(0);
+ return RESULT_TYPE();
  }
  });
  }
@@ -482,6 +490,44 @@ struct ICUDatePart : public ICUDateFunc {
  CreateScalarFunctionInfo func_info(set);
  catalog.AddFunction(context, func_info);
  }
+
+ static unique_ptr<FunctionData> BindMonthName(ClientContext &context, ScalarFunction &bound_function,
+ vector<unique_ptr<Expression>> &arguments) {
+ using data_t = BindAdapterData<string_t>;
+ return BindAdapter<data_t>(context, bound_function, arguments, MonthName);
+ }
+
+ template <typename INPUT_TYPE>
+ static ScalarFunction GetMonthNameFunction(const LogicalType &temporal_type) {
+ return ScalarFunction({temporal_type}, LogicalType::VARCHAR, UnaryTimestampFunction<INPUT_TYPE, string_t>,
+ BindMonthName);
+ }
+ static void AddMonthNameFunctions(const string &name, ClientContext &context) {
+ auto &catalog = Catalog::GetSystemCatalog(context);
+ ScalarFunctionSet set(name);
+ set.AddFunction(GetMonthNameFunction<timestamp_t>(LogicalType::TIMESTAMP_TZ));
+ CreateScalarFunctionInfo func_info(set);
+ catalog.AddFunction(context, func_info);
+ }
+
+ static unique_ptr<FunctionData> BindDayName(ClientContext &context, ScalarFunction &bound_function,
+ vector<unique_ptr<Expression>> &arguments) {
+ using data_t = BindAdapterData<string_t>;
+ return BindAdapter<data_t>(context, bound_function, arguments, DayName);
+ }
+
+ template <typename INPUT_TYPE>
+ static ScalarFunction GetDayNameFunction(const LogicalType &temporal_type) {
+ return ScalarFunction({temporal_type}, LogicalType::VARCHAR, UnaryTimestampFunction<INPUT_TYPE, string_t>,
+ BindDayName);
+ }
+ static void AddDayNameFunctions(const string &name, ClientContext &context) {
+ auto &catalog = Catalog::GetSystemCatalog(context);
+ ScalarFunctionSet set(name);
+ set.AddFunction(GetDayNameFunction<timestamp_t>(LogicalType::TIMESTAMP_TZ));
+ CreateScalarFunctionInfo func_info(set);
+ catalog.AddFunction(context, func_info);
+ }
  };
 
  void RegisterICUDatePartFunctions(ClientContext &context) {
@@ -520,6 +566,10 @@ void RegisterICUDatePartFunctions(ClientContext &context) {
  // register the last_day function
  ICUDatePart::AddLastDayFunctions("last_day", context);
 
+ // register the dayname/monthname functions
+ ICUDatePart::AddMonthNameFunctions("monthname", context);
+ ICUDatePart::AddDayNameFunctions("dayname", context);
+
  // finally the actual date_part function
  ICUDatePart::AddDatePartFunctions("date_part", context);
  ICUDatePart::AddDatePartFunctions("datepart", context);
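Note: as a hedged illustration of what the icu-datepart.cpp hunks above enable, monthname and dayname become available for TIMESTAMP WITH TIME ZONE values, evaluated in the session TimeZone. A minimal sketch using the DuckDB C++ API; the timestamp literal and expected output are assumptions for the example, not taken from the diff:

    #include "duckdb.hpp"

    int main() {
        duckdb::DuckDB db(nullptr);   // in-memory database
        duckdb::Connection con(db);
        // depending on the build, the ICU extension may need to be loaded first:
        // con.Query("LOAD icu");
        con.Query("SET TimeZone = 'America/New_York'");
        auto result = con.Query(
            "SELECT monthname(TIMESTAMPTZ '2023-05-01 12:00:00+00') AS m, "
            "       dayname(TIMESTAMPTZ '2023-05-01 12:00:00+00') AS d");
        result->Print(); // expected in this time zone: May / Monday
        return 0;
    }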
package/src/duckdb/extension/parquet/parquet-extension.cpp CHANGED
@@ -369,7 +369,8 @@ public:
  continue;
  }
  MultiFileReader::InitializeReader(*reader, bind_data.parquet_options.file_options, bind_data.reader_bind,
- bind_data.types, bind_data.names, input.column_ids, input.filters);
+ bind_data.types, bind_data.names, input.column_ids, input.filters,
+ bind_data.files[0]);
  }
 
  result->column_ids = input.column_ids;
@@ -561,9 +562,9 @@ public:
  shared_ptr<ParquetReader> reader;
  try {
  reader = make_shared<ParquetReader>(context, file, pq_options);
- MultiFileReader::InitializeReader(*reader, bind_data.parquet_options.file_options,
- bind_data.reader_bind, bind_data.types, bind_data.names,
- parallel_state.column_ids, parallel_state.filters);
+ MultiFileReader::InitializeReader(
+ *reader, bind_data.parquet_options.file_options, bind_data.reader_bind, bind_data.types,
+ bind_data.names, parallel_state.column_ids, parallel_state.filters, bind_data.files.front());
  } catch (...) {
  parallel_lock.lock();
  parallel_state.error_opening_file = true;
package/src/duckdb/src/common/local_file_system.cpp CHANGED
@@ -977,7 +977,6 @@ vector<string> LocalFileSystem::Glob(const string &path, FileOpener *opener) {
  throw IOException("Cannot use multiple \'**\' in one path");
  }
 
- bool recursive_search = false;
  for (idx_t i = absolute_path ? 1 : 0; i < splits.size(); i++) {
  bool is_last_chunk = i + 1 == splits.size();
  bool has_glob = HasGlob(splits[i]);
@@ -989,7 +988,7 @@ vector<string> LocalFileSystem::Glob(const string &path, FileOpener *opener) {
  if (previous_directories.empty()) {
  result.push_back(splits[i]);
  } else {
- if (recursive_search && is_last_chunk) {
+ if (is_last_chunk) {
  for (auto &prev_directory : previous_directories) {
  const string filename = JoinPath(prev_directory, splits[i]);
  if (FileExists(filename) || DirectoryExists(filename)) {
@@ -1004,7 +1003,6 @@ vector<string> LocalFileSystem::Glob(const string &path, FileOpener *opener) {
  }
  } else {
  if (IsCrawl(splits[i])) {
- recursive_search = true;
  if (!is_last_chunk) {
  result = previous_directories;
  }
package/src/duckdb/src/common/multi_file_reader.cpp CHANGED
@@ -192,7 +192,7 @@ void MultiFileReader::FinalizeBind(const MultiFileReaderOptions &file_options, c
  void MultiFileReader::CreateNameMapping(const string &file_name, const vector<LogicalType> &local_types,
  const vector<string> &local_names, const vector<LogicalType> &global_types,
  const vector<string> &global_names, const vector<column_t> &global_column_ids,
- MultiFileReaderData &reader_data) {
+ MultiFileReaderData &reader_data, const string &initial_file) {
  D_ASSERT(global_types.size() == global_names.size());
  D_ASSERT(local_types.size() == local_names.size());
  // we have expected types: create a map of name -> column index
@@ -229,11 +229,12 @@ void MultiFileReader::CreateNameMapping(const string &file_name, const vector<Lo
  }
  candidate_names += local_name;
  }
- throw IOException(StringUtil::Format(
- "Failed to read file \"%s\": schema mismatch in glob: column \"%s\" was read from "
- "the original file, but could not be found in file \"%s\".\nCandidate names: %s\nIf you are trying to "
- "read files with different schemas, try setting union_by_name=True",
- file_name, global_name, file_name, candidate_names));
+ throw IOException(
+ StringUtil::Format("Failed to read file \"%s\": schema mismatch in glob: column \"%s\" was read from "
+ "the original file \"%s\", but could not be found in file \"%s\".\nCandidate names: "
+ "%s\nIf you are trying to "
+ "read files with different schemas, try setting union_by_name=True",
+ file_name, global_name, initial_file, file_name, candidate_names));
  }
  // we found the column in the local file - check if the types are the same
  auto local_id = entry->second;
@@ -254,8 +255,10 @@ void MultiFileReader::CreateNameMapping(const string &file_name, const vector<Lo
  void MultiFileReader::CreateMapping(const string &file_name, const vector<LogicalType> &local_types,
  const vector<string> &local_names, const vector<LogicalType> &global_types,
  const vector<string> &global_names, const vector<column_t> &global_column_ids,
- optional_ptr<TableFilterSet> filters, MultiFileReaderData &reader_data) {
- CreateNameMapping(file_name, local_types, local_names, global_types, global_names, global_column_ids, reader_data);
+ optional_ptr<TableFilterSet> filters, MultiFileReaderData &reader_data,
+ const string &initial_file) {
+ CreateNameMapping(file_name, local_types, local_names, global_types, global_names, global_column_ids, reader_data,
+ initial_file);
  if (filters) {
  reader_data.filter_map.resize(global_types.size());
  for (idx_t c = 0; c < reader_data.column_mapping.size(); c++) {
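Note: the initial_file parameter threaded through CreateMapping/CreateNameMapping exists to improve the schema-mismatch error for globs: it now names the first file of the glob as well as the offending file. A hedged sketch of the user-facing effect; the file names are hypothetical and the parquet files are not part of this diff:

    #include "duckdb.hpp"
    #include <cstdio>

    int main() {
        duckdb::DuckDB db(nullptr);
        duckdb::Connection con(db);
        // hypothetical glob where data/a.parquet has a column that data/b.parquet lacks
        auto res = con.Query("SELECT * FROM read_parquet('data/*.parquet')");
        if (res->HasError()) {
            // the message now also names the initial file of the glob and
            // suggests union_by_name=True
            std::printf("%s\n", res->GetError().c_str());
        }
        // suggested workaround when the schemas genuinely differ:
        con.Query("SELECT * FROM read_parquet('data/*.parquet', union_by_name=true)");
        return 0;
    }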
package/src/duckdb/src/execution/operator/schema/physical_create_type.cpp CHANGED
@@ -3,6 +3,7 @@
  #include "duckdb/catalog/catalog.hpp"
  #include "duckdb/common/types/column/column_data_collection.hpp"
  #include "duckdb/catalog/catalog_entry/type_catalog_entry.hpp"
+ #include "duckdb/common/string_map_set.hpp"
 
  namespace duckdb {
 
@@ -21,6 +22,7 @@ public:
  Vector result;
  idx_t size = 0;
  idx_t capacity = STANDARD_VECTOR_SIZE;
+ string_set_t found_strings;
  };
 
  unique_ptr<GlobalSinkState> PhysicalCreateType::GetGlobalSinkState(ClientContext &context) const {
@@ -52,8 +54,15 @@ SinkResultType PhysicalCreateType::Sink(ExecutionContext &context, GlobalSinkSta
  if (!sdata.validity.RowIsValid(idx)) {
  throw InvalidInputException("Attempted to create ENUM type with NULL value!");
  }
- result_ptr[gstate.size++] =
- StringVector::AddStringOrBlob(gstate.result, src_ptr[idx].GetData(), src_ptr[idx].GetSize());
+ auto str = src_ptr[idx];
+ auto entry = gstate.found_strings.find(src_ptr[idx]);
+ if (entry != gstate.found_strings.end()) {
+ // entry was already found - skip
+ continue;
+ }
+ auto owned_string = StringVector::AddStringOrBlob(gstate.result, str.GetData(), str.GetSize());
+ gstate.found_strings.insert(owned_string);
+ result_ptr[gstate.size++] = owned_string;
  }
  return SinkResultType::NEED_MORE_INPUT;
  }
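Note: the found_strings set added above deduplicates the values fed into an ENUM created from a query, so each distinct value is kept once. A hedged sketch of that path; the table, data, and expected result are assumptions for illustration:

    #include "duckdb.hpp"

    int main() {
        duckdb::DuckDB db(nullptr);
        duckdb::Connection con(db);
        con.Query("CREATE TABLE ratings(level VARCHAR)");
        con.Query("INSERT INTO ratings VALUES ('low'), ('high'), ('low'), ('medium'), ('high')");
        // duplicates in the source query are skipped via the found_strings set,
        // so the ENUM ends up with one entry per distinct value
        con.Query("CREATE TYPE rating AS ENUM (SELECT level FROM ratings)");
        con.Query("SELECT enum_range(NULL::rating)")->Print();
        return 0;
    }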
package/src/duckdb/src/function/table/read_csv.cpp CHANGED
@@ -497,7 +497,8 @@ bool ParallelCSVGlobalState::Next(ClientContext &context, const ReadCSVData &bin
  }
  reader->options.file_path = current_file_path;
  MultiFileReader::InitializeReader(*reader, bind_data.options.file_options, bind_data.reader_bind,
- bind_data.return_types, bind_data.return_names, column_ids, nullptr);
+ bind_data.return_types, bind_data.return_names, column_ids, nullptr,
+ bind_data.files.front());
  } else {
  // update the current reader
  reader->SetBufferRead(std::move(result));
@@ -660,7 +661,8 @@ struct SingleThreadedCSVState : public GlobalTableFunctionState {
  result->names = csv_names;
  }
  MultiFileReader::InitializeReader(*result, bind_data.options.file_options, bind_data.reader_bind,
- bind_data.return_types, bind_data.return_names, column_ids, nullptr);
+ bind_data.return_types, bind_data.return_names, column_ids, nullptr,
+ bind_data.files.front());
  }
  total_size = result->file_handle->FileSize();
  return result;
@@ -707,14 +709,15 @@ static unique_ptr<GlobalTableFunctionState> SingleThreadedCSVInit(ClientContext
  }
  }
  MultiFileReader::InitializeReader(*result->initial_reader, bind_data.options.file_options, bind_data.reader_bind,
- bind_data.return_types, bind_data.return_names, input.column_ids, input.filters);
+ bind_data.return_types, bind_data.return_names, input.column_ids, input.filters,
+ bind_data.files.front());
  for (auto &reader : bind_data.union_readers) {
  if (!reader) {
  continue;
  }
  MultiFileReader::InitializeReader(*reader, bind_data.options.file_options, bind_data.reader_bind,
  bind_data.return_types, bind_data.return_names, input.column_ids,
- input.filters);
+ input.filters, bind_data.files.front());
  }
  result->column_ids = input.column_ids;
 
package/src/duckdb/src/function/table/version/pragma_version.cpp CHANGED
@@ -1,8 +1,8 @@
  #ifndef DUCKDB_VERSION
- #define DUCKDB_VERSION "0.7.2-dev2820"
+ #define DUCKDB_VERSION "0.7.2-dev2867"
  #endif
  #ifndef DUCKDB_SOURCE_ID
- #define DUCKDB_SOURCE_ID "fc797c18cf"
+ #define DUCKDB_SOURCE_ID "aa20f173b1"
  #endif
  #include "duckdb/function/table/system_functions.hpp"
  #include "duckdb/main/database.hpp"
package/src/duckdb/src/include/duckdb/common/multi_file_reader.hpp CHANGED
@@ -107,7 +107,8 @@ struct MultiFileReader {
  DUCKDB_API static void CreateMapping(const string &file_name, const vector<LogicalType> &local_types,
  const vector<string> &local_names, const vector<LogicalType> &global_types,
  const vector<string> &global_names, const vector<column_t> &global_column_ids,
- optional_ptr<TableFilterSet> filters, MultiFileReaderData &reader_data);
+ optional_ptr<TableFilterSet> filters, MultiFileReaderData &reader_data,
+ const string &initial_file);
  //! Finalize the reading of a chunk - applying any constants that are required
  DUCKDB_API static void FinalizeChunk(const MultiFileReaderBindData &bind_data,
  const MultiFileReaderData &reader_data, DataChunk &chunk);
@@ -156,11 +157,11 @@ struct MultiFileReader {
  static void InitializeReader(READER_CLASS &reader, const MultiFileReaderOptions &options,
  const MultiFileReaderBindData &bind_data, const vector<LogicalType> &global_types,
  const vector<string> &global_names, const vector<column_t> &global_column_ids,
- optional_ptr<TableFilterSet> table_filters) {
+ optional_ptr<TableFilterSet> table_filters, const string &initial_file) {
  FinalizeBind(options, bind_data, reader.GetFileName(), reader.GetNames(), global_types, global_names,
  global_column_ids, reader.reader_data);
  CreateMapping(reader.GetFileName(), reader.GetTypes(), reader.GetNames(), global_types, global_names,
- global_column_ids, table_filters, reader.reader_data);
+ global_column_ids, table_filters, reader.reader_data, initial_file);
  reader.reader_data.filters = table_filters;
  }
 
@@ -193,7 +194,7 @@ private:
  static void CreateNameMapping(const string &file_name, const vector<LogicalType> &local_types,
  const vector<string> &local_names, const vector<LogicalType> &global_types,
  const vector<string> &global_names, const vector<column_t> &global_column_ids,
- MultiFileReaderData &reader_data);
+ MultiFileReaderData &reader_data, const string &initial_file);
  };
 
  } // namespace duckdb
package/src/duckdb/src/include/duckdb/execution/operator/schema/physical_create_type.hpp CHANGED
@@ -47,6 +47,10 @@ public:
  bool ParallelSink() const override {
  return false;
  }
+
+ bool SinkOrderDependent() const override {
+ return true;
+ }
  };
 
  } // namespace duckdb
package/src/duckdb/src/include/duckdb/main/database.hpp CHANGED
@@ -60,6 +60,7 @@ public:
 
  private:
  void Initialize(const char *path, DBConfig *config);
+ void CreateDatabase(const string &database_type);
 
  void Configure(DBConfig &config);
 
package/src/duckdb/src/include/duckdb/main/database_manager.hpp CHANGED
@@ -57,6 +57,9 @@ public:
  idx_t ModifyCatalog() {
  return catalog_version++;
  }
+ bool HasDefaultDatabase() {
+ return !default_database.empty();
+ }
 
  private:
  //! The system database is a special database that holds system entries (e.g. functions)
package/src/duckdb/src/include/duckdb/main/extension_helper.hpp CHANGED
@@ -56,8 +56,6 @@ public:
 
  static const vector<string> GetPublicKeys();
 
- static void StorageInit(string &extension, DBConfig &config);
-
  // Returns extension name, or empty string if not a replacement open path
  static string ExtractExtensionPrefixFromPath(const string &path);
 
package/src/duckdb/src/include/duckdb/parser/tableref/pivotref.hpp CHANGED
@@ -9,6 +9,7 @@
  #pragma once
 
  #include "duckdb/parser/tableref.hpp"
+ #include "duckdb/parser/query_node/select_node.hpp"
 
  namespace duckdb {
 
@@ -43,6 +44,8 @@ struct PivotColumn {
  vector<PivotColumnEntry> entries;
  //! The enum to read pivot values from (if any)
  string pivot_enum;
+ //! Subquery (if any) - used during transform only
+ unique_ptr<QueryNode> subquery;
 
  string ToString() const;
  bool Equals(const PivotColumn &other) const;
package/src/duckdb/src/include/duckdb/parser/transformer.hpp CHANGED
@@ -45,6 +45,7 @@ class Transformer {
  string enum_name;
  unique_ptr<SelectNode> base;
  unique_ptr<ParsedExpression> column;
+ unique_ptr<QueryNode> subquery;
  };
 
  public:
@@ -85,10 +86,13 @@ private:
  bool GetNamedParam(const string &name, int32_t &index);
  bool HasNamedParameters() const;
 
- void AddPivotEntry(string enum_name, unique_ptr<SelectNode> source, unique_ptr<ParsedExpression> column);
+ void AddPivotEntry(string enum_name, unique_ptr<SelectNode> source, unique_ptr<ParsedExpression> column,
+ unique_ptr<QueryNode> subquery);
  unique_ptr<SQLStatement> GenerateCreateEnumStmt(unique_ptr<CreatePivotEntry> entry);
  bool HasPivotEntries();
  idx_t PivotEntryCount();
+ vector<unique_ptr<CreatePivotEntry>> &GetPivotEntries();
+ void PivotEntryCheck(const string &type);
  void ExtractCTEsRecursive(CommonTableExpressionMap &cte_map);
 
  private:
package/src/duckdb/src/main/attached_database.cpp CHANGED
@@ -1,9 +1,11 @@
  #include "duckdb/main/attached_database.hpp"
- #include "duckdb/storage/storage_manager.hpp"
- #include "duckdb/transaction/duck_transaction_manager.hpp"
- #include "duckdb/common/file_system.hpp"
+
  #include "duckdb/catalog/duck_catalog.hpp"
+ #include "duckdb/common/file_system.hpp"
+ #include "duckdb/parser/parsed_data/attach_info.hpp"
  #include "duckdb/storage/storage_extension.hpp"
+ #include "duckdb/storage/storage_manager.hpp"
+ #include "duckdb/transaction/duck_transaction_manager.hpp"
 
  namespace duckdb {
 
package/src/duckdb/src/main/database.cpp CHANGED
@@ -149,8 +149,9 @@ duckdb::unique_ptr<AttachedDatabase> DatabaseInstance::CreateAttachedDatabase(At
  AccessMode access_mode) {
  duckdb::unique_ptr<AttachedDatabase> attached_database;
  if (!type.empty()) {
- // find the storage extensionon database
- auto entry = config.storage_extensions.find(type);
+ // find the storage extension
+ auto extension_name = ExtensionHelper::ApplyExtensionAlias(type);
+ auto entry = config.storage_extensions.find(extension_name);
  if (entry == config.storage_extensions.end()) {
  throw BinderException("Unrecognized storage type \"%s\"", type);
  }
@@ -171,6 +172,33 @@ duckdb::unique_ptr<AttachedDatabase> DatabaseInstance::CreateAttachedDatabase(At
  return attached_database;
  }
 
+ void DatabaseInstance::CreateDatabase(const string &database_type) {
+ AttachInfo info;
+ info.name = AttachedDatabase::ExtractDatabaseName(config.options.database_path);
+ info.path = config.options.database_path;
+
+ auto attached_database = CreateAttachedDatabase(info, database_type, config.options.access_mode);
+ auto initial_database = attached_database.get();
+ {
+ Connection con(*this);
+ con.BeginTransaction();
+ db_manager->AddDatabase(*con.context, std::move(attached_database));
+ con.Commit();
+ }
+
+ // initialize the database
+ initial_database->Initialize();
+ }
+
+ void ThrowExtensionSetUnrecognizedOptions(const unordered_map<string, Value> &unrecognized_options) {
+ auto unrecognized_options_iter = unrecognized_options.begin();
+ string unrecognized_option_keys = unrecognized_options_iter->first;
+ for (; unrecognized_options_iter == unrecognized_options.end(); ++unrecognized_options_iter) {
+ unrecognized_option_keys = "," + unrecognized_options_iter->first;
+ }
+ throw InvalidInputException("Unrecognized configuration property \"%s\"", unrecognized_option_keys);
+ }
+
  void DatabaseInstance::Initialize(const char *database_path, DBConfig *user_config) {
  DBConfig default_config;
  DBConfig *config_ptr = &default_config;
@@ -208,27 +236,9 @@ void DatabaseInstance::Initialize(const char *database_path, DBConfig *user_conf
 
  // check if we are opening a standard DuckDB database or an extension database
  auto database_type = ExtractDatabaseType(config.options.database_path);
- if (!database_type.empty()) {
- // we are opening an extension database, run storage_init
- ExtensionHelper::StorageInit(database_type, config);
- }
- AttachInfo info;
- info.name = AttachedDatabase::ExtractDatabaseName(config.options.database_path);
- info.path = config.options.database_path;
-
- auto attached_database = CreateAttachedDatabase(info, database_type, config.options.access_mode);
- auto initial_database = attached_database.get();
- {
- Connection con(*this);
- con.BeginTransaction();
- db_manager->AddDatabase(*con.context, std::move(attached_database));
- con.Commit();
- }
 
  // initialize the system catalog
  db_manager->InitializeSystemCatalog();
- // initialize the database
- initial_database->Initialize();
 
  if (!database_type.empty()) {
  // if we are opening an extension database - load the extension
@@ -236,24 +246,11 @@ void DatabaseInstance::Initialize(const char *database_path, DBConfig *user_conf
  }
 
  if (!config.options.unrecognized_options.empty()) {
- // check if all unrecognized options can be handled by the loaded extension(s)
- for (auto &unrecognized_option : config.options.unrecognized_options) {
- auto entry = config.extension_parameters.find(unrecognized_option.first);
- if (entry == config.extension_parameters.end()) {
- throw InvalidInputException("Unrecognized configuration property \"%s\"", unrecognized_option.first);
- }
- }
+ ThrowExtensionSetUnrecognizedOptions(config.options.unrecognized_options);
+ }
 
- // if so - set the options
- Connection con(*this);
- con.BeginTransaction();
- for (auto &unrecognized_option : config.options.unrecognized_options) {
- auto entry = config.extension_parameters.find(unrecognized_option.first);
- D_ASSERT(entry != config.extension_parameters.end());
- PhysicalSet::SetExtensionVariable(*con.context, entry->second, unrecognized_option.first, SetScope::GLOBAL,
- unrecognized_option.second);
- }
- con.Commit();
+ if (!db_manager->HasDefaultDatabase()) {
+ CreateDatabase(database_type);
  }
 
  // only increase thread count after storage init because we get races on catalog otherwise
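Note: per the hunks above, unrecognized startup options are no longer forwarded to extension-provided settings; DatabaseInstance::Initialize now fails fast via ThrowExtensionSetUnrecognizedOptions. A hedged sketch of that behavior; the option name below is hypothetical:

    #include "duckdb.hpp"
    #include <cstdio>
    #include <exception>

    int main() {
        duckdb::DBConfig config;
        // "some_unknown_setting" is a made-up name; anything the core does not
        // recognize ends up in unrecognized_options
        config.options.unrecognized_options["some_unknown_setting"] = duckdb::Value(42);
        try {
            duckdb::DuckDB db(nullptr, &config);
        } catch (std::exception &ex) {
            // expected with this version:
            // Unrecognized configuration property "some_unknown_setting"
            std::printf("%s\n", ex.what());
        }
        return 0;
    }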
package/src/duckdb/src/main/extension/extension_load.cpp CHANGED
@@ -19,7 +19,7 @@ namespace duckdb {
  //===--------------------------------------------------------------------===//
  typedef void (*ext_init_fun_t)(DatabaseInstance &);
  typedef const char *(*ext_version_fun_t)(void);
- typedef void (*ext_storage_init_t)(DBConfig &);
+ typedef bool (*ext_is_storage_t)(void);
 
  template <class T>
  static T LoadFunctionFromDLL(void *dll, const string &function_name, const string &filename) {
@@ -201,7 +201,18 @@ ExtensionInitResult ExtensionHelper::InitialLoad(DBConfig &config, FileOpener *o
  string error;
  ExtensionInitResult result;
  if (!TryInitialLoad(config, opener, extension, result, error)) {
- throw IOException(error);
+ if (!ExtensionHelper::AllowAutoInstall(extension)) {
+ throw IOException(error);
+ }
+ // the extension load failed - try installing the extension
+ if (!config.file_system) {
+ throw InternalException("Attempting to install an extension without a file system");
+ }
+ ExtensionHelper::InstallExtension(config, *config.file_system, extension, false);
+ // try loading again
+ if (!TryInitialLoad(config, nullptr, extension, result, error)) {
+ throw IOException(error);
+ }
  }
  return result;
  }
@@ -251,38 +262,6 @@ void ExtensionHelper::LoadExternalExtension(ClientContext &context, const string
  LoadExternalExtension(DatabaseInstance::GetDatabase(context), FileSystem::GetFileOpener(context), extension);
  }
 
- void ExtensionHelper::StorageInit(string &extension, DBConfig &config) {
- extension = ExtensionHelper::ApplyExtensionAlias(extension);
- ExtensionInitResult res;
- string error;
- if (!TryInitialLoad(config, nullptr, extension, res, error)) {
- if (!ExtensionHelper::AllowAutoInstall(extension)) {
- throw IOException(error);
- }
- // the extension load failed - try installing the extension
- if (!config.file_system) {
- throw InternalException("Attempting to install an extension without a file system");
- }
- ExtensionHelper::InstallExtension(config, *config.file_system, extension, false);
- // try loading again
- if (!TryInitialLoad(config, nullptr, extension, res, error)) {
- throw IOException(error);
- }
- }
- auto storage_fun_name = res.basename + "_storage_init";
-
- ext_storage_init_t storage_init_fun;
- storage_init_fun = LoadFunctionFromDLL<ext_storage_init_t>(res.lib_hdl, storage_fun_name, res.filename);
-
- try {
- (*storage_init_fun)(config);
- } catch (std::exception &e) {
- throw InvalidInputException(
- "Storage initialization function \"%s\" from file \"%s\" threw an exception: \"%s\"", storage_fun_name,
- res.filename, e.what());
- }
- }
-
  string ExtensionHelper::ExtractExtensionPrefixFromPath(const string &path) {
  auto first_colon = path.find(':');
  if (first_colon == string::npos || first_colon < 2) { // needs to be at least two characters because windows c: ...
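Note: the install-and-retry fallback that previously lived only in ExtensionHelper::StorageInit now sits in ExtensionHelper::InitialLoad, which the regular extension-loading path also goes through. A hedged sketch of the observable effect, assuming AllowAutoInstall permits the extension and a file system is configured; the extension name is only an example:

    #include "duckdb.hpp"
    #include <cstdio>

    int main() {
        duckdb::DuckDB db(nullptr);
        duckdb::Connection con(db);
        // if the extension is not installed yet, InitialLoad may now install it
        // and retry instead of failing immediately; otherwise the original
        // IOException is still raised and surfaces here as a query error
        auto res = con.Query("LOAD httpfs");
        if (res->HasError()) {
            std::printf("%s\n", res->GetError().c_str());
        }
        return 0;
    }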
package/src/duckdb/src/parser/transform/statement/transform_create_function.cpp CHANGED
@@ -29,10 +29,7 @@ unique_ptr<CreateStatement> Transformer::TransformCreateFunction(duckdb_libpgque
  auto query_node = TransformSelect(stmt->query, true)->node->Copy();
  macro_func = make_uniq<TableMacroFunction>(std::move(query_node));
  }
- if (HasPivotEntries()) {
- throw ParserException("Cannot use PIVOT statement syntax in a macro. Use the SQL standard PIVOT syntax in the "
- "FROM clause instead.");
- }
+ PivotEntryCheck("macro");
 
  auto info =
  make_uniq<CreateMacroInfo>((stmt->function ? CatalogType::MACRO_ENTRY : CatalogType::TABLE_MACRO_ENTRY));
package/src/duckdb/src/parser/transform/statement/transform_create_view.cpp CHANGED
@@ -26,10 +26,8 @@ unique_ptr<CreateStatement> Transformer::TransformCreateView(duckdb_libpgquery::
  info->on_conflict = TransformOnConflict(stmt->onconflict);
 
  info->query = TransformSelect(stmt->query, false);
- if (HasPivotEntries()) {
- throw ParserException("Cannot use PIVOT statement syntax in a view. Use the SQL standard PIVOT syntax in the "
- "FROM clause instead.");
- }
+
+ PivotEntryCheck("view");
 
  if (stmt->aliases && stmt->aliases->length > 0) {
  for (auto c = stmt->aliases->head; c != nullptr; c = lnext(c)) {
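Note: the two transform hunks above only consolidate the existing check into Transformer::PivotEntryCheck; the simplified PIVOT statement syntax is still rejected inside views and macros, while the SQL-standard PIVOT in the FROM clause remains allowed. A hedged sketch; the table and pivot columns are made up for illustration:

    #include "duckdb.hpp"
    #include <cstdio>

    int main() {
        duckdb::DuckDB db(nullptr);
        duckdb::Connection con(db);
        con.Query("CREATE TABLE cities(name VARCHAR, year INT, population INT)");

        // still rejected, now via PivotEntryCheck("view")
        auto bad = con.Query("CREATE VIEW v1 AS PIVOT cities ON year USING sum(population)");
        if (bad->HasError()) {
            std::printf("%s\n", bad->GetError().c_str()); // suggests SQL-standard PIVOT in FROM
        }

        // accepted: SQL-standard PIVOT syntax in the FROM clause
        con.Query("CREATE VIEW v2 AS SELECT * FROM cities "
                  "PIVOT (sum(population) FOR year IN (2000, 2010))");
        return 0;
    }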