duckdb 0.7.2-dev614.0 → 0.7.2-dev654.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/duckdb/src/catalog/catalog_entry/table_catalog_entry.cpp +3 -3
- package/src/duckdb/src/catalog/dependency_list.cpp +12 -0
- package/src/duckdb/src/common/string_util.cpp +4 -1
- package/src/duckdb/src/execution/operator/aggregate/physical_window.cpp +3 -0
- package/src/duckdb/src/execution/operator/helper/physical_vacuum.cpp +3 -0
- package/src/duckdb/src/execution/physical_plan/plan_comparison_join.cpp +3 -0
- package/src/duckdb/src/function/aggregate/distributive/bitstring_agg.cpp +22 -7
- package/src/duckdb/src/function/aggregate/distributive/first.cpp +1 -0
- package/src/duckdb/src/function/aggregate/holistic/approximate_quantile.cpp +5 -2
- package/src/duckdb/src/function/table/checkpoint.cpp +5 -1
- package/src/duckdb/src/function/table/system/duckdb_constraints.cpp +2 -2
- package/src/duckdb/src/function/table/version/pragma_version.cpp +2 -2
- package/src/duckdb/src/include/duckdb/catalog/dependency_list.hpp +3 -0
- package/src/duckdb/src/include/duckdb/parser/transformer.hpp +3 -0
- package/src/duckdb/src/include/duckdb/storage/statistics/distinct_statistics.hpp +2 -0
- package/src/duckdb/src/optimizer/pushdown/pushdown_aggregate.cpp +33 -5
- package/src/duckdb/src/optimizer/rule/move_constants.cpp +8 -2
- package/src/duckdb/src/parser/transform/expression/transform_function.cpp +16 -0
- package/src/duckdb/src/parser/transform/statement/transform_select_node.cpp +1 -2
- package/src/duckdb/src/planner/binder/query_node/bind_select_node.cpp +25 -13
- package/src/duckdb/src/planner/binder/statement/bind_copy.cpp +2 -2
- package/src/duckdb/src/planner/binder/statement/bind_create_table.cpp +7 -0
- package/src/duckdb/src/planner/binder/statement/bind_insert.cpp +10 -6
- package/src/duckdb/src/planner/binder/statement/bind_update.cpp +3 -1
- package/src/duckdb/src/planner/expression_binder/having_binder.cpp +3 -0
- package/src/duckdb/src/storage/statistics/column_statistics.cpp +1 -2
- package/src/duckdb/src/storage/statistics/distinct_statistics.cpp +4 -0
- package/src/duckdb/src/storage/table/row_group.cpp +6 -1
package/package.json
CHANGED

package/src/duckdb/src/catalog/catalog_entry/table_catalog_entry.cpp
CHANGED
@@ -193,14 +193,14 @@ const vector<unique_ptr<Constraint>> &TableCatalogEntry::GetConstraints() {
 }

 DataTable &TableCatalogEntry::GetStorage() {
-    throw InternalException("Calling GetStorage on a TableCatalogEntry that is not a
+    throw InternalException("Calling GetStorage on a TableCatalogEntry that is not a DuckTableEntry");
 }

 DataTable *TableCatalogEntry::GetStoragePtr() {
-    throw InternalException("Calling GetStoragePtr on a TableCatalogEntry that is not a
+    throw InternalException("Calling GetStoragePtr on a TableCatalogEntry that is not a DuckTableEntry");
 }

 const vector<unique_ptr<BoundConstraint>> &TableCatalogEntry::GetBoundConstraints() {
-    throw InternalException("Calling GetBoundConstraints on a TableCatalogEntry that is not a
+    throw InternalException("Calling GetBoundConstraints on a TableCatalogEntry that is not a DuckTableEntry");
 }

 } // namespace duckdb
package/src/duckdb/src/catalog/dependency_list.cpp
CHANGED
@@ -1,5 +1,6 @@
 #include "duckdb/catalog/dependency_list.hpp"
 #include "duckdb/catalog/catalog_entry.hpp"
+#include "duckdb/catalog/catalog.hpp"

 namespace duckdb {

@@ -10,4 +11,15 @@ void DependencyList::AddDependency(CatalogEntry *entry) {
     set.insert(entry);
 }

+void DependencyList::VerifyDependencies(Catalog *catalog, const string &name) {
+    for (auto &dep : set) {
+        if (dep->catalog != catalog) {
+            throw DependencyException(
+                "Error adding dependency for object \"%s\" - dependency \"%s\" is in catalog "
+                "\"%s\", which does not match the catalog \"%s\".\nCross catalog dependencies are not supported.",
+                name, dep->name, dep->catalog->GetName(), catalog->GetName());
+        }
+    }
+}
+
 } // namespace duckdb
package/src/duckdb/src/common/string_util.cpp
CHANGED
@@ -191,11 +191,14 @@ vector<string> StringUtil::Split(const string &input, const string &split) {

         // Push the substring [last, next) on to splits
         string substr = input.substr(last, next - last);
-        if (substr.empty()
+        if (!substr.empty()) {
             splits.push_back(substr);
         }
         last = next + split_len;
     }
+    if (splits.empty()) {
+        splits.push_back(input);
+    }
     return splits;
 }

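The Split() fix above changes two behaviors: empty fragments between consecutive separators are no longer pushed, and an input that yields no non-empty fragments now comes back as a single-element result instead of an empty vector. A minimal standalone sketch of that contract using only the standard library (SplitNonEmpty is an illustrative name, not DuckDB's API):

    #include <iostream>
    #include <string>
    #include <vector>

    // Standalone sketch (not DuckDB's helper) of the patched Split() behavior: empty
    // fragments are skipped, and if nothing non-empty was produced the whole input is
    // returned as a single element instead of an empty vector.
    static std::vector<std::string> SplitNonEmpty(const std::string &input, const std::string &split) {
        std::vector<std::string> splits;
        std::string::size_type last = 0;
        while (last <= input.size()) {
            auto next = input.find(split, last);
            if (next == std::string::npos) {
                next = input.size();
            }
            auto substr = input.substr(last, next - last);
            if (!substr.empty()) {
                splits.push_back(substr);
            }
            last = next + split.size();
        }
        if (splits.empty()) {
            splits.push_back(input);
        }
        return splits;
    }

    int main() {
        for (auto &part : SplitNonEmpty("a..b..c", ".")) {
            std::cout << part << "\n"; // a, b, c -- empty fragments between the dots are dropped
        }
        std::cout << SplitNonEmpty("...", ".").size() << "\n"; // 1 -- falls back to the original input
    }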
package/src/duckdb/src/execution/operator/aggregate/physical_window.cpp
CHANGED
@@ -1559,6 +1559,9 @@ TaskExecutionResult WindowMergeTask::ExecuteTask(TaskExecutionMode mode) {
     size_t sorted = 0;
     while (sorted < hash_groups.states.size()) {
         // First check if there is an unfinished task for this thread
+        if (executor.HasError()) {
+            return TaskExecutionResult::TASK_ERROR;
+        }
         if (!local_state.TaskFinished()) {
             local_state.ExecuteTask();
             continue;
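The executor.HasError() check added above lets a window merge task stop as soon as another task in the same pipeline has already failed, instead of finishing work whose result will be discarded. A rough sketch of that cooperative early-exit pattern, assuming nothing about DuckDB's executor and using a plain std::atomic<bool> flag with std::thread workers:

    #include <atomic>
    #include <iostream>
    #include <thread>
    #include <vector>

    // Illustrative sketch only: each worker polls a shared error flag at the top of its
    // loop so one failing task stops the others early instead of letting them finish
    // useless work.
    static std::atomic<bool> has_error{false};

    static void Worker(int id, int chunks) {
        for (int i = 0; i < chunks; i++) {
            if (has_error.load()) {
                return; // another worker failed; abandon the remaining chunks
            }
            if (id == 1 && i == 2) {
                has_error.store(true); // simulate a failure in one task
                return;
            }
        }
    }

    int main() {
        std::vector<std::thread> workers;
        for (int id = 0; id < 4; id++) {
            workers.emplace_back(Worker, id, 1000);
        }
        for (auto &t : workers) {
            t.join();
        }
        std::cout << (has_error ? "aborted early" : "completed") << "\n";
    }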
package/src/duckdb/src/execution/operator/helper/physical_vacuum.cpp
CHANGED
@@ -49,6 +49,9 @@ SinkResultType PhysicalVacuum::Sink(ExecutionContext &context, GlobalSinkState &
     D_ASSERT(lstate.column_distinct_stats.size() == info->column_id_map.size());

     for (idx_t col_idx = 0; col_idx < input.data.size(); col_idx++) {
+        if (!DistinctStatistics::TypeIsSupported(input.data[col_idx].GetType())) {
+            continue;
+        }
         lstate.column_distinct_stats[col_idx]->Update(input.data[col_idx], input.size(), false);
     }

package/src/duckdb/src/execution/physical_plan/plan_comparison_join.cpp
CHANGED
@@ -154,6 +154,9 @@ static void CanUseIndexJoin(TableScanBindData *tbl, Expression &expr, Index **re
 
 void TransformIndexJoin(ClientContext &context, LogicalComparisonJoin &op, Index **left_index, Index **right_index,
                         PhysicalOperator *left, PhysicalOperator *right) {
+    if (op.type == LogicalOperatorType::LOGICAL_DELIM_JOIN) {
+        return;
+    }
     // check if one of the tables has an index on column
     if (op.join_type == JoinType::INNER && op.conditions.size() == 1) {
         // check if one of the children are table scans and if they have an index in the join attribute
package/src/duckdb/src/function/aggregate/distributive/bitstring_agg.cpp
CHANGED
@@ -6,6 +6,7 @@
 #include "duckdb/storage/statistics/base_statistics.hpp"
 #include "duckdb/execution/expression_executor.hpp"
 #include "duckdb/common/types/cast_helpers.hpp"
+#include "duckdb/common/operator/subtract.hpp"

 namespace duckdb {

@@ -93,7 +94,16 @@ struct BitStringAggOperation {

     template <class INPUT_TYPE>
     static idx_t GetRange(INPUT_TYPE min, INPUT_TYPE max) {
-
+        D_ASSERT(max >= min);
+        INPUT_TYPE result;
+        if (!TrySubtractOperator::Operation(max, min, result)) {
+            return NumericLimits<idx_t>::Maximum();
+        }
+        idx_t val(result);
+        if (val == NumericLimits<idx_t>::Maximum()) {
+            return val;
+        }
+        return val + 1;
     }

     template <class INPUT_TYPE, class STATE>
@@ -162,12 +172,15 @@ void BitStringAggOperation::Execute(BitAggState<hugeint_t> *state, hugeint_t inp

 template <>
 idx_t BitStringAggOperation::GetRange(hugeint_t min, hugeint_t max) {
-
-    if (
-    return
-    }
-
+    hugeint_t result;
+    if (!TrySubtractOperator::Operation(max, min, result)) {
+        return NumericLimits<idx_t>::Maximum();
+    }
+    idx_t range;
+    if (!Hugeint::TryCast(result + 1, range)) {
+        return NumericLimits<idx_t>::Maximum();
     }
+    return range;
 }

 unique_ptr<BaseStatistics> BitstringPropagateStats(ClientContext &context, BoundAggregateExpression &expr,
@@ -185,8 +198,10 @@ unique_ptr<BaseStatistics> BitstringPropagateStats(ClientContext &context, Bound

 unique_ptr<FunctionData> BindBitstringAgg(ClientContext &context, AggregateFunction &function,
                                           vector<unique_ptr<Expression>> &arguments) {
-
     if (arguments.size() == 3) {
+        if (!arguments[1]->IsFoldable() && !arguments[2]->IsFoldable()) {
+            throw BinderException("bitstring_agg requires a constant min and max argument");
+        }
         auto min = ExpressionExecutor::EvaluateScalar(context, *arguments[1]);
         auto max = ExpressionExecutor::EvaluateScalar(context, *arguments[2]);
         Function::EraseArgument(function, arguments, 2);
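Both GetRange overloads above now perform the max - min subtraction through a checked operator and return NumericLimits<idx_t>::Maximum() as a sentinel when it overflows, so the caller can reject a range that would not fit in a bitstring. A compact sketch of the same idea outside DuckDB; __builtin_sub_overflow is a GCC/Clang builtin standing in for TrySubtractOperator and is an assumption of this sketch, not part of the patch:

    #include <cstdint>
    #include <iostream>
    #include <limits>

    // Sketch of the overflow-checked range computation; __builtin_sub_overflow stands in
    // for DuckDB's TrySubtractOperator here and is a GCC/Clang builtin, not DuckDB code.
    static uint64_t GetRange(int64_t min_val, int64_t max_val) {
        int64_t diff;
        if (__builtin_sub_overflow(max_val, min_val, &diff)) {
            // max - min does not fit in the signed type: report "too large" via the sentinel
            return std::numeric_limits<uint64_t>::max();
        }
        uint64_t range = static_cast<uint64_t>(diff);
        if (range == std::numeric_limits<uint64_t>::max()) {
            return range; // adding 1 would wrap around
        }
        return range + 1; // number of distinct values in [min, max]
    }

    int main() {
        std::cout << GetRange(1, 42) << "\n";                     // 42
        std::cout << GetRange(std::numeric_limits<int64_t>::min(),
                              std::numeric_limits<int64_t>::max()) << "\n"; // sentinel value
    }

On overflow the caller sees the sentinel and can raise an out-of-range error rather than trying to allocate an absurdly large bitstring.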
package/src/duckdb/src/function/aggregate/holistic/approximate_quantile.cpp
CHANGED
@@ -70,11 +70,14 @@ struct ApproxQuantileOperation {

     template <class INPUT_TYPE, class STATE, class OP>
     static void Operation(STATE *state, AggregateInputData &, INPUT_TYPE *data, ValidityMask &mask, idx_t idx) {
+        auto val = Cast::template Operation<INPUT_TYPE, SAVE_TYPE>(data[idx]);
+        if (!Value::DoubleIsFinite(val)) {
+            return;
+        }
         if (!state->h) {
             state->h = new duckdb_tdigest::TDigest(100);
         }
-
-        state->h->add(Cast::template Operation<INPUT_TYPE, SAVE_TYPE>(data[idx]));
+        state->h->add(val);
         state->pos++;
     }

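The approx_quantile change above drops NaN and infinite inputs before they reach the t-digest, since a single non-finite value would otherwise poison the estimate. A small self-contained illustration of that guard; it computes an exact quantile with std::nth_element rather than a t-digest, so only the filtering step mirrors the patch:

    #include <algorithm>
    #include <cmath>
    #include <iostream>
    #include <vector>

    // Sketch of the guard only: non-finite inputs (NaN, +/-inf) are dropped before the
    // quantile is computed, much like the patched Operation() skips them before handing
    // values to the t-digest. Uses an exact nth_element instead of a digest.
    static double QuantileOfFinite(const std::vector<double> &values, double q) {
        std::vector<double> finite;
        for (double v : values) {
            if (std::isfinite(v)) {
                finite.push_back(v);
            }
        }
        size_t k = static_cast<size_t>(q * (finite.size() - 1));
        std::nth_element(finite.begin(), finite.begin() + k, finite.end());
        return finite[k];
    }

    int main() {
        std::vector<double> v = {1.0, NAN, 3.0, INFINITY, 2.0};
        std::cout << QuantileOfFinite(v, 0.5) << "\n"; // 2 -- NaN and inf do not poison the result
    }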
package/src/duckdb/src/function/table/checkpoint.cpp
CHANGED
@@ -32,7 +32,11 @@ static unique_ptr<FunctionData> CheckpointBind(ClientContext &context, TableFunc
     AttachedDatabase *db;
     auto &db_manager = DatabaseManager::Get(context);
     if (!input.inputs.empty()) {
-
+        auto &db_name = StringValue::Get(input.inputs[0]);
+        db = db_manager.GetDatabase(context, db_name);
+        if (!db) {
+            throw BinderException("Database \"%s\" not found", db_name);
+        }
     } else {
         db = db_manager.GetDatabase(context, DatabaseManager::GetDefaultDatabase(context));
     }
package/src/duckdb/src/function/table/system/duckdb_constraints.cpp
CHANGED
@@ -204,8 +204,8 @@ void DuckDBConstraintsFunction(ClientContext &context, TableFunctionInput &data_
             const auto &bound_foreign_key = (const BoundForeignKeyConstraint &)bound_constraint;
             const auto &info = bound_foreign_key.info;
             // find the other table
-            auto table_entry =
-
+            auto table_entry = Catalog::GetEntry<TableCatalogEntry>(context, table.catalog->GetName(),
+                                                                    info.schema, info.table, true);
             if (!table_entry) {
                 throw InternalException("dukdb_constraints: entry %s.%s referenced in foreign key not found",
                                         info.schema, info.table);
package/src/duckdb/src/function/table/version/pragma_version.cpp
CHANGED
@@ -1,8 +1,8 @@
 #ifndef DUCKDB_VERSION
-#define DUCKDB_VERSION "0.7.2-
+#define DUCKDB_VERSION "0.7.2-dev654"
 #endif
 #ifndef DUCKDB_SOURCE_ID
-#define DUCKDB_SOURCE_ID "
+#define DUCKDB_SOURCE_ID "6525767cf1"
 #endif
 #include "duckdb/function/table/system_functions.hpp"
 #include "duckdb/main/database.hpp"
package/src/duckdb/src/include/duckdb/catalog/dependency_list.hpp
CHANGED
@@ -12,6 +12,7 @@
 #include "duckdb/common/unordered_set.hpp"

 namespace duckdb {
+class Catalog;
 class CatalogEntry;

 //! The DependencyList
@@ -21,6 +22,8 @@ class DependencyList {
 public:
     DUCKDB_API void AddDependency(CatalogEntry *entry);

+    DUCKDB_API void VerifyDependencies(Catalog *catalog, const string &name);
+
 private:
     unordered_set<CatalogEntry *> set;
 };
package/src/duckdb/src/include/duckdb/parser/transformer.hpp
CHANGED
@@ -71,8 +71,11 @@ private:
     vector<unique_ptr<CreatePivotEntry>> pivot_entries;
     //! Sets of stored CTEs, if any
     vector<CommonTableExpressionMap *> stored_cte_map;
+    //! Whether or not we are currently binding a window definition
+    bool in_window_definition = false;

     void Clear();
+    bool InWindowDefinition();

     void SetParamCount(idx_t new_count) {
         if (parent) {
package/src/duckdb/src/include/duckdb/storage/statistics/distinct_statistics.hpp
CHANGED
@@ -46,6 +46,8 @@ public:
     string ToString() const;
     idx_t GetCount() const;

+    static bool TypeIsSupported(const LogicalType &type);
+
 private:
     //! For distinct statistics we sample the input to speed up insertions
     static constexpr const double SAMPLE_RATE = 0.1;
package/src/duckdb/src/optimizer/pushdown/pushdown_aggregate.cpp
CHANGED
@@ -9,6 +9,14 @@ namespace duckdb {

 using Filter = FilterPushdown::Filter;

+static void ExtractFilterBindings(Expression &expr, vector<ColumnBinding> &bindings) {
+    if (expr.type == ExpressionType::BOUND_COLUMN_REF) {
+        auto &colref = (BoundColumnRefExpression &)expr;
+        bindings.push_back(colref.binding);
+    }
+    ExpressionIterator::EnumerateChildren(expr, [&](Expression &child) { ExtractFilterBindings(child, bindings); });
+}
+
 static unique_ptr<Expression> ReplaceGroupBindings(LogicalAggregate &proj, unique_ptr<Expression> expr) {
     if (expr->type == ExpressionType::BOUND_COLUMN_REF) {
         auto &colref = (BoundColumnRefExpression &)*expr;
@@ -40,14 +48,34 @@ unique_ptr<LogicalOperator> FilterPushdown::PushdownAggregate(unique_ptr<Logical
             // filter on GROUPINGS function: cannot pushdown
             continue;
         }
-        //
-
+        // no aggregate! we are filtering on a group
+        // we can only push this down if the filter is in all grouping sets
+        vector<ColumnBinding> bindings;
+        ExtractFilterBindings(*f.filter, bindings);
+
+        bool can_pushdown_filter = true;
+        if (aggr.grouping_sets.empty()) {
+            // empty grouping set - we cannot pushdown the filter
+            can_pushdown_filter = false;
+        }
         for (auto &grp : aggr.grouping_sets) {
-            if
-
+            // check for each of the grouping sets if they contain all groups
+            if (bindings.empty()) {
+                // we can never push down empty grouping sets
+                can_pushdown_filter = false;
+                break;
+            }
+            for (auto &binding : bindings) {
+                if (grp.find(binding.column_index) == grp.end()) {
+                    can_pushdown_filter = false;
+                    break;
+                }
+            }
+            if (!can_pushdown_filter) {
+                break;
             }
         }
-        if (
+        if (!can_pushdown_filter) {
             continue;
         }
         // no aggregate! we can push this down
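The pushdown logic above only moves a filter below the aggregate when every column the filter references appears in every grouping set, and both the binding list and the grouping sets are non-empty. A simplified sketch of that containment test over plain integer column indexes; the names and types are illustrative, not DuckDB's optimizer structures:

    #include <iostream>
    #include <set>
    #include <vector>

    // Sketch of the pushdown test: a filter may move below the aggregate only if every
    // column it references appears in every grouping set (and both lists are non-empty).
    static bool CanPushdownFilter(const std::vector<std::set<int>> &grouping_sets,
                                  const std::vector<int> &filter_columns) {
        if (grouping_sets.empty() || filter_columns.empty()) {
            return false;
        }
        for (auto &grp : grouping_sets) {
            for (int col : filter_columns) {
                if (grp.find(col) == grp.end()) {
                    return false; // this grouping set does not group on the filtered column
                }
            }
        }
        return true;
    }

    int main() {
        // GROUPING SETS ((col0, col1), (col0)): a filter on col0 can be pushed down, one on col1 cannot.
        std::vector<std::set<int>> sets = {{0, 1}, {0}};
        std::cout << CanPushdownFilter(sets, {0}) << "\n"; // 1
        std::cout << CanPushdownFilter(sets, {1}) << "\n"; // 0
    }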
package/src/duckdb/src/optimizer/rule/move_constants.cpp
CHANGED
@@ -73,7 +73,10 @@ unique_ptr<Expression> MoveConstantsRule::Apply(LogicalOperator &op, vector<Expr
         }
         auto result_value = Value::HUGEINT(outer_value);
         if (!result_value.DefaultTryCastAs(constant_type)) {
-            // if the cast is not possible then
+            // if the cast is not possible then an equality comparison is not possible
+            if (comparison->type != ExpressionType::COMPARE_EQUAL) {
+                return nullptr;
+            }
             return ExpressionRewriter::ConstantOrNull(std::move(arithmetic->children[arithmetic_child_index]),
                                                       Value::BOOLEAN(false));
         }
@@ -86,7 +89,10 @@ unique_ptr<Expression> MoveConstantsRule::Apply(LogicalOperator &op, vector<Expr
         }
         auto result_value = Value::HUGEINT(inner_value);
         if (!result_value.DefaultTryCastAs(constant_type)) {
-            // if the cast is not possible then
+            // if the cast is not possible then an equality comparison is not possible
+            if (comparison->type != ExpressionType::COMPARE_EQUAL) {
+                return nullptr;
+            }
             return ExpressionRewriter::ConstantOrNull(std::move(arithmetic->children[arithmetic_child_index]),
                                                       Value::BOOLEAN(false));
         }
package/src/duckdb/src/parser/transform/expression/transform_function.cpp
CHANGED
@@ -105,6 +105,16 @@ bool Transformer::ExpressionIsEmptyStar(ParsedExpression &expr) {
     return false;
 }

+bool Transformer::InWindowDefinition() {
+    if (in_window_definition) {
+        return true;
+    }
+    if (parent) {
+        return parent->InWindowDefinition();
+    }
+    return false;
+}
+
 unique_ptr<ParsedExpression> Transformer::TransformFuncCall(duckdb_libpgquery::PGFuncCall *root) {
     auto name = root->funcname;
     string catalog, schema, function_name;
@@ -139,6 +149,10 @@ unique_ptr<ParsedExpression> Transformer::TransformFuncCall(duckdb_libpgquery::P

     auto lowercase_name = StringUtil::Lower(function_name);
     if (root->over) {
+        if (InWindowDefinition()) {
+            throw ParserException("window functions are not allowed in window definitions");
+        }
+
         const auto win_fun_type = WindowToExpressionType(lowercase_name);
         if (win_fun_type == ExpressionType::INVALID) {
             throw InternalException("Unknown/unsupported window function");
@@ -218,8 +232,10 @@ unique_ptr<ParsedExpression> Transformer::TransformFuncCall(duckdb_libpgquery::P
             window_ref = it->second;
             D_ASSERT(window_ref);
         }
+        in_window_definition = true;
         TransformWindowDef(window_ref, expr.get());
         TransformWindowFrame(window_spec, expr.get());
+        in_window_definition = false;
         expr->query_location = root->location;
         return std::move(expr);
     }
package/src/duckdb/src/parser/transform/statement/transform_select_node.cpp
CHANGED
@@ -26,8 +26,7 @@ unique_ptr<QueryNode> Transformer::TransformSelectInternal(duckdb_libpgquery::PG
             auto window_def = reinterpret_cast<duckdb_libpgquery::PGWindowDef *>(window_ele->data.ptr_value);
             D_ASSERT(window_def);
             D_ASSERT(window_def->name);
-
-
+            string window_name(window_def->name);
             auto it = window_clauses.find(window_name);
             if (it != window_clauses.end()) {
                 throw ParserException("window \"%s\" is already defined", window_name);
package/src/duckdb/src/planner/binder/query_node/bind_select_node.cpp
CHANGED
@@ -426,12 +426,16 @@ unique_ptr<BoundQueryNode> Binder::BindSelectNode(SelectNode &statement, unique_
     // after that, we bind to the SELECT list
     SelectBinder select_binder(*this, context, *result, info, alias_map);
     vector<LogicalType> internal_sql_types;
+    vector<idx_t> group_by_all_indexes;
     for (idx_t i = 0; i < statement.select_list.size(); i++) {
         bool is_window = statement.select_list[i]->IsWindow();
         idx_t unnest_count = result->unnests.size();
         LogicalType result_type;
         auto expr = select_binder.Bind(statement.select_list[i], &result_type);
-
+        bool is_original_column = i < result->column_count;
+        bool can_group_by_all =
+            statement.aggregate_handling == AggregateHandling::FORCE_AGGREGATES && is_original_column;
+        if (can_group_by_all && select_binder.HasBoundColumns()) {
             if (select_binder.BoundAggregates()) {
                 throw BinderException("Cannot mix aggregates with non-aggregated columns!");
             }
@@ -443,20 +447,25 @@ unique_ptr<BoundQueryNode> Binder::BindSelectNode(SelectNode &statement, unique_
             }
             // we are forcing aggregates, and the node has columns bound
             // this entry becomes a group
-
-                expr->return_type, ColumnBinding(result->group_index, result->groups.group_expressions.size()));
-            result->groups.group_expressions.push_back(std::move(expr));
-            expr = std::move(group_ref);
+            group_by_all_indexes.push_back(i);
         }
         result->select_list.push_back(std::move(expr));
         if (i < result->column_count) {
             result->types.push_back(result_type);
         }
         internal_sql_types.push_back(result_type);
-        if (
+        if (can_group_by_all) {
             select_binder.ResetBindings();
         }
     }
+    // push the GROUP BY ALL expressions into the group set
+    for (auto &group_by_all_index : group_by_all_indexes) {
+        auto &expr = result->select_list[group_by_all_index];
+        auto group_ref = make_unique<BoundColumnRefExpression>(
+            expr->return_type, ColumnBinding(result->group_index, result->groups.group_expressions.size()));
+        result->groups.group_expressions.push_back(std::move(expr));
+        expr = std::move(group_ref);
+    }
     result->need_prune = result->select_list.size() > result->column_count;

     // in the normal select binder, we bind columns as if there is no aggregation
@@ -467,16 +476,19 @@ unique_ptr<BoundQueryNode> Binder::BindSelectNode(SelectNode &statement, unique_
         !result->groups.grouping_sets.empty()) {
         if (statement.aggregate_handling == AggregateHandling::NO_AGGREGATES_ALLOWED) {
             throw BinderException("Aggregates cannot be present in a Project relation!");
-        } else if (
-
-
-
-
+        } else if (select_binder.HasBoundColumns()) {
+            auto &bound_columns = select_binder.GetBoundColumns();
+            string error;
+            error = "column \"%s\" must appear in the GROUP BY clause or must be part of an aggregate function.";
+            if (statement.aggregate_handling == AggregateHandling::FORCE_AGGREGATES) {
+                error += "\nGROUP BY ALL will only group entries in the SELECT list. Add it to the SELECT list or "
+                         "GROUP BY this entry explicitly.";
+            } else {
                 error += "\nEither add it to the GROUP BY list, or use \"ANY_VALUE(%s)\" if the exact value of \"%s\" "
                          "is not important.";
-            throw BinderException(FormatError(bound_columns[0].query_location, error, bound_columns[0].name,
-                                              bound_columns[0].name, bound_columns[0].name));
             }
+            throw BinderException(FormatError(bound_columns[0].query_location, error, bound_columns[0].name,
+                                              bound_columns[0].name, bound_columns[0].name));
         }
     }

package/src/duckdb/src/planner/binder/statement/bind_copy.cpp
CHANGED
@@ -185,7 +185,7 @@ BoundStatement Binder::BindCopyFrom(CopyStatement &stmt) {
     vector<string> expected_names;
     if (!bound_insert.column_index_map.empty()) {
         expected_names.resize(bound_insert.expected_types.size());
-        for (auto &col : table->GetColumns().
+        for (auto &col : table->GetColumns().Physical()) {
             auto i = col.Physical();
             if (bound_insert.column_index_map[i] != DConstants::INVALID_INDEX) {
                 expected_names[bound_insert.column_index_map[i]] = col.Name();
@@ -193,7 +193,7 @@ BoundStatement Binder::BindCopyFrom(CopyStatement &stmt) {
         }
     } else {
         expected_names.reserve(bound_insert.expected_types.size());
-        for (auto &col : table->GetColumns().
+        for (auto &col : table->GetColumns().Physical()) {
             expected_names.push_back(col.Name());
         }
     }
package/src/duckdb/src/planner/binder/statement/bind_create_table.cpp
CHANGED
@@ -119,9 +119,15 @@ static void BindConstraints(Binder &binder, BoundCreateTableInfo &info) {
                      fk.info.type == ForeignKeyType::FK_TYPE_SELF_REFERENCE_TABLE);
             physical_index_set_t fk_key_set, pk_key_set;
             for (idx_t i = 0; i < fk.info.pk_keys.size(); i++) {
+                if (pk_key_set.find(fk.info.pk_keys[i]) != pk_key_set.end()) {
+                    throw BinderException("Duplicate primary key referenced in FOREIGN KEY constraint");
+                }
                 pk_key_set.insert(fk.info.pk_keys[i]);
             }
             for (idx_t i = 0; i < fk.info.fk_keys.size(); i++) {
+                if (fk_key_set.find(fk.info.fk_keys[i]) != fk_key_set.end()) {
+                    throw BinderException("Duplicate key specified in FOREIGN KEY constraint");
+                }
                 fk_key_set.insert(fk.info.fk_keys[i]);
             }
             info.bound_constraints.push_back(
@@ -292,6 +298,7 @@ unique_ptr<BoundCreateTableInfo> Binder::BindCreateTableInfo(unique_ptr<CreateIn
             result->dependencies.AddDependency(type_dependency);
         }
     }
+    result->dependencies.VerifyDependencies(schema->catalog, result->Base().table);
     properties.allow_stream_result = false;
     return result;
 }
package/src/duckdb/src/planner/binder/statement/bind_insert.cpp
CHANGED
@@ -335,6 +335,16 @@ void Binder::BindOnConflictClause(LogicalInsert &insert, TableCatalogEntry &tabl
         ReplaceColumnBindings(*insert.on_conflict_condition, table_index, projection_index);
     }

+    if (insert.action_type == OnConflictAction::REPLACE) {
+        D_ASSERT(on_conflict.set_info == nullptr);
+        on_conflict.set_info = CreateSetInfoForReplace(table, stmt);
+        insert.action_type = OnConflictAction::UPDATE;
+    }
+    if (on_conflict.set_info && on_conflict.set_info->columns.empty()) {
+        // if we are doing INSERT OR REPLACE on a table with no columns outside of the primary key column
+        // convert to INSERT OR IGNORE
+        insert.action_type = OnConflictAction::NOTHING;
+    }
     if (insert.action_type == OnConflictAction::NOTHING) {
         if (!insert.on_conflict_condition) {
             return;
@@ -346,15 +356,9 @@ void Binder::BindOnConflictClause(LogicalInsert &insert, TableCatalogEntry &tabl
         insert.columns_to_fetch = table_binding->GetBoundColumnIds();
         return;
     }
-    if (insert.action_type == OnConflictAction::REPLACE) {
-        D_ASSERT(on_conflict.set_info == nullptr);
-        on_conflict.set_info = CreateSetInfoForReplace(table, stmt);
-        insert.action_type = OnConflictAction::UPDATE;
-    }

     D_ASSERT(on_conflict.set_info);
     auto &set_info = *on_conflict.set_info;
-    D_ASSERT(!set_info.columns.empty());
     D_ASSERT(set_info.columns.size() == set_info.expressions.size());

     if (set_info.condition) {
package/src/duckdb/src/planner/binder/statement/bind_update.cpp
CHANGED
@@ -195,11 +195,13 @@ BoundStatement Binder::Bind(UpdateStatement &stmt) {
     AddCTEMap(stmt.cte_map);

     if (stmt.from_table) {
+        auto from_binder = Binder::CreateBinder(context, this);
         BoundJoinRef bound_crossproduct(JoinRefType::CROSS);
         bound_crossproduct.left = std::move(bound_table);
-        bound_crossproduct.right = Bind(*stmt.from_table);
+        bound_crossproduct.right = from_binder->Bind(*stmt.from_table);
         root = CreatePlan(bound_crossproduct);
         get = (LogicalGet *)root->children[0].get();
+        bind_context.AddContext(std::move(from_binder->bind_context));
     } else {
         root = CreatePlan(*bound_table);
         get = (LogicalGet *)root.get();
package/src/duckdb/src/planner/expression_binder/having_binder.cpp
CHANGED
@@ -19,6 +19,9 @@ BindResult HavingBinder::BindColumnRef(unique_ptr<ParsedExpression> *expr_ptr, i
     auto &expr = (ColumnRefExpression &)**expr_ptr;
     auto alias_result = column_alias_binder.BindAlias(*this, expr, depth, root_expression);
     if (!alias_result.HasError()) {
+        if (depth > 0) {
+            throw BinderException("Having clause cannot reference alias in correlated subquery");
+        }
         return alias_result;
     }
     if (aggregate_handling == AggregateHandling::FORCE_AGGREGATES) {
package/src/duckdb/src/storage/statistics/column_statistics.cpp
CHANGED
@@ -4,8 +4,7 @@
 namespace duckdb {

 ColumnStatistics::ColumnStatistics(BaseStatistics stats_p) : stats(std::move(stats_p)) {
-
-    if (type != PhysicalType::LIST && type != PhysicalType::STRUCT) {
+    if (DistinctStatistics::TypeIsSupported(stats.GetType())) {
         distinct_stats = make_unique<DistinctStatistics>();
     }
 }
package/src/duckdb/src/storage/statistics/distinct_statistics.cpp
CHANGED
@@ -94,4 +94,8 @@ idx_t DistinctStatistics::GetCount() const {
     return MinValue<idx_t>(estimate, total_count);
 }

+bool DistinctStatistics::TypeIsSupported(const LogicalType &type) {
+    return type.InternalType() != PhysicalType::LIST && type.InternalType() != PhysicalType::STRUCT;
+}
+
 } // namespace duckdb
package/src/duckdb/src/storage/table/row_group.cpp
CHANGED
@@ -156,7 +156,11 @@ unique_ptr<RowGroup> RowGroup::AlterType(const LogicalType &target_type, idx_t c
     // scan the original table, and fill the new column with the transformed value
     InitializeScan(scan_state);

-
+    DataChunk append_chunk;
+    vector<LogicalType> append_types;
+    append_types.push_back(target_type);
+    append_chunk.Initialize(Allocator::DefaultAllocator(), append_types);
+    auto &append_vector = append_chunk.data[0];
     SegmentStatistics altered_col_stats(target_type);
     while (true) {
         // scan the table
@@ -166,6 +170,7 @@ unique_ptr<RowGroup> RowGroup::AlterType(const LogicalType &target_type, idx_t c
             break;
         }
         // execute the expression
+        append_chunk.Reset();
         executor.ExecuteExpression(scan_chunk, append_vector);
         column_data->Append(altered_col_stats.statistics, append_state, append_vector, scan_chunk.size());
     }