Skip to content

Commit

Permalink
Merge branch 'main' into replace_views
Browse files Browse the repository at this point in the history
  • Loading branch information
Tishj committed Jun 18, 2024
2 parents 1b54ad9 + d8b024f commit 27d2951
Show file tree
Hide file tree
Showing 4 changed files with 61 additions and 28 deletions.
39 changes: 20 additions & 19 deletions src/quack_types.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -223,7 +223,7 @@ struct PostgresArrayAppendState {
expected_values *= to_append;
}
if (dimensions[dimension] != to_append) {
elog(ERROR, "Expected %d values in list at dimension %d, found %d instead", dimensions[dimension], dimension, to_append);
throw duckdb::InvalidInputException("Expected %d values in list at dimension %d, found %d instead", dimensions[dimension], dimension, to_append);
}

auto &child_type = duckdb::ListType::GetChildType(value.type());
Expand All @@ -234,7 +234,7 @@ struct PostgresArrayAppendState {
if (child_val.IsNull()) {
// Postgres arrays cannot contain NULLs at the array (intermediate-dimension) level
// i.e. {{1,2}, NULL, {3,4}} is not supported
elog(ERROR, "Returned LIST contains a NULL at an intermediate dimension (not the value level), which is not supported in Postgres");
throw duckdb::InvalidInputException("Returned LIST contains a NULL at an intermediate dimension (not the value level), which is not supported in Postgres");
}
AppendValueAtDimension(child_val, dimension + 1);
}
Expand Down Expand Up @@ -367,27 +367,23 @@ ConvertDuckToPostgresValue(TupleTableSlot *slot, duckdb::Value &value, idx_t col
auto scale = duckdb::DecimalType::GetScale(value.type());
switch (physical_type) {
case duckdb::PhysicalType::INT16: {
elog(INFO, "SMALLINT");
numeric_var = ConvertNumeric<int16_t>(value.GetValueUnsafe<int16_t>(), scale);
break;
}
case duckdb::PhysicalType::INT32: {
elog(INFO, "INTEGER");
numeric_var = ConvertNumeric<int32_t>(value.GetValueUnsafe<int32_t>(), scale);
break;
}
case duckdb::PhysicalType::INT64: {
elog(INFO, "BIGINT");
numeric_var = ConvertNumeric<int64_t>(value.GetValueUnsafe<int64_t>(), scale);
break;
}
case duckdb::PhysicalType::INT128: {
elog(INFO, "HUGEINT");
numeric_var = ConvertNumeric<hugeint_t, DecimalConversionHugeint>(value.GetValueUnsafe<hugeint_t>(), scale);
break;
}
default: {
elog(ERROR, "Unrecognized physical type for DECIMAL value");
throw duckdb::InternalException("Unrecognized physical type for DECIMAL value");
break;
}
}
Expand Down Expand Up @@ -427,7 +423,7 @@ ConvertDuckToPostgresValue(TupleTableSlot *slot, duckdb::Value &value, idx_t col
break;
}
default:
elog(ERROR, "(DuckDB/ConvertDuckToPostgresValue) Unsuported quack type: %d", oid);
throw duckdb::NotImplementedException("(DuckDB/ConvertDuckToPostgresValue) Unsuported quack type: %d", oid);
}
}

Expand All @@ -454,7 +450,7 @@ static duckdb::LogicalType ChildTypeFromArray(Oid array_type) {
case INT8ARRAYOID:
return duckdb::LogicalTypeId::BIGINT;
default:
elog(ERROR, "No child type set for Postgres OID %d", array_type);
throw duckdb::NotImplementedException("No child type set for Postgres OID %d", array_type);
}
}

Expand Down Expand Up @@ -564,7 +560,7 @@ GetPostgresDuckDBType(duckdb::LogicalType type) {
case duckdb::LogicalTypeId::BIGINT:
return INT8ARRAYOID;
default:
elog(ERROR, "(DuckDB/GetPostgresDuckDBType) Unsupported quack type: %s", type.ToString().c_str());
throw duckdb::InvalidInputException("(DuckDB/GetPostgresDuckDBType) Unsupported quack type: %s", type.ToString().c_str());
}
}
default: {
Expand Down Expand Up @@ -669,7 +665,7 @@ ConvertPostgresToDuckValue(Datum value, duckdb::Vector &result, idx_t offset) {
auto bpchar_data = VARDATA_ANY(value);

if (bpchar_length != 1) {
elog(ERROR, "Expected 1 length BPCHAR for TINYINT marked with IsBpChar at offset %llu", offset);
throw duckdb::InternalException("Expected 1 length BPCHAR for TINYINT marked with IsBpChar at offset %llu", offset);
}
Append<int8_t>(result, bpchar_data[0], offset);
} else {
Expand Down Expand Up @@ -737,7 +733,7 @@ ConvertPostgresToDuckValue(Datum value, duckdb::Vector &result, idx_t offset) {
break;
}
default: {
elog(ERROR, "Unrecognized physical type for DECIMAL value");
throw duckdb::InternalException("Unrecognized physical type (%s) for DECIMAL value", duckdb::EnumUtil::ToString(physical_type));
break;
}
}
Expand Down Expand Up @@ -773,7 +769,7 @@ ConvertPostgresToDuckValue(Datum value, duckdb::Vector &result, idx_t offset) {
deconstruct_array(array, ARR_ELEMTYPE(array), typlen, typbyval, typalign, &elems, &nulls, &nelems);

if (ndims == -1) {
elog(ERROR, "Array type has an ndims of -1, so it's actually not an array??");
throw duckdb::InternalException("Array type has an ndims of -1, so it's actually not an array??");
}
// Set the list_entry_t metadata
duckdb::Vector *vec = &result;
Expand All @@ -782,8 +778,7 @@ ConvertPostgresToDuckValue(Datum value, duckdb::Vector &result, idx_t offset) {
auto previous_dimension = dim ? dims[dim - 1] : 1;
auto dimension = dims[dim];
if (vec->GetType().id() != duckdb::LogicalTypeId::LIST) {
// TODO: provide a more detailed description of the error
elog(ERROR, "Dimensionality of the schema and the data does not match");
throw duckdb::InvalidInputException("Dimensionality of the schema and the data does not match, data contains more dimensions than the amount of dimensions specified by the schema");
}
auto child_offset = duckdb::ListVector::GetListSize(*vec);
auto list_data = duckdb::FlatVector::GetData<duckdb::list_entry_t>(*vec);
Expand All @@ -804,13 +799,17 @@ ConvertPostgresToDuckValue(Datum value, duckdb::Vector &result, idx_t offset) {
}
if (ndims == 0) {
D_ASSERT(nelems == 0);
auto child_offset = duckdb::ListVector::GetListSize(*vec);
auto list_data = duckdb::FlatVector::GetData<duckdb::list_entry_t>(*vec);
list_data[write_offset] = duckdb::list_entry_t(
child_offset,
0
);
vec = &duckdb::ListVector::GetEntry(*vec);
}

if (vec->GetType().id() == duckdb::LogicalTypeId::LIST) {
// Same as before, but now the data has fewer dimensions than the schema
// TODO: provide a more detailed description of the error
elog(ERROR, "Dimensionality of the schema and the data does not match");
throw duckdb::InvalidInputException("Dimensionality of the schema and the data does not match, data contains fewer dimensions than the amount of dimensions specified by the schema");
}

auto child_type = vec->GetType();
Expand All @@ -827,7 +826,7 @@ ConvertPostgresToDuckValue(Datum value, duckdb::Vector &result, idx_t offset) {
break;
}
default:
elog(ERROR, "(DuckDB/ConvertPostgresToDuckValue) Unsupported quack type: %s", result.GetType().ToString().c_str());
throw duckdb::NotImplementedException("(DuckDB/ConvertPostgresToDuckValue) Unsupported quack type: %s", result.GetType().ToString().c_str());
break;
}
}
Expand Down Expand Up @@ -968,6 +967,8 @@ InsertTupleIntoChunk(duckdb::DataChunk &output, PostgresHeapSeqScanThreadInfo &t
if (validTuple) {
threadScanInfo.m_output_vector_size++;
}
output.SetCardinality(threadScanInfo.m_output_vector_size);
output.Verify();

parallelScanState.m_total_row_count++;

Expand Down
24 changes: 23 additions & 1 deletion test/regression/expected/array_type_support.out
Original file line number Diff line number Diff line change
@@ -1,5 +1,16 @@
drop extension if exists quack;
create extension quack;
-- INT4 (zero dimension)
CREATE TABLE int_array_0d(a INT[]);
INSERT INTO int_array_0d SELECT CAST(a as INT[]) FROM (VALUES
('{}')
) t(a);
SELECT * FROM int_array_0d;
a
----
{}
(1 row)

-- INT4 (single dimension)
CREATE TABLE int_array_1d(a INT[]);
INSERT INTO int_array_1d SELECT CAST(a as INT[]) FROM (VALUES
Expand All @@ -25,7 +36,17 @@ INSERT INTO int_array_2d VALUES
('{{11, 12, 13}, {14, 15, 16}}'),
('{{17, 18}, {19, 20}}');
SELECT * FROM int_array_2d;
ERROR: Dimensionality of the schema and the data does not match
ERROR: Quack execute returned an error: Invalid Input Error: Dimensionality of the schema and the data does not match, data contains more dimensions than the amount of dimensions specified by the schema
drop table int_array_2d;
-- INT4 (single dimensional data, two dimensionsal type)
CREATE TABLE int_array_2d(a INT[][]);
INSERT INTO int_array_2d VALUES
('{1, 2}'),
('{5, 6, 7}'),
('{11, 12, 13}'),
('{17, 18}');
SELECT * FROM int_array_2d;
ERROR: Quack execute returned an error: Invalid Input Error: Dimensionality of the schema and the data does not match, data contains fewer dimensions than the amount of dimensions specified by the schema
drop table int_array_2d;
-- INT4 (two dimensional data and type)
CREATE TABLE int_array_2d(a INT[][]);
Expand Down Expand Up @@ -77,6 +98,7 @@ SELECT * FROM bool_array_1d;
{}
(4 rows)

DROP TABLE int_array_0d;
DROP TABLE int_array_1d;
DROP TABLE int_array_2d;
DROP TABLE bigint_array_1d;
Expand Down
8 changes: 0 additions & 8 deletions test/regression/expected/type_support.out
Original file line number Diff line number Diff line change
Expand Up @@ -144,8 +144,6 @@ INSERT INTO smallint_numeric SELECT a FROM (VALUES
(45.12)
) t(a);
SELECT * FROM smallint_numeric;
INFO: SMALLINT
INFO: SMALLINT
a
-------
0.23
Expand All @@ -161,8 +159,6 @@ INSERT INTO integer_numeric SELECT a FROM (VALUES
(45.000012::NUMERIC(9,6))
) t(a);
SELECT * FROM integer_numeric;
INFO: INTEGER
INFO: INTEGER
a
------------
243.345035
Expand All @@ -178,8 +174,6 @@ INSERT INTO bigint_numeric SELECT a FROM (VALUES
(12.000000000001::NUMERIC(18,12))
) t(a);
SELECT * FROM bigint_numeric;
INFO: BIGINT
INFO: BIGINT
a
---------------------
856324.111122223333
Expand All @@ -195,8 +189,6 @@ INSERT INTO hugeint_numeric SELECT a FROM (VALUES
(123456789.000000000000000000000001::NUMERIC(38,24))
) t(a);
SELECT * FROM hugeint_numeric;
INFO: HUGEINT
INFO: HUGEINT
a
-----------------------------------------
32942348563242.111222333444555666777888
Expand Down
18 changes: 18 additions & 0 deletions test/regression/sql/array_type_support.sql
Original file line number Diff line number Diff line change
@@ -1,6 +1,13 @@
drop extension if exists quack;
create extension quack;

-- INT4 (zero dimension)
CREATE TABLE int_array_0d(a INT[]);
INSERT INTO int_array_0d SELECT CAST(a as INT[]) FROM (VALUES
('{}')
) t(a);
SELECT * FROM int_array_0d;

-- INT4 (single dimension)
CREATE TABLE int_array_1d(a INT[]);
INSERT INTO int_array_1d SELECT CAST(a as INT[]) FROM (VALUES
Expand All @@ -21,6 +28,16 @@ INSERT INTO int_array_2d VALUES
SELECT * FROM int_array_2d;
drop table int_array_2d;

-- INT4 (single dimensional data, two dimensionsal type)
CREATE TABLE int_array_2d(a INT[][]);
INSERT INTO int_array_2d VALUES
('{1, 2}'),
('{5, 6, 7}'),
('{11, 12, 13}'),
('{17, 18}');
SELECT * FROM int_array_2d;
drop table int_array_2d;

-- INT4 (two dimensional data and type)
CREATE TABLE int_array_2d(a INT[][]);
INSERT INTO int_array_2d VALUES
Expand Down Expand Up @@ -50,6 +67,7 @@ INSERT INTO bool_array_1d SELECT CAST(a as BOOL[]) FROM (VALUES
) t(a);
SELECT * FROM bool_array_1d;

DROP TABLE int_array_0d;
DROP TABLE int_array_1d;
DROP TABLE int_array_2d;
DROP TABLE bigint_array_1d;
Expand Down

0 comments on commit 27d2951

Please sign in to comment.