Don't use elog in code that runs inside DuckDB worker threads
Tishj committed Jun 18, 2024
1 parent 3add7a4 commit ce76027
Showing 3 changed files with 16 additions and 28 deletions.
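Why: elog(ERROR, ...) never returns; it longjmps into the Postgres backend's error handler and updates backend-global state, none of which is thread-safe. DuckDB runs query execution on its own worker threads, so any elog reachable from that code can corrupt the backend; the same goes for the elog(INFO, ...) debug lines removed below, since every elevel funnels through the same logging machinery. The replacement is to throw a duckdb::Exception subclass, which DuckDB catches on the worker thread and converts into an ordinary failed query result. A minimal sketch of the rule, using a hypothetical helper modeled on ChildTypeFromArray from this diff:

    // Runs inside DuckDB execution, possibly on a worker thread: report
    // failure by throwing, never by calling elog(ERROR, ...).
    static duckdb::LogicalType ExampleChildTypeFromArray(Oid array_type) {
        switch (array_type) {
        case BOOLARRAYOID:
            return duckdb::LogicalTypeId::BOOLEAN;
        case INT4ARRAYOID:
            return duckdb::LogicalTypeId::INTEGER;
        default:
            // Was: elog(ERROR, "No child type set for Postgres OID %d", array_type);
            throw duckdb::NotImplementedException("No child type set for Postgres OID %d", array_type);
        }
    }

Which subclass is thrown determines the prefix DuckDB puts on the message; the regression output below shows "Invalid Input Error" for InvalidInputException.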
32 changes: 14 additions & 18 deletions src/quack_types.cpp
@@ -223,7 +223,7 @@ struct PostgresArrayAppendState {
expected_values *= to_append;
}
if (dimensions[dimension] != to_append) {
elog(ERROR, "Expected %d values in list at dimension %d, found %d instead", dimensions[dimension], dimension, to_append);
throw duckdb::InvalidInputException("Expected %d values in list at dimension %d, found %d instead", dimensions[dimension], dimension, to_append);
}

auto &child_type = duckdb::ListType::GetChildType(value.type());
@@ -234,7 +234,7 @@ struct PostgresArrayAppendState {
if (child_val.IsNull()) {
// Postgres arrays cannot contain NULLs at the array level,
// i.e. {{1,2}, NULL, {3,4}} is not supported
elog(ERROR, "Returned LIST contains a NULL at an intermediate dimension (not the value level), which is not supported in Postgres");
throw duckdb::InvalidInputException("Returned LIST contains a NULL at an intermediate dimension (not the value level), which is not supported in Postgres");
}
AppendValueAtDimension(child_val, dimension + 1);
}
@@ -367,27 +367,23 @@ ConvertDuckToPostgresValue(TupleTableSlot *slot, duckdb::Value &value, idx_t col
auto scale = duckdb::DecimalType::GetScale(value.type());
switch (physical_type) {
case duckdb::PhysicalType::INT16: {
elog(INFO, "SMALLINT");
numeric_var = ConvertNumeric<int16_t>(value.GetValueUnsafe<int16_t>(), scale);
break;
}
case duckdb::PhysicalType::INT32: {
elog(INFO, "INTEGER");
numeric_var = ConvertNumeric<int32_t>(value.GetValueUnsafe<int32_t>(), scale);
break;
}
case duckdb::PhysicalType::INT64: {
elog(INFO, "BIGINT");
numeric_var = ConvertNumeric<int64_t>(value.GetValueUnsafe<int64_t>(), scale);
break;
}
case duckdb::PhysicalType::INT128: {
elog(INFO, "HUGEINT");
numeric_var = ConvertNumeric<hugeint_t, DecimalConversionHugeint>(value.GetValueUnsafe<hugeint_t>(), scale);
break;
}
default: {
elog(ERROR, "Unrecognized physical type for DECIMAL value");
throw duckdb::InternalException("Unrecognized physical type for DECIMAL value");
break;
}
}
@@ -427,7 +423,7 @@ ConvertDuckToPostgresValue(TupleTableSlot *slot, duckdb::Value &value, idx_t col
break;
}
default:
elog(ERROR, "(DuckDB/ConvertDuckToPostgresValue) Unsuported quack type: %d", oid);
throw duckdb::NotImplementedException("(DuckDB/ConvertDuckToPostgresValue) Unsuported quack type: %d", oid);
}
}

@@ -454,7 +450,7 @@ static duckdb::LogicalType ChildTypeFromArray(Oid array_type) {
case INT8ARRAYOID:
return duckdb::LogicalTypeId::BIGINT;
default:
elog(ERROR, "No child type set for Postgres OID %d", array_type);
throw duckdb::NotImplementedException("No child type set for Postgres OID %d", array_type);
}
}

@@ -509,7 +505,7 @@ ConvertPostgresToDuckColumnType(Form_pg_attribute &attribute) {
return duck_type;
}
default:
elog(ERROR, "(DuckDB/ConvertPostgresToDuckColumnType) Unsupported quack type: %d", type);
throw duckdb::NotImplementedException("(DuckDB/ConvertPostgresToDuckColumnType) Unsupported quack type: %d", type);
}
}

@@ -564,11 +560,11 @@ GetPostgresDuckDBType(duckdb::LogicalType type) {
case duckdb::LogicalTypeId::BIGINT:
return INT8ARRAYOID;
default:
elog(ERROR, "(DuckDB/GetPostgresDuckDBType) Unsupported quack type: %s", type.ToString().c_str());
throw duckdb::InvalidInputException("(DuckDB/GetPostgresDuckDBType) Unsupported quack type: %s", type.ToString().c_str());
}
}
default: {
elog(ERROR, "(DuckDB/GetPostgresDuckDBType) Unsupported quack type: %s", type.ToString().c_str());
throw duckdb::InvalidInputException("(DuckDB/GetPostgresDuckDBType) Unsupported quack type: %s", type.ToString().c_str());
break;
}
}
@@ -670,7 +666,7 @@ ConvertPostgresToDuckValue(Datum value, duckdb::Vector &result, idx_t offset) {
auto bpchar_data = VARDATA_ANY(value);

if (bpchar_length != 1) {
elog(ERROR, "Expected 1 length BPCHAR for TINYINT marked with IsBpChar at offset %llu", offset);
throw duckdb::InternalException("Expected 1 length BPCHAR for TINYINT marked with IsBpChar at offset %llu", offset);
}
Append<int8_t>(result, bpchar_data[0], offset);
} else {
@@ -738,7 +734,7 @@ ConvertPostgresToDuckValue(Datum value, duckdb::Vector &result, idx_t offset) {
break;
}
default: {
elog(ERROR, "Unrecognized physical type for DECIMAL value");
throw duckdb::InternalException("Unrecognized physical type (%s) for DECIMAL value", duckdb::EnumUtil::ToString(physical_type));
break;
}
}
@@ -774,7 +770,7 @@ ConvertPostgresToDuckValue(Datum value, duckdb::Vector &result, idx_t offset) {
deconstruct_array(array, ARR_ELEMTYPE(array), typlen, typbyval, typalign, &elems, &nulls, &nelems);

if (ndims == -1) {
elog(ERROR, "Array type has an ndims of -1, so it's actually not an array??");
throw duckdb::InternalException("Array type has an ndims of -1, so it's actually not an array??");
}
// Set the list_entry_t metadata
duckdb::Vector *vec = &result;
@@ -783,7 +779,7 @@
auto previous_dimension = dim ? dims[dim - 1] : 1;
auto dimension = dims[dim];
if (vec->GetType().id() != duckdb::LogicalTypeId::LIST) {
elog(ERROR, "Dimensionality of the schema and the data does not match, data contains more dimensions than the amount of dimensions specified by the schema");
throw duckdb::InvalidInputException("Dimensionality of the schema and the data does not match, data contains more dimensions than the amount of dimensions specified by the schema");
}
auto child_offset = duckdb::ListVector::GetListSize(*vec);
auto list_data = duckdb::FlatVector::GetData<duckdb::list_entry_t>(*vec);
@@ -808,7 +804,7 @@
}

if (vec->GetType().id() == duckdb::LogicalTypeId::LIST) {
elog(ERROR, "Dimensionality of the schema and the data does not match, data contains fewer dimensions than the amount of dimensions specified by the schema");
throw duckdb::InvalidInputException("Dimensionality of the schema and the data does not match, data contains fewer dimensions than the amount of dimensions specified by the schema");
}

auto child_type = vec->GetType();
@@ -825,7 +821,7 @@
break;
}
default:
elog(ERROR, "(DuckDB/ConvertPostgresToDuckValue) Unsupported quack type: %s", result.GetType().ToString().c_str());
throw duckdb::NotImplementedException("(DuckDB/ConvertPostgresToDuckValue) Unsupported quack type: %s", result.GetType().ToString().c_str());
break;
}
}
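The exceptions thrown above are caught by DuckDB and reported back through the single Postgres backend thread, where elog is safe again; the updated regression expectations below show the resulting shape ("ERROR: Quack execute returned an error: Invalid Input Error: ..."). A plausible sketch of that boundary, with illustrative names rather than the extension's actual ones:

    // Back on the Postgres backend's own thread, so elog may be used to
    // re-raise whatever error DuckDB collected from its workers.
    auto result = connection.Query(query_string);
    if (result->HasError()) {
        elog(ERROR, "Quack execute returned an error: %s", result->GetError().c_str());
    }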
4 changes: 2 additions & 2 deletions test/regression/expected/array_type_support.out
@@ -25,7 +25,7 @@ INSERT INTO int_array_2d VALUES
('{{11, 12, 13}, {14, 15, 16}}'),
('{{17, 18}, {19, 20}}');
SELECT * FROM int_array_2d;
- ERROR: Dimensionality of the schema and the data does not match, data contains more dimensions than the amount of dimensions specified by the schema
+ ERROR: Quack execute returned an error: Invalid Input Error: Dimensionality of the schema and the data does not match, data contains more dimensions than the amount of dimensions specified by the schema
drop table int_array_2d;
-- INT4 (single dimensional data, two dimensional type)
CREATE TABLE int_array_2d(a INT[][]);
@@ -35,7 +35,7 @@ INSERT INTO int_array_2d VALUES
('{11, 12, 13}'),
('{17, 18}');
SELECT * FROM int_array_2d;
- ERROR: Dimensionality of the schema and the data does not match, data contains fewer dimensions than the amount of dimensions specified by the schema
+ ERROR: Quack execute returned an error: Invalid Input Error: Dimensionality of the schema and the data does not match, data contains fewer dimensions than the amount of dimensions specified by the schema
drop table int_array_2d;
-- INT4 (two dimensional data and type)
CREATE TABLE int_array_2d(a INT[][]);
8 changes: 0 additions & 8 deletions test/regression/expected/type_support.out
@@ -144,8 +144,6 @@ INSERT INTO smallint_numeric SELECT a FROM (VALUES
(45.12)
) t(a);
SELECT * FROM smallint_numeric;
- INFO: SMALLINT
- INFO: SMALLINT
a
-------
0.23
@@ -161,8 +159,6 @@ INSERT INTO integer_numeric SELECT a FROM (VALUES
(45.000012::NUMERIC(9,6))
) t(a);
SELECT * FROM integer_numeric;
- INFO: INTEGER
- INFO: INTEGER
a
------------
243.345035
@@ -178,8 +174,6 @@ INSERT INTO bigint_numeric SELECT a FROM (VALUES
(12.000000000001::NUMERIC(18,12))
) t(a);
SELECT * FROM bigint_numeric;
- INFO: BIGINT
- INFO: BIGINT
a
---------------------
856324.111122223333
@@ -195,8 +189,6 @@ INSERT INTO hugeint_numeric SELECT a FROM (VALUES
(123456789.000000000000000000000001::NUMERIC(38,24))
) t(a);
SELECT * FROM hugeint_numeric;
- INFO: HUGEINT
- INFO: HUGEINT
a
-----------------------------------------
32942348563242.111222333444555666777888
