Skip to content

Commit

Permalink
Update vendored DuckDB sources to 9c831cb
Browse the repository at this point in the history
duckdblabs-bot committed Nov 18, 2024
1 parent 9c831cb commit 5f0a345
Show file tree
Hide file tree
Showing 33 changed files with 210 additions and 155 deletions.
22 changes: 13 additions & 9 deletions src/duckdb/extension/json/include/json_common.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,8 @@ using json_key_set_t = unordered_set<JSONKey, JSONKeyHash, JSONKeyEquality>;
struct JSONCommon {
public:
//! Read/Write flags
static constexpr auto READ_FLAG = YYJSON_READ_ALLOW_INF_AND_NAN | YYJSON_READ_ALLOW_TRAILING_COMMAS;
static constexpr auto READ_FLAG =
YYJSON_READ_ALLOW_INF_AND_NAN | YYJSON_READ_ALLOW_TRAILING_COMMAS | YYJSON_READ_BIGNUM_AS_RAW;
static constexpr auto READ_STOP_FLAG = READ_FLAG | YYJSON_READ_STOP_WHEN_DONE;
static constexpr auto READ_INSITU_FLAG = READ_STOP_FLAG | YYJSON_READ_INSITU;
static constexpr auto WRITE_FLAG = YYJSON_WRITE_ALLOW_INF_AND_NAN;
Expand All @@ -102,30 +103,32 @@ struct JSONCommon {
static constexpr char const *TYPE_STRING_BIGINT = "BIGINT";
static constexpr char const *TYPE_STRING_UBIGINT = "UBIGINT";
static constexpr char const *TYPE_STRING_DOUBLE = "DOUBLE";
static constexpr char const *TYPE_STRING_HUGEINT = "HUGEINT";
static constexpr char const *TYPE_STRING_VARCHAR = "VARCHAR";
static constexpr char const *TYPE_STRING_ARRAY = "ARRAY";
static constexpr char const *TYPE_STRING_OBJECT = "OBJECT";

static inline const char *ValTypeToString(yyjson_val *val) {
switch (yyjson_get_tag(val)) {
case YYJSON_TYPE_NULL | YYJSON_SUBTYPE_NONE:
return JSONCommon::TYPE_STRING_NULL;
return TYPE_STRING_NULL;
case YYJSON_TYPE_STR | YYJSON_SUBTYPE_NOESC:
case YYJSON_TYPE_STR | YYJSON_SUBTYPE_NONE:
return JSONCommon::TYPE_STRING_VARCHAR;
return TYPE_STRING_VARCHAR;
case YYJSON_TYPE_ARR | YYJSON_SUBTYPE_NONE:
return JSONCommon::TYPE_STRING_ARRAY;
return TYPE_STRING_ARRAY;
case YYJSON_TYPE_OBJ | YYJSON_SUBTYPE_NONE:
return JSONCommon::TYPE_STRING_OBJECT;
return TYPE_STRING_OBJECT;
case YYJSON_TYPE_BOOL | YYJSON_SUBTYPE_TRUE:
case YYJSON_TYPE_BOOL | YYJSON_SUBTYPE_FALSE:
return JSONCommon::TYPE_STRING_BOOLEAN;
return TYPE_STRING_BOOLEAN;
case YYJSON_TYPE_NUM | YYJSON_SUBTYPE_UINT:
return JSONCommon::TYPE_STRING_UBIGINT;
return TYPE_STRING_UBIGINT;
case YYJSON_TYPE_NUM | YYJSON_SUBTYPE_SINT:
return JSONCommon::TYPE_STRING_BIGINT;
return TYPE_STRING_BIGINT;
case YYJSON_TYPE_NUM | YYJSON_SUBTYPE_REAL:
return JSONCommon::TYPE_STRING_DOUBLE;
case YYJSON_TYPE_RAW | YYJSON_SUBTYPE_NONE:
return TYPE_STRING_DOUBLE;
default:
throw InternalException("Unexpected yyjson tag in ValTypeToString");
}
Expand Down Expand Up @@ -154,6 +157,7 @@ struct JSONCommon {
case YYJSON_TYPE_NUM | YYJSON_SUBTYPE_SINT:
return LogicalTypeId::BIGINT;
case YYJSON_TYPE_NUM | YYJSON_SUBTYPE_REAL:
case YYJSON_TYPE_RAW | YYJSON_SUBTYPE_NONE:
return LogicalTypeId::DOUBLE;
default:
throw InternalException("Unexpected yyjson tag in ValTypeToLogicalTypeId");
Expand Down
2 changes: 1 addition & 1 deletion src/duckdb/extension/json/include/json_scan.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ struct JSONScanData : public TableFunctionData {
bool convert_strings_to_integers = false;
//! If a struct contains more fields than this threshold with at least 80% similar types,
//! we infer it as MAP type
idx_t map_inference_threshold = 25;
idx_t map_inference_threshold = 200;

//! All column names (in order)
vector<string> names;
Expand Down
3 changes: 3 additions & 0 deletions src/duckdb/extension/json/json_functions/json_transform.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,7 @@ static inline bool GetValueNumerical(yyjson_val *val, T &result, JSONTransformOp
switch (unsafe_yyjson_get_tag(val)) {
case YYJSON_TYPE_STR | YYJSON_SUBTYPE_NOESC:
case YYJSON_TYPE_STR | YYJSON_SUBTYPE_NONE:
case YYJSON_TYPE_RAW | YYJSON_SUBTYPE_NONE:
success = OP::template Operation<string_t, T>(GetString(val), result, options.strict_cast);
break;
case YYJSON_TYPE_ARR | YYJSON_SUBTYPE_NONE:
Expand Down Expand Up @@ -138,6 +139,7 @@ static inline bool GetValueDecimal(yyjson_val *val, T &result, uint8_t w, uint8_
switch (unsafe_yyjson_get_tag(val)) {
case YYJSON_TYPE_STR | YYJSON_SUBTYPE_NOESC:
case YYJSON_TYPE_STR | YYJSON_SUBTYPE_NONE:
case YYJSON_TYPE_RAW | YYJSON_SUBTYPE_NONE:
success = OP::template Operation<string_t, T>(GetString(val), result, options.parameters, w, s);
break;
case YYJSON_TYPE_ARR | YYJSON_SUBTYPE_NONE:
Expand Down Expand Up @@ -172,6 +174,7 @@ static inline bool GetValueString(yyjson_val *val, yyjson_alc *alc, string_t &re
switch (unsafe_yyjson_get_tag(val)) {
case YYJSON_TYPE_STR | YYJSON_SUBTYPE_NOESC:
case YYJSON_TYPE_STR | YYJSON_SUBTYPE_NONE:
case YYJSON_TYPE_RAW | YYJSON_SUBTYPE_NONE:
result = string_t(unsafe_yyjson_get_str(val), unsafe_yyjson_get_len(val));
return true;
case YYJSON_TYPE_ARR | YYJSON_SUBTYPE_NONE:
Expand Down
4 changes: 2 additions & 2 deletions src/duckdb/extension/json/serialize_json.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ void JSONScanData::Serialize(Serializer &serializer) const {
serializer.WritePropertyWithDefault<double>(113, "field_appearance_threshold", field_appearance_threshold, 0.1);
serializer.WritePropertyWithDefault<idx_t>(114, "maximum_sample_files", maximum_sample_files, 32);
serializer.WritePropertyWithDefault<bool>(115, "convert_strings_to_integers", convert_strings_to_integers, false);
serializer.WritePropertyWithDefault<idx_t>(116, "map_inference_threshold", map_inference_threshold, 25);
serializer.WritePropertyWithDefault<idx_t>(116, "map_inference_threshold", map_inference_threshold, 200);
}

unique_ptr<JSONScanData> JSONScanData::Deserialize(Deserializer &deserializer) {
Expand Down Expand Up @@ -75,7 +75,7 @@ unique_ptr<JSONScanData> JSONScanData::Deserialize(Deserializer &deserializer) {
deserializer.ReadPropertyWithExplicitDefault<double>(113, "field_appearance_threshold", result->field_appearance_threshold, 0.1);
deserializer.ReadPropertyWithExplicitDefault<idx_t>(114, "maximum_sample_files", result->maximum_sample_files, 32);
deserializer.ReadPropertyWithExplicitDefault<bool>(115, "convert_strings_to_integers", result->convert_strings_to_integers, false);
deserializer.ReadPropertyWithExplicitDefault<idx_t>(116, "map_inference_threshold", result->map_inference_threshold, 25);
deserializer.ReadPropertyWithExplicitDefault<idx_t>(116, "map_inference_threshold", result->map_inference_threshold, 200);
return result;
}

Expand Down
2 changes: 1 addition & 1 deletion src/duckdb/src/common/row_operations/row_external.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ static inline void VerifyUnswizzledString(const RowLayout &layout, const idx_t &
idx_t idx_in_entry;
ValidityBytes::GetEntryIndex(col_idx, entry_idx, idx_in_entry);

ValidityBytes row_mask(row_ptr);
ValidityBytes row_mask(row_ptr, layout.ColumnCount());
if (row_mask.RowIsValid(row_mask.GetValidityEntry(entry_idx), idx_in_entry)) {
auto str = Load<string_t>(row_ptr + layout.GetOffsets()[col_idx]);
str.Verify();
Expand Down
25 changes: 13 additions & 12 deletions src/duckdb/src/common/row_operations/row_gather.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ static void TemplatedGatherLoop(Vector &rows, const SelectionVector &row_sel, Ve
auto row = ptrs[row_idx];
auto col_idx = col_sel.get_index(i);
data[col_idx] = Load<T>(row + col_offset);
ValidityBytes row_mask(row);
ValidityBytes row_mask(row, layout.ColumnCount());
if (!row_mask.RowIsValid(row_mask.GetValidityEntry(entry_idx), idx_in_entry)) {
if (build_size > STANDARD_VECTOR_SIZE && col_mask.AllValid()) {
//! We need to initialize the mask with the vector size.
Expand Down Expand Up @@ -67,7 +67,7 @@ static void GatherVarchar(Vector &rows, const SelectionVector &row_sel, Vector &
auto col_idx = col_sel.get_index(i);
auto col_ptr = row + col_offset;
data[col_idx] = Load<string_t>(col_ptr);
ValidityBytes row_mask(row);
ValidityBytes row_mask(row, layout.ColumnCount());
if (!row_mask.RowIsValid(row_mask.GetValidityEntry(entry_idx), idx_in_entry)) {
if (build_size > STANDARD_VECTOR_SIZE && col_mask.AllValid()) {
//! We need to initialize the mask with the vector size.
Expand Down Expand Up @@ -179,7 +179,8 @@ void RowOperations::Gather(Vector &rows, const SelectionVector &row_sel, Vector
}

template <class T>
static void TemplatedFullScanLoop(Vector &rows, Vector &col, idx_t count, idx_t col_offset, idx_t col_no) {
static void TemplatedFullScanLoop(Vector &rows, Vector &col, idx_t count, idx_t col_offset, idx_t col_no,
idx_t column_count) {
// Precompute mask indexes
idx_t entry_idx;
idx_t idx_in_entry;
Expand All @@ -192,7 +193,7 @@ static void TemplatedFullScanLoop(Vector &rows, Vector &col, idx_t count, idx_t
for (idx_t i = 0; i < count; i++) {
auto row = ptrs[i];
data[i] = Load<T>(row + col_offset);
ValidityBytes row_mask(row);
ValidityBytes row_mask(row, column_count);
if (!row_mask.RowIsValid(row_mask.GetValidityEntry(entry_idx), idx_in_entry)) {
throw InternalException("Null value comparisons not implemented for perfect hash table yet");
// col_mask.SetInvalid(i);
Expand All @@ -206,28 +207,28 @@ void RowOperations::FullScanColumn(const TupleDataLayout &layout, Vector &rows,
col.SetVectorType(VectorType::FLAT_VECTOR);
switch (col.GetType().InternalType()) {
case PhysicalType::UINT8:
TemplatedFullScanLoop<uint8_t>(rows, col, count, col_offset, col_no);
TemplatedFullScanLoop<uint8_t>(rows, col, count, col_offset, col_no, layout.ColumnCount());
break;
case PhysicalType::UINT16:
TemplatedFullScanLoop<uint16_t>(rows, col, count, col_offset, col_no);
TemplatedFullScanLoop<uint16_t>(rows, col, count, col_offset, col_no, layout.ColumnCount());
break;
case PhysicalType::UINT32:
TemplatedFullScanLoop<uint32_t>(rows, col, count, col_offset, col_no);
TemplatedFullScanLoop<uint32_t>(rows, col, count, col_offset, col_no, layout.ColumnCount());
break;
case PhysicalType::UINT64:
TemplatedFullScanLoop<uint64_t>(rows, col, count, col_offset, col_no);
TemplatedFullScanLoop<uint64_t>(rows, col, count, col_offset, col_no, layout.ColumnCount());
break;
case PhysicalType::INT8:
TemplatedFullScanLoop<int8_t>(rows, col, count, col_offset, col_no);
TemplatedFullScanLoop<int8_t>(rows, col, count, col_offset, col_no, layout.ColumnCount());
break;
case PhysicalType::INT16:
TemplatedFullScanLoop<int16_t>(rows, col, count, col_offset, col_no);
TemplatedFullScanLoop<int16_t>(rows, col, count, col_offset, col_no, layout.ColumnCount());
break;
case PhysicalType::INT32:
TemplatedFullScanLoop<int32_t>(rows, col, count, col_offset, col_no);
TemplatedFullScanLoop<int32_t>(rows, col, count, col_offset, col_no, layout.ColumnCount());
break;
case PhysicalType::INT64:
TemplatedFullScanLoop<int64_t>(rows, col, count, col_offset, col_no);
TemplatedFullScanLoop<int64_t>(rows, col, count, col_offset, col_no, layout.ColumnCount());
break;
default:
throw NotImplementedException("Unimplemented type for RowOperations::FullScanColumn");
Expand Down
4 changes: 2 additions & 2 deletions src/duckdb/src/common/row_operations/row_matcher.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ static idx_t TemplatedMatchLoop(const TupleDataVectorFormat &lhs_format, Selecti
const auto lhs_null = LHS_ALL_VALID ? false : !lhs_validity.RowIsValid(lhs_idx);

const auto &rhs_location = rhs_locations[idx];
const ValidityBytes rhs_mask(rhs_location);
const ValidityBytes rhs_mask(rhs_location, rhs_layout.ColumnCount());
const auto rhs_null = !rhs_mask.RowIsValid(rhs_mask.GetValidityEntryUnsafe(entry_idx), idx_in_entry);

if (COMPARISON_OP::template Operation<T>(lhs_data[lhs_idx], Load<T>(rhs_location + rhs_offset_in_row), lhs_null,
Expand Down Expand Up @@ -85,7 +85,7 @@ static idx_t StructMatchEquality(Vector &lhs_vector, const TupleDataVectorFormat
const auto lhs_null = lhs_validity.AllValid() ? false : !lhs_validity.RowIsValid(lhs_idx);

const auto &rhs_location = rhs_locations[idx];
const ValidityBytes rhs_mask(rhs_location);
const ValidityBytes rhs_mask(rhs_location, rhs_layout.ColumnCount());
const auto rhs_null = !rhs_mask.RowIsValid(rhs_mask.GetValidityEntryUnsafe(entry_idx), idx_in_entry);

// For structs there is no value to compare, here we match NULLs and let recursion do the rest
Expand Down
39 changes: 20 additions & 19 deletions src/duckdb/src/common/row_operations/row_scatter.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ using ValidityBytes = RowLayout::ValidityBytes;

template <class T>
static void TemplatedScatter(UnifiedVectorFormat &col, Vector &rows, const SelectionVector &sel, const idx_t count,
const idx_t col_offset, const idx_t col_no) {
const idx_t col_offset, const idx_t col_no, const idx_t col_count) {
auto data = UnifiedVectorFormat::GetData<T>(col);
auto ptrs = FlatVector::GetData<data_ptr_t>(rows);

Expand All @@ -34,7 +34,7 @@ static void TemplatedScatter(UnifiedVectorFormat &col, Vector &rows, const Selec
T store_value = isnull ? NullValue<T>() : data[col_idx];
Store<T>(store_value, row + col_offset);
if (isnull) {
ValidityBytes col_mask(ptrs[idx]);
ValidityBytes col_mask(ptrs[idx], col_count);
col_mask.SetInvalidUnsafe(col_no);
}
}
Expand Down Expand Up @@ -64,7 +64,7 @@ static void ComputeStringEntrySizes(const UnifiedVectorFormat &col, idx_t entry_

static void ScatterStringVector(UnifiedVectorFormat &col, Vector &rows, data_ptr_t str_locations[],
const SelectionVector &sel, const idx_t count, const idx_t col_offset,
const idx_t col_no) {
const idx_t col_no, const idx_t col_count) {
auto string_data = UnifiedVectorFormat::GetData<string_t>(col);
auto ptrs = FlatVector::GetData<data_ptr_t>(rows);

Expand All @@ -75,7 +75,7 @@ static void ScatterStringVector(UnifiedVectorFormat &col, Vector &rows, data_ptr
auto col_idx = col.sel->get_index(idx);
auto row = ptrs[idx];
if (!col.validity.RowIsValid(col_idx)) {
ValidityBytes col_mask(row);
ValidityBytes col_mask(row, col_count);
col_mask.SetInvalidUnsafe(col_no);
Store<string_t>(null, row + col_offset);
} else if (string_data[col_idx].IsInlined()) {
Expand Down Expand Up @@ -118,11 +118,12 @@ void RowOperations::Scatter(DataChunk &columns, UnifiedVectorFormat col_data[],
}

// Set the validity mask for each row before inserting data
idx_t column_count = layout.ColumnCount();
auto ptrs = FlatVector::GetData<data_ptr_t>(rows);
for (idx_t i = 0; i < count; ++i) {
auto row_idx = sel.get_index(i);
auto row = ptrs[row_idx];
ValidityBytes(row).SetAllValid(layout.ColumnCount());
ValidityBytes(row, column_count).SetAllValid(layout.ColumnCount());
}

const auto vcount = columns.size();
Expand Down Expand Up @@ -180,46 +181,46 @@ void RowOperations::Scatter(DataChunk &columns, UnifiedVectorFormat col_data[],
switch (types[col_no].InternalType()) {
case PhysicalType::BOOL:
case PhysicalType::INT8:
TemplatedScatter<int8_t>(col, rows, sel, count, col_offset, col_no);
TemplatedScatter<int8_t>(col, rows, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::INT16:
TemplatedScatter<int16_t>(col, rows, sel, count, col_offset, col_no);
TemplatedScatter<int16_t>(col, rows, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::INT32:
TemplatedScatter<int32_t>(col, rows, sel, count, col_offset, col_no);
TemplatedScatter<int32_t>(col, rows, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::INT64:
TemplatedScatter<int64_t>(col, rows, sel, count, col_offset, col_no);
TemplatedScatter<int64_t>(col, rows, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::UINT8:
TemplatedScatter<uint8_t>(col, rows, sel, count, col_offset, col_no);
TemplatedScatter<uint8_t>(col, rows, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::UINT16:
TemplatedScatter<uint16_t>(col, rows, sel, count, col_offset, col_no);
TemplatedScatter<uint16_t>(col, rows, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::UINT32:
TemplatedScatter<uint32_t>(col, rows, sel, count, col_offset, col_no);
TemplatedScatter<uint32_t>(col, rows, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::UINT64:
TemplatedScatter<uint64_t>(col, rows, sel, count, col_offset, col_no);
TemplatedScatter<uint64_t>(col, rows, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::INT128:
TemplatedScatter<hugeint_t>(col, rows, sel, count, col_offset, col_no);
TemplatedScatter<hugeint_t>(col, rows, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::UINT128:
TemplatedScatter<uhugeint_t>(col, rows, sel, count, col_offset, col_no);
TemplatedScatter<uhugeint_t>(col, rows, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::FLOAT:
TemplatedScatter<float>(col, rows, sel, count, col_offset, col_no);
TemplatedScatter<float>(col, rows, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::DOUBLE:
TemplatedScatter<double>(col, rows, sel, count, col_offset, col_no);
TemplatedScatter<double>(col, rows, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::INTERVAL:
TemplatedScatter<interval_t>(col, rows, sel, count, col_offset, col_no);
TemplatedScatter<interval_t>(col, rows, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::VARCHAR:
ScatterStringVector(col, rows, data_locations, sel, count, col_offset, col_no);
ScatterStringVector(col, rows, data_locations, sel, count, col_offset, col_no, column_count);
break;
case PhysicalType::LIST:
case PhysicalType::STRUCT:
Expand Down
14 changes: 7 additions & 7 deletions src/duckdb/src/common/sort/comparators.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ namespace duckdb {
bool Comparators::TieIsBreakable(const idx_t &tie_col, const data_ptr_t &row_ptr, const SortLayout &sort_layout) {
const auto &col_idx = sort_layout.sorting_to_blob_col.at(tie_col);
// Check if the blob is NULL
ValidityBytes row_mask(row_ptr);
ValidityBytes row_mask(row_ptr, sort_layout.column_count);
idx_t entry_idx;
idx_t idx_in_entry;
ValidityBytes::GetEntryIndex(col_idx, entry_idx, idx_in_entry);
Expand Down Expand Up @@ -195,8 +195,8 @@ int Comparators::CompareStructAndAdvance(data_ptr_t &left_ptr, data_ptr_t &right
const child_list_t<LogicalType> &types, bool valid) {
idx_t count = types.size();
// Load validity masks
ValidityBytes left_validity(left_ptr);
ValidityBytes right_validity(right_ptr);
ValidityBytes left_validity(left_ptr, types.size());
ValidityBytes right_validity(right_ptr, types.size());
left_ptr += (count + 7) / 8;
right_ptr += (count + 7) / 8;
// Initialize variables
Expand Down Expand Up @@ -235,8 +235,8 @@ int Comparators::CompareArrayAndAdvance(data_ptr_t &left_ptr, data_ptr_t &right_
}

// Load array validity masks
ValidityBytes left_validity(left_ptr);
ValidityBytes right_validity(right_ptr);
ValidityBytes left_validity(left_ptr, array_size);
ValidityBytes right_validity(right_ptr, array_size);
left_ptr += (array_size + 7) / 8;
right_ptr += (array_size + 7) / 8;

Expand Down Expand Up @@ -352,8 +352,8 @@ int Comparators::CompareListAndAdvance(data_ptr_t &left_ptr, data_ptr_t &right_p
left_ptr += sizeof(idx_t);
right_ptr += sizeof(idx_t);
// Load list validity masks
ValidityBytes left_validity(left_ptr);
ValidityBytes right_validity(right_ptr);
ValidityBytes left_validity(left_ptr, left_len);
ValidityBytes right_validity(right_ptr, right_len);
left_ptr += (left_len + 7) / 8;
right_ptr += (right_len + 7) / 8;
// Compare
Expand Down
6 changes: 3 additions & 3 deletions src/duckdb/src/common/types/column/column_data_collection.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -318,7 +318,7 @@ void ColumnDataCollection::InitializeAppend(ColumnDataAppendState &state) {

void ColumnDataCopyValidity(const UnifiedVectorFormat &source_data, validity_t *target, idx_t source_offset,
idx_t target_offset, idx_t copy_count) {
ValidityMask validity(target);
ValidityMask validity(target, STANDARD_VECTOR_SIZE);
if (target_offset == 0) {
// first time appending to this vector
// all data here is still uninitialized
Expand Down Expand Up @@ -410,7 +410,7 @@ static void TemplatedColumnDataCopy(ColumnDataMetaData &meta_data, const Unified
current_segment.offset);
auto validity_data = ColumnDataCollectionSegment::GetValidityPointerForWriting(base_ptr, OP::TypeSize());

ValidityMask result_validity(validity_data);
ValidityMask result_validity(validity_data, STANDARD_VECTOR_SIZE);
if (current_segment.count == 0) {
// first time appending to this vector
// all data here is still uninitialized
Expand Down Expand Up @@ -525,7 +525,7 @@ void ColumnDataCopy<string_t>(ColumnDataMetaData &meta_data, const UnifiedVector
auto base_ptr = segment.allocator->GetDataPointer(append_state.current_chunk_state, current_segment.block_id,
current_segment.offset);
auto validity_data = ColumnDataCollectionSegment::GetValidityPointerForWriting(base_ptr, sizeof(string_t));
ValidityMask target_validity(validity_data);
ValidityMask target_validity(validity_data, STANDARD_VECTOR_SIZE);
if (current_segment.count == 0) {
// first time appending to this vector
// all data here is still uninitialized
Expand Down
Loading

0 comments on commit 5f0a345

Please sign in to comment.