From 3243d8c644c0ed71ac9e58bc1c1f463b406561bc Mon Sep 17 00:00:00 2001
From: Ciro Spaciari
Date: Wed, 5 Feb 2025 18:05:48 -0800
Subject: [PATCH] more

---
 src/sql/postgres.zig                |  4 ++
 src/sql/postgres/postgres_types.zig |  2 +
 src/string_immutable.zig            | 37 ---------------
 test/js/sql/sql.test.ts             | 70 +++++++++++++++++++++++++++++
 4 files changed, 76 insertions(+), 37 deletions(-)

diff --git a/src/sql/postgres.zig b/src/sql/postgres.zig
index cff01b8acd2927..59c3b76994c182 100644
--- a/src/sql/postgres.zig
+++ b/src/sql/postgres.zig
@@ -2507,6 +2507,8 @@ pub const PostgresSQLConnection = struct {
                 .macaddr_array,
                 .inet_array,
                 .aclitem_array,
+                .pg_database_array,
+                .pg_database_array2,
                 => {
                     // this is also a string until we reach "," or "}" but a single word string like Bun
                     var current_idx: usize = 0;
@@ -2909,6 +2911,8 @@ pub const PostgresSQLConnection = struct {
                 .inet_array,
                 .aclitem_array,
                 .tid_array,
+                .pg_database_array,
+                .pg_database_array2,
                 // numeric array types
                 .int8_array,
                 .int2_array,
diff --git a/src/sql/postgres/postgres_types.zig b/src/sql/postgres/postgres_types.zig
index 17d7249fae9478..bd0787199fcbd1 100644
--- a/src/sql/postgres/postgres_types.zig
+++ b/src/sql/postgres/postgres_types.zig
@@ -175,6 +175,8 @@ pub const Tag = enum(short) {
     // Not really sure what this is.
     jsonpath = 4072,
     jsonpath_array = 4073,
+    // another oid for pg_database
+    pg_database_array2 = 10052,
     _,
 
     pub fn tagName(this: Tag) ?[]const u8 {
diff --git a/src/string_immutable.zig b/src/string_immutable.zig
index 5097f71947bc1f..365d1acda0bc56 100644
--- a/src/string_immutable.zig
+++ b/src/string_immutable.zig
@@ -3761,43 +3761,6 @@ pub fn utf16EqlString(text: []const u16, str: string) bool {
     return j == str.len;
 }
 
-pub fn encodeUTF8(cp: u32, buffer: []u8) ![]const u8 {
-    const HEADER_CONT_BYTE: u8 = 0b10000000;
-    const HEADER_2BYTE: u8 = 0b11000000;
-    const HEADER_3BYTE: u8 = 0b11100000;
-    const HEADER_4BYTE: u8 = 0b11100000;
-
-    return switch (cp) {
-        0x0...0x7F => {
-            if (buffer.len < 1) return error.BufferTooSmall;
-            buffer[0] = @intCast(cp);
-            return buffer[0..1];
-        },
-        0x80...0x7FF => {
-            if (buffer.len < 2) return error.BufferTooSmall;
-            buffer[0] = HEADER_2BYTE | @as(u8, @intCast(cp >> 6));
-            buffer[1] = HEADER_CONT_BYTE | @as(u8, @intCast(cp & 0b00111111));
-            return buffer[0..2];
-        },
-        0x800...0xFFFF => {
-            if (buffer.len < 3) return error.BufferTooSmall;
-            buffer[0] = HEADER_3BYTE | @as(u8, @intCast(cp >> 12));
-            buffer[1] = HEADER_CONT_BYTE | @as(u8, @intCast((cp >> 6) & 0b00111111));
-            buffer[2] = HEADER_CONT_BYTE | @as(u8, @intCast(cp & 0b00111111));
-            return buffer[0..3];
-        },
-        0x10000...0x10FFFF => {
-            if (buffer.len < 4) return error.BufferTooSmall;
-            buffer[0] = HEADER_4BYTE | @as(u8, @intCast(cp >> 18));
-            buffer[1] = HEADER_CONT_BYTE | @as(u8, @intCast((cp >> 12) & 0b00111111));
-            buffer[2] = HEADER_CONT_BYTE | @as(u8, @intCast((cp >> 6) & 0b00111111));
-            buffer[3] = HEADER_CONT_BYTE | @as(u8, @intCast(cp & 0b00111111));
-            return buffer[0..4];
-        },
-        else => error.InvalidCodepoint,
-    };
-}
-
 pub fn encodeUTF8Comptime(comptime cp: u32) []const u8 {
     const HEADER_CONT_BYTE: u8 = 0b10000000;
     const HEADER_2BYTE: u8 = 0b11000000;
diff --git a/test/js/sql/sql.test.ts b/test/js/sql/sql.test.ts
index bc6a6c112f3df1..8834fefc739dc6 100644
--- a/test/js/sql/sql.test.ts
+++ b/test/js/sql/sql.test.ts
@@ -10501,4 +10501,74 @@ if (isDockerEnabled()) {
       expect(result[0].large_unicode[0]["长文本"].length).toBe(1000);
     });
   });
+
+  describe("pg_database[] Array type", () => {
+    test("pg_database[] - empty array", async () => {
+      await using sql = postgres({ ...options, max: 1 });
+      const result = await sql`SELECT ARRAY[]::pg_database[] as empty_array`;
+      expect(result[0].empty_array).toEqual([]);
+    });
+
+    test("pg_database[] - system databases", async () => {
+      await using sql = postgres({ ...options, max: 1 });
+      const result = await sql`SELECT array_agg(d.*)::pg_database[] FROM pg_database d;`;
+      expect(result[0].array_agg[0]).toContain("(5,postgres,10,6,c,f,t,-1,717,1,1663,en_US.utf8,en_US.utf8,,2.36,)");
+    });
+
+    test("pg_database[] - null values", async () => {
+      await using sql = postgres({ ...options, max: 1 });
+      const result = await sql`
+        SELECT ARRAY[
+          NULL,
+          '(5,postgres,10,6,c,f,t,-1,716,1,1663,C,C,,,)'::pg_database,
+          NULL
+        ]::pg_database[] as array_with_nulls
+      `;
+      expect(result[0].array_with_nulls[0]).toBeNull();
+      expect(result[0].array_with_nulls[1]).toBe("(5,postgres,10,6,c,f,t,-1,716,1,1663,C,C,,,)");
+      expect(result[0].array_with_nulls[2]).toBeNull();
+    });
+
+    test("pg_database[] - null array", async () => {
+      await using sql = postgres({ ...options, max: 1 });
+      const result = await sql`SELECT NULL::pg_database[] as null_array`;
+      expect(result[0].null_array).toBeNull();
+    });
+  });
+
+  describe("aclitem[] Array type", () => {
+    test("aclitem[] - empty array", async () => {
+      await using sql = postgres({ ...options, max: 1 });
+      const result = await sql`SELECT ARRAY[]::aclitem[] as empty_array`;
+      expect(result[0].empty_array).toEqual([]);
+    });
+
+    test("aclitem[] system databases", async () => {
+      await using sql = postgres({ ...options, max: 1 });
+      const result = await sql`SELECT datacl FROM pg_database;`;
+      expect(result[0].datacl).toBeNull();
+      expect(result[result.length - 2].datacl).toEqual(["=c/postgres", "postgres=CTc/postgres"]);
+      expect(result[result.length - 1].datacl).toEqual(["=Tc/bun_sql_test", "bun_sql_test=CTc/bun_sql_test"]);
+    });
+
+    test("aclitem[] - null values", async () => {
+      await using sql = postgres({ ...options, max: 1 });
+      const result = await sql`
+        SELECT ARRAY[
+          NULL,
+          '=c/postgres'::aclitem,
+          NULL
+        ]::aclitem[] as array_with_nulls
+      `;
+      expect(result[0].array_with_nulls[0]).toBeNull();
+      expect(result[0].array_with_nulls[1]).toBe("=c/postgres");
+      expect(result[0].array_with_nulls[2]).toBeNull();
+    });
+
+    test("aclitem[] - null array", async () => {
+      await using sql = postgres({ ...options, max: 1 });
+      const result = await sql`SELECT NULL::aclitem[] as null_array`;
+      expect(result[0].null_array).toBeNull();
+    });
+  });
 }