Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix(s3) support supabase s3 #17163

Closed
wants to merge 3 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/env_loader.zig
Original file line number Diff line number Diff line change
Expand Up @@ -144,9 +144,9 @@ pub const Loader = struct {
region = region_;
}
if (this.get("S3_ENDPOINT")) |endpoint_| {
endpoint = bun.URL.parse(endpoint_).host;
endpoint = bun.URL.parse(endpoint_).hostWithPath();
} else if (this.get("AWS_ENDPOINT")) |endpoint_| {
endpoint = bun.URL.parse(endpoint_).host;
endpoint = bun.URL.parse(endpoint_).hostWithPath();
}
if (this.get("S3_BUCKET")) |bucket_| {
bucket = bucket_;
Expand Down
40 changes: 24 additions & 16 deletions src/s3/credentials.zig
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ pub const S3Credentials = struct {
new_credentials._endpointSlice = str.toUTF8(bun.default_allocator);
const endpoint = new_credentials._endpointSlice.?.slice();
const url = bun.URL.parse(endpoint);
const normalized_endpoint = url.host;
const normalized_endpoint = url.hostWithPath();
if (normalized_endpoint.len > 0) {
new_credentials.credentials.endpoint = normalized_endpoint;

Expand Down Expand Up @@ -524,7 +524,29 @@ pub const S3Credentials = struct {
var bucket_buffer: [63]u8 = undefined;
bucket = encodeURIComponent(bucket, &bucket_buffer, false) catch return error.InvalidPath;
path = encodeURIComponent(path, &path_buffer, false) catch return error.InvalidPath;
const normalizedPath = std.fmt.bufPrint(&normalized_path_buffer, "/{s}/{s}", .{ bucket, path }) catch return error.InvalidPath;
// Default to https. Only use http if they explicitly pass "http://" as the endpoint.
const protocol = if (this.insecure_http) "http" else "https";

// detect service name and host from region or endpoint
var endpoint = this.endpoint;
var extra_path: []const u8 = "";
const host = brk_host: {
if (this.endpoint.len > 0) {
if (this.endpoint.len >= 2048) return error.InvalidEndpoint;
var host = this.endpoint;
if (bun.strings.indexOf(this.endpoint, "/")) |index| {
host = this.endpoint[0..index];
extra_path = this.endpoint[index..];
}
// only the host part is needed here
break :brk_host try bun.default_allocator.dupe(u8, host);
} else {
endpoint = try std.fmt.allocPrint(bun.default_allocator, "s3.{s}.amazonaws.com", .{region});
break :brk_host endpoint;
}
};
errdefer bun.default_allocator.free(host);
const normalizedPath = std.fmt.bufPrint(&normalized_path_buffer, "{s}/{s}/{s}", .{ extra_path, bucket, path }) catch return error.InvalidPath;

const date_result = getAMZDate(bun.default_allocator);
const amz_date = date_result.date;
Expand Down Expand Up @@ -595,22 +617,8 @@ pub const S3Credentials = struct {
}
};

// Default to https. Only use http if they explicitly pass "http://" as the endpoint.
const protocol = if (this.insecure_http) "http" else "https";

// detect service name and host from region or endpoint
const host = brk_host: {
if (this.endpoint.len > 0) {
if (this.endpoint.len >= 512) return error.InvalidEndpoint;
break :brk_host try bun.default_allocator.dupe(u8, this.endpoint);
} else {
break :brk_host try std.fmt.allocPrint(bun.default_allocator, "s3.{s}.amazonaws.com", .{region});
}
};
const service_name = "s3";

errdefer bun.default_allocator.free(host);

const aws_content_hash = if (content_hash) |hash| hash else ("UNSIGNED-PAYLOAD");
var tmp_buffer: [4096]u8 = undefined;

Expand Down
14 changes: 14 additions & 0 deletions src/url.zig
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,20 @@ pub const URL = struct {
/// True when this URL's scheme is `file`.
pub fn isFile(this: *const URL) bool {
    const scheme = this.protocol;
    return strings.eqlComptime(scheme, "file");
}
/// Returns the URL's host joined with its path — no protocol, search params,
/// hash, or trailing slash. Lets S3 endpoints that carry a path component
/// (e.g. `project.supabase.co/storage/v1/s3`) survive normalization, where
/// using `host` alone would drop the path.
pub fn hostWithPath(this: *const URL) []const u8 {
    if (this.host.len > 0) {
        // A path of just "/" (len == 1) adds nothing; only splice the path in
        // when it has real content.
        if (this.path.len > 1) {
            // NOTE(review): this rebuilds the combined span with pointer
            // arithmetic, which assumes `host` and `path` are contiguous
            // sub-slices of the same backing buffer (the parsed href) with no
            // intervening bytes — TODO confirm URL.parse guarantees that
            // layout (e.g. when a port or userinfo is present).
            const end = @intFromPtr(this.path.ptr) + this.path.len;
            const start = @intFromPtr(this.host.ptr);
            // Trim a single trailing "/" so callers always get "host/path",
            // never "host/path/".
            const len: usize = end - start - (if (bun.strings.endsWithComptime(this.path, "/")) @as(usize, 1) else @as(usize, 0));
            const ptr: [*]u8 = @ptrFromInt(start);
            return ptr[0..len];
        }
        // No meaningful path: the host alone is the answer.
        return this.host;
    }
    // No host was parsed — nothing sensible to return.
    return "";
}

pub fn isBlob(this: *const URL) bool {
return this.href.len == JSC.WebCore.ObjectURLRegistry.specifier_len and strings.hasPrefixComptime(this.href, "blob:");
Expand Down
115 changes: 83 additions & 32 deletions test/js/bun/s3/s3.test.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { describe, expect, it, beforeAll, afterAll } from "bun:test";
import { describe, expect, it, beforeEach, afterEach } from "bun:test";
import { bunExe, bunEnv, getSecret, tempDirWithFiles, isLinux } from "harness";
import { randomUUID } from "crypto";
import { S3Client, s3 as defaultS3, file, which } from "bun";
Expand Down Expand Up @@ -30,6 +30,14 @@ const allCredentials = [
bucket: getSecret("S3_R2_BUCKET"),
service: "R2" as string,
},
{
accessKeyId: getSecret("S3_SUPABASE_ACCESS_KEY"),
secretAccessKey: getSecret("S3_SUPABASE_SECRET_KEY"),
endpoint: getSecret("S3_SUPABASE_ENDPOINT"),
bucket: getSecret("S3_SUPABASE_BUCKET"),
region: getSecret("S3_SUPABASE_REGION"),
service: "supabase" as string,
},
];

if (isDockerEnabled()) {
Expand Down Expand Up @@ -107,22 +115,31 @@ for (let credentials of allCredentials) {
describe(bucketInName ? "bucket in path" : "bucket in options", () => {
var tmp_filename: string;
const options = bucketInName ? s3Options : { ...s3Options, bucket: S3Bucket };
beforeAll(async () => {
beforeEach(async () => {
// await a little bit so we don't change the filename before deleting it
await Bun.sleep(10);
tmp_filename = bucketInName ? `s3://${S3Bucket}/${randomUUID()}` : `s3://${randomUUID()}`;
const result = await fetch(tmp_filename, {
method: "PUT",
body: "Hello Bun!",
s3: options,
});
expect(result.status).toBe(200);
await result.text();
});

afterAll(async () => {
const result = await fetch(tmp_filename, {
method: "DELETE",
s3: options,
});
expect(result.status).toBe(204);
afterEach(async () => {
try {
const result = await fetch(tmp_filename, {
method: "DELETE",
s3: options,
});
expect([204, 200]).toContain(result.status);
await result.text();
} catch (e) {
// if error with NoSuchKey, it means the file does not exist and it's fine
expect(e?.code || e).toBe("NoSuchKey");
}
});

it("should download file via fetch GET", async () => {
Expand Down Expand Up @@ -208,22 +225,30 @@ for (let credentials of allCredentials) {

describe("Bun.S3Client", () => {
describe(bucketInName ? "bucket in path" : "bucket in options", () => {
const tmp_filename = bucketInName ? `${S3Bucket}/${randomUUID()}` : `${randomUUID()}`;
let tmp_filename: string;
const options = bucketInName ? null : { bucket: S3Bucket };

var bucket = S3(s3Options);
beforeAll(async () => {
beforeEach(async () => {
await Bun.sleep(10);
tmp_filename = bucketInName ? `${S3Bucket}/${randomUUID()}` : `${randomUUID()}`;
const file = bucket.file(tmp_filename, options);
await file.write("Hello Bun!");
});

afterAll(async () => {
const file = bucket.file(tmp_filename, options);
await file.unlink();
afterEach(async () => {
try {
const file = bucket.file(tmp_filename, options);
await file.unlink();
} catch (e) {
// if error with NoSuchKey, it means the file does not exist and it's fine
expect(e?.code || e).toBe("NoSuchKey");
}
});

it("should download file via Bun.s3().text()", async () => {
const file = bucket.file(tmp_filename, options);
await file.write("Hello Bun!");
const text = await file.text();
expect(text).toBe("Hello Bun!");
});
Expand Down Expand Up @@ -357,16 +382,23 @@ for (let credentials of allCredentials) {

describe("Bun.file", () => {
describe(bucketInName ? "bucket in path" : "bucket in options", () => {
const tmp_filename = bucketInName ? `s3://${S3Bucket}/${randomUUID()}` : `s3://${randomUUID()}`;
let tmp_filename: string;
const options = bucketInName ? s3Options : { ...s3Options, bucket: S3Bucket };
beforeAll(async () => {
beforeEach(async () => {
await Bun.sleep(10);
tmp_filename = bucketInName ? `s3://${S3Bucket}/${randomUUID()}` : `s3://${randomUUID()}`;
const s3file = file(tmp_filename, options);
await s3file.write("Hello Bun!");
});

afterAll(async () => {
const s3file = file(tmp_filename, options);
await s3file.unlink();
afterEach(async () => {
try {
const s3file = file(tmp_filename, options);
await s3file.unlink();
} catch (e) {
// if error with NoSuchKey, it means the file does not exist and it's fine
expect(e?.code || e).toBe("NoSuchKey");
}
});

it("should download file via Bun.file().text()", async () => {
Expand Down Expand Up @@ -450,16 +482,23 @@ for (let credentials of allCredentials) {

describe("Bun.s3", () => {
describe(bucketInName ? "bucket in path" : "bucket in options", () => {
const tmp_filename = bucketInName ? `${S3Bucket}/${randomUUID()}` : `${randomUUID()}`;
let tmp_filename: string;
const options = bucketInName ? s3Options : { ...s3Options, bucket: S3Bucket };
beforeAll(async () => {
beforeEach(async () => {
await Bun.sleep(10);
tmp_filename = bucketInName ? `${S3Bucket}/${randomUUID()}` : `${randomUUID()}`;
const s3file = s3(tmp_filename, options);
await s3file.write("Hello Bun!");
});

afterAll(async () => {
const s3file = s3(tmp_filename, options);
await s3file.unlink();
afterEach(async () => {
try {
const s3file = s3(tmp_filename, options);
await s3file.unlink();
} catch (e) {
// if error with NoSuchKey, it means the file does not exist and it's fine
expect(e?.code || e).toBe("NoSuchKey");
}
});

it("should download file via Bun.s3().text()", async () => {
Expand Down Expand Up @@ -553,10 +592,20 @@ for (let credentials of allCredentials) {
}, 10_000);

describe("readable stream", () => {
afterAll(async () => {
afterEach(async () => {
await Promise.all([
s3(tmp_filename + "-readable-stream", options).unlink(),
s3(tmp_filename + "-readable-stream-big", options).unlink(),
s3(tmp_filename + "-readable-stream", options)
.unlink()
.catch(e => {
// if error with NoSuchKey, it means the file does not exist and it's fine
expect(e?.code || e).toBe("NoSuchKey");
}),
s3(tmp_filename + "-readable-stream-big", options)
.unlink()
.catch(e => {
// if error with NoSuchKey, it means the file does not exist and it's fine
expect(e?.code || e).toBe("NoSuchKey");
}),
]);
});
it("should work with small files", async () => {
Expand All @@ -576,7 +625,6 @@ for (let credentials of allCredentials) {
}
expect(bytes).toBe(10);
expect(Buffer.concat(chunks)).toEqual(Buffer.from("Hello Bun!"));
s3file.delete();
});
it("should work with large files ", async () => {
const s3file = s3(tmp_filename + "-readable-stream-big", options);
Expand All @@ -602,14 +650,14 @@ for (let credentials of allCredentials) {
const SHA1_2 = Bun.SHA1.hash(bigishPayload, "hex");
expect(SHA1).toBe(SHA1_2);
}
s3file.delete();
}, 30_000);
});
});
});
}
describe("special characters", () => {
it("should allow special characters in the path", async () => {
// supabase will throw InvalidKey
it.skipIf(credentials.service === "supabase")("should allow special characters in the path", async () => {
const options = { ...s3Options, bucket: S3Bucket };
const s3file = s3(`🌈🦄${randomUUID()}.txt`, options);
await s3file.write("Hello Bun!");
Expand Down Expand Up @@ -716,7 +764,10 @@ for (let credentials of allCredentials) {
const dir = tempDirWithFiles("fsr", {
"hello.txt": "",
});
await Bun.write(s3("test.txt", { ...s3Options, bucket: S3Bucket }), file(path.join(dir, "hello.txt")));
const tmp_filename = `${randomUUID()}.txt`;

await Bun.write(s3(tmp_filename, { ...s3Options, bucket: S3Bucket }), file(path.join(dir, "hello.txt")));
await s3(tmp_filename, { ...s3Options, bucket: S3Bucket }).unlink();
});
it("Bun.write(s3file, file) should throw if the file does not exist", async () => {
try {
Expand All @@ -739,7 +790,7 @@ for (let credentials of allCredentials) {
);
expect.unreachable();
} catch (e: any) {
expect(["AccessDenied", "NoSuchBucket"]).toContain(e?.code);
expect(["AccessDenied", "NoSuchBucket", "NoSuchKey"]).toContain(e?.code);
expect(e?.path).toBe("do-not-exist.txt");
expect(e?.name).toBe("S3Error");
}
Expand Down Expand Up @@ -941,7 +992,7 @@ for (let credentials of allCredentials) {
await Promise.all(
[s3, (path, ...args) => S3(...args).file(path)].map(async fn => {
let options = { ...s3Options, bucket: S3Bucket };
options.endpoint = Buffer.alloc(1024, "a").toString();
options.endpoint = Buffer.alloc(2048, "a").toString();

try {
const s3file = fn(randomUUID(), options);
Expand Down