deploy.js
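// Uploads a local directory tree to an S3 bucket, gzipping JSON/glTF/bin
// assets and setting per-file content types and cache headers.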
const program = require("commander");
const promptly = require("promptly");
const AWS = require("aws-sdk");
const glob = require("glob-promise");
const fs = require("fs");
const zlib = require("zlib");
const path = require("path");
const { version } = require("../package.json");
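
// CLI: deploy.js <src> <bucket> <dest>, where <src> is the local directory,
// <bucket> the target S3 bucket, and <dest> the key prefix to upload under.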
program
.version(version)
.arguments("<src> <bucket> <dest>")
.parse(process.argv);
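
// Example invocation (script path, bucket, and directories are illustrative):
//   node scripts/deploy.js ./dist my-asset-bucket assets/

// Globs every file under srcDir and uploads each one to the bucket,
// preserving the directory structure beneath destDir and logging the
// outcome per file.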
async function uploadFiles(s3Client, srcDir, bucket, destDir) {
const files = await glob(path.join(srcDir, "**", "*"), {
absolute: true,
nodir: true
});
const rootDir = path.resolve(srcDir);
const uploadTasks = files.map(filePath =>
createUploadTask(s3Client, rootDir, filePath, bucket, destDir)
);
const uploadPromises = uploadTasks.map((req, idx) =>
req
.promise()
.then(() => {
console.log(`Successfully uploaded: ${files[idx]}`);
})
.catch(err => {
console.error(`Error uploading: ${files[idx]}`);
throw err;
})
);
await Promise.all(uploadPromises);
}
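
// Builds a single S3 upload request for one file. Windows backslashes in the
// relative path are normalized to forward slashes so keys stay portable.
// Note that destDir is prepended verbatim, so it should end with a "/".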
function createUploadTask(s3Client, rootDir, filePath, bucket, destDir) {
const extension = path.extname(filePath);
const fileStream = createFileStream(filePath, extension);
const key = destDir + path.relative(rootDir, filePath).replace(/\\/g, "/");
const req = s3Client.upload({
ACL: "public-read",
Body: fileStream,
Bucket: bucket,
CacheControl: getCacheControl(filePath),
ContentEncoding: shouldGzip(extension) ? "gzip" : undefined,
ContentType: getContentType(filePath, extension),
Key: key
});
return req;
}
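
// Text-based asset formats that are gzip-compressed before upload.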
const GZIPPED_EXTENSIONS = [".json", ".gltf", ".bin"];
function shouldGzip(extension) {
return GZIPPED_EXTENSIONS.indexOf(extension) !== -1;
}
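
// Returns a read stream for the file, piped through gzip when the extension
// is one of the compressed types above.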
function createFileStream(filePath, extension) {
const readStream = fs.createReadStream(filePath);
if (shouldGzip(extension)) {
const gzipStream = zlib.createGzip();
return readStream.pipe(gzipStream);
}
return readStream;
}
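
// Maps a file extension to its MIME type; an unrecognized extension throws
// and fails the deploy rather than uploading with a wrong Content-Type.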
function getContentType(filePath, extension) {
switch (extension) {
case ".json":
return "application/json";
case ".gltf":
return "model/gltf+json";
case ".bin":
return "application/octet-stream";
case ".png":
return "image/png";
case ".jpeg":
return "image/jpeg";
case ".jpg":
return "image/jpeg";
default:
throw new Error(`Unsupported file type ${extension} for ${filePath}`);
}
}
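
// JSON files are served with no-cache headers so clients always revalidate;
// all other files get a one-year public cache lifetime.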
function getCacheControl(filePath) {
if (filePath.endsWith(".json")) {
return "no-cache, no-store, must-revalidate";
}
return "public, max-age=31536000";
}
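
// Entry point: confirms the target bucket, then uploads. AWS credentials must
// already be resolved on AWS.config (for example from the AWS_ACCESS_KEY_ID /
// AWS_SECRET_ACCESS_KEY environment variables).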
(async function execute() {
const normalizedPath = path.normalize(program.args[0]);
const bucket = program.args[1];
const destPath = program.args[2];
if (
await promptly.confirm(
`Are you sure you wish to deploy to ${bucket}? (y/n)`
)
) {
const s3 = new AWS.S3({
accessKeyId: AWS.config.credentials.accessKeyId,
secretAccessKey: AWS.config.credentials.secretAccessKey
});
await uploadFiles(s3, normalizedPath, bucket, destPath);
console.log("Done!");
process.exit(0);
}
})().catch(e => {
console.error(e);
process.exit(1);
});