Deploy command
This basically works, but some things can be improved or need a closer look; cf. the TODOs in the code. Co-authored-by: Armaël Guéneau <armael.gueneau@ens-lyon.org> Co-authored-by: Quentin Dufour <quentin@deuxfleurs.fr> Reviewed-on: Deuxfleurs/dfl#9 Co-authored-by: Armael <armael@noreply.localhost> Co-committed-by: Armael <armael@noreply.localhost>
This commit is contained in:
parent
15c43196ad
commit
5c63a408f6
5 changed files with 1877 additions and 337 deletions
2
auth.ts
2
auth.ts
|
@ -1,4 +1,4 @@
|
|||
import { Configuration, WebsiteApi, ResponseError } from "guichet-sdk-ts";
|
||||
import { Configuration, WebsiteApi } from "guichet-sdk-ts";
|
||||
import { read } from 'read';
|
||||
import path from 'node:path';
|
||||
import fs from 'node:fs/promises';
|
||||
|
|
147
deploy.ts
Normal file
147
deploy.ts
Normal file
|
@ -0,0 +1,147 @@
|
|||
import fs from "fs";
|
||||
import path from "path";
|
||||
import mime from "mime";
|
||||
import { WebsiteApi } from "guichet-sdk-ts";
|
||||
import {
|
||||
S3Client,
|
||||
ListObjectsV2Command,
|
||||
DeleteObjectsCommand,
|
||||
DeleteObjectsCommandOutput,
|
||||
} from "@aws-sdk/client-s3";
|
||||
import { Upload } from "@aws-sdk/lib-storage";
|
||||
import { openApiConf } from "./auth";
|
||||
|
||||
// Walks through the local directory at path `dir`, and for each file it contains, returns :
|
||||
// - `localPath`: its path on the local filesystem (includes `dir`). On windows, this path
|
||||
// will typically use `\` as separator.
|
||||
// - `s3Path`: an equivalent path as we would store it in an S3 bucket, using '/' as separator.
|
||||
// This path includes `s3Prefix` as a prefix if provided. If `s3Prefix` is null, `s3Path`
|
||||
// is relative to the root (of the form "a/b/c", instead of "/a/b/c" if `s3Prefix` is "").
|
||||
async function getLocalFiles(dir: string, s3Prefix: string | null): Promise<{ localPath: string, s3Path: string}[]> {
|
||||
const entries = await fs.promises.readdir(dir, { withFileTypes: true });
|
||||
const files = await Promise.all(entries.map(entry => {
|
||||
const localPath = path.join(dir, entry.name);
|
||||
const s3Path = s3Prefix ? s3Prefix + "/" + entry.name : entry.name;
|
||||
if (entry.isDirectory()) {
|
||||
return getLocalFiles(localPath, s3Path)
|
||||
} else {
|
||||
return Promise.resolve([{ localPath, s3Path }])
|
||||
}
|
||||
}));
|
||||
return files.flat()
|
||||
}
|
||||
|
||||
async function getBucketFiles(client: S3Client, Bucket: string): Promise<string[]> {
|
||||
const files = [];
|
||||
let done = false;
|
||||
let cmd = new ListObjectsV2Command({ Bucket });
|
||||
while (!done) {
|
||||
const resp = await client.send(cmd);
|
||||
if (resp.$metadata.httpStatusCode != 200) {
|
||||
// TODO: better error handling?
|
||||
console.log(resp);
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
for (var item of resp.Contents!) {
|
||||
files.push(item.Key!)
|
||||
}
|
||||
|
||||
if (resp.NextContinuationToken) {
|
||||
cmd = new ListObjectsV2Command({
|
||||
Bucket,
|
||||
ContinuationToken: resp.NextContinuationToken
|
||||
})
|
||||
} else {
|
||||
done = true
|
||||
}
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
async function uploadFile(client: S3Client, Bucket: string, Key: string, Body: any) {
|
||||
// use `path.posix` because `Key` is a path in a bucket that uses `/` as separator.
|
||||
let ContentType = mime.getType(path.posix.extname(Key)) ?? undefined;
|
||||
// add charset=utf-8 by default on text files (TODO: allow the user to override this)
|
||||
if (ContentType && ContentType.startsWith("text/")) {
|
||||
ContentType = ContentType + "; charset=utf-8";
|
||||
}
|
||||
const parallelUpload = new Upload({ client, params: { Bucket, Key, Body, ContentType } });
|
||||
parallelUpload.on("httpUploadProgress", progress => {
|
||||
process.stdout.write("Sent " + progress.Key);
|
||||
if (! (progress.loaded == progress.total && progress.part == 1)) {
|
||||
process.stdout.write(" (" + progress.loaded + "/" + progress.total + ")");
|
||||
}
|
||||
process.stdout.write("\n")
|
||||
});
|
||||
await parallelUpload.done();
|
||||
}
|
||||
|
||||
async function deleteFiles(client: S3Client, Bucket: string, files: string[]): Promise<DeleteObjectsCommandOutput | null> {
|
||||
if (files.length == 0) {
|
||||
return null
|
||||
}
|
||||
return await client.send(new DeleteObjectsCommand({
|
||||
Bucket,
|
||||
Delete: {
|
||||
Objects: files.map(f => { return { Key: f }}),
|
||||
},
|
||||
}));
|
||||
}
|
||||
|
||||
export async function deploy(vhost: string, localFolder: string) {
|
||||
const conf = await openApiConf();
|
||||
|
||||
// Get paths of the local files to deploy
|
||||
const localFiles = await getLocalFiles(localFolder, "").catch(err => {
|
||||
if (err.errno = -2) {
|
||||
console.log(`Error: directory '${localFolder}' does not exist`);
|
||||
} else {
|
||||
console.log(err);
|
||||
}
|
||||
process.exit(1)
|
||||
});
|
||||
|
||||
// Get website info from guichet (bucket name and keys)
|
||||
const api = new WebsiteApi(conf);
|
||||
let vhostInfo = await api.getWebsite({ vhost }).catch(err => {
|
||||
if (err.response.status == 404) {
|
||||
console.log(`Error: website '${vhost}' does not exist`);
|
||||
} else {
|
||||
console.log(err);
|
||||
}
|
||||
process.exit(1)
|
||||
});
|
||||
|
||||
// List the files currently stored in the bucket
|
||||
const s3client = new S3Client({
|
||||
endpoint: "https://garage.deuxfleurs.fr",
|
||||
region: "garage",
|
||||
forcePathStyle: true,
|
||||
credentials: {
|
||||
accessKeyId: vhostInfo.accessKeyId!,
|
||||
secretAccessKey: vhostInfo.secretAccessKey!,
|
||||
},
|
||||
});
|
||||
const Bucket = vhostInfo.vhost!.name!;
|
||||
const remoteFiles = await getBucketFiles(s3client, Bucket);
|
||||
|
||||
// Delete files that are present in the bucket but not locally.
|
||||
// Do this before sending the new files to avoid hitting the size quota
|
||||
// unnecessarily.
|
||||
const resp = await deleteFiles(
|
||||
s3client,
|
||||
Bucket,
|
||||
remoteFiles.filter(f => !localFiles.find(({ s3Path }) => s3Path == f))
|
||||
);
|
||||
if (resp && resp!.$metadata.httpStatusCode != 200) {
|
||||
// TODO: better error handling?
|
||||
console.log(resp);
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
// Upload the local files into the bucket
|
||||
for (var { localPath, s3Path } of localFiles) {
|
||||
uploadFile(s3client, Bucket, s3Path,fs.createReadStream(localPath))
|
||||
}
|
||||
}
|
5
index.ts
5
index.ts
|
@ -1,5 +1,6 @@
|
|||
import { program } from "commander";
|
||||
import { login } from "./auth";
|
||||
import { deploy } from "./deploy";
|
||||
import { vhostsList } from "./vhosts";
|
||||
|
||||
program
|
||||
|
@ -20,8 +21,6 @@ program.command('deploy')
|
|||
.description('Deploy your website')
|
||||
.argument('<vhost>', 'selected vhost')
|
||||
.argument('<local_folder>', 'your local folder')
|
||||
.action((_str, _options) => {
|
||||
console.log("todo deploy");
|
||||
});
|
||||
.action(deploy)
|
||||
|
||||
program.parse();
|
||||
|
|
2057
package-lock.json
generated
2057
package-lock.json
generated
File diff suppressed because it is too large
Load diff
|
@ -13,9 +13,12 @@
|
|||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-s3": "^3.750.0",
|
||||
"@aws-sdk/lib-storage": "^3.750.0",
|
||||
"@types/node": "^22.13.5",
|
||||
"commander": "^13.1.0",
|
||||
"guichet-sdk-ts": "git+https://git.deuxfleurs.fr/Deuxfleurs/guichet-sdk-ts",
|
||||
"mime": "^4.0.6",
|
||||
"read": "^4.1.0",
|
||||
"tsx": "^4.19.3"
|
||||
}
|
||||
|
|
Loading…
Add table
Reference in a new issue