Skip to content

Commit b6df242

Browse files
vrtndroyvardhan
authored and committed
single file
1 parent 1c69484 commit b6df242

File tree

1 file changed

+37
-4
lines changed

1 file changed

+37
-4
lines changed

src/cli/backup.js

Lines changed: 37 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,47 @@
11
const dotenv = require("dotenv");
2+
dotenv.config();
3+
24
const dayjs = require("dayjs");
35
const zlib = require("zlib");
4-
const { sql } = require("../utils/db");
5-
const { storeBackup } = require("../utils/s3");
6-
7-
dotenv.config();
6+
const postgres = require("postgres");
7+
const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
8+
9+
// Database connection string: prefer an explicit DB_URL, otherwise assemble
// one from the individual PSQL_* environment variables.
// NOTE(review): port 5433 (not the default 5432) — presumably intentional;
// confirm against the deployment configuration.
const connectionString =
  process.env.DB_URL ||
  `postgresql://${process.env.PSQL_USERNAME}:${process.env.PSQL_PW}@${process.env.PSQL_URL}:5433/postgres`;

// Pool settings for the backup job: a small, short-lived pool.
const poolOptions = {
  idle_timeout: 120, // close connections idle for 2 minutes
  max_lifetime: 60 * 30, // recycle connections after 30 minutes
  max: 3, // batch job — a few connections suffice
  connect_timeout: 30,
  keep_alive: true,
};

const sql = postgres(connectionString, poolOptions);
20+
21+
// S3 client for storing backups. Only constructed when both the endpoint and
// the region are configured; otherwise left as null so callers can detect a
// missing backup configuration.
const backupConfigured = Boolean(
  process.env.BB_AWS_S3_ENDPOINT && process.env.BB_AWS_REGION
);

const backupClient = backupConfigured
  ? new S3Client({
      endpoint: process.env.BB_AWS_S3_ENDPOINT,
      region: process.env.BB_AWS_REGION,
      // Path-style addressing — needed by most S3-compatible object stores.
      forcePathStyle: true,
    })
  : null;
29+
30+
// FIXME(review): `backupBucket` was referenced in storeBackup but never
// defined anywhere in this file, so every call threw a ReferenceError.
// Defaulting to an environment variable here — confirm the variable name
// against the deployment configuration.
const backupBucket = process.env.BB_AWS_S3_BUCKET;

/**
 * Upload a backup object to the configured S3-compatible store.
 *
 * @param {string} key - Object key to write under the backup bucket.
 * @param {string|Buffer|Uint8Array} body - Object contents.
 * @returns {Promise<void>} Resolves when the upload completes.
 * @throws {Error} If the S3 client is not configured (missing
 *   BB_AWS_S3_ENDPOINT / BB_AWS_REGION) or the upload fails.
 */
async function storeBackup(key, body) {
  // backupClient is null when the S3 env vars are absent; fail with a clear
  // message instead of an opaque TypeError on `.send`.
  if (!backupClient) {
    throw new Error(
      "S3 backup client is not configured: set BB_AWS_S3_ENDPOINT and BB_AWS_REGION"
    );
  }

  const command = new PutObjectCommand({
    Bucket: backupBucket,
    Key: key,
    Body: body,
  });

  await backupClient.send(command);
}
840

941
// Backup tuning knobs, overridable via the environment. `||` (not `??`) is
// deliberate: an empty-string env value also falls back to the default.
const { BACKUP_PREFIX: prefixOverride, DAY_BATCH_SIZE: batchOverride } =
  process.env;

const BACKUP_PREFIX = prefixOverride || "transactions/daily-backup";
const DAY_BATCH_SIZE = Number(batchOverride || "100000");
1143

44+
1245
async function backupSingleDay(dayStart) {
1346
const start = dayStart.startOf("day").toISOString();
1447
const end = dayStart.add(1, "day").startOf("day").toISOString();

0 commit comments

Comments
 (0)