Skip to content

Commit

Permalink
remove vacuum
Browse files Browse the repository at this point in the history
  • Loading branch information
AvidDabbler committed Feb 5, 2024
1 parent 6a87038 commit 3fcc5fb
Showing 1 changed file with 3 additions and 85 deletions.
88 changes: 3 additions & 85 deletions drizzle/backup.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
import fs from 'fs'
import sqlite3 from 'sqlite3'

import cron from 'node-cron';

Check failure on line 2 in drizzle/backup.ts

View workflow job for this annotation

GitHub Actions / ⬣ ESLint

There should be at least one empty line between import groups
import { envConfig } from '~/config.server';

Check failure on line 3 in drizzle/backup.ts

View workflow job for this annotation

GitHub Actions / ⬣ ESLint

There should be at least one empty line between import groups
import { S3Client, CopyObjectCommand, PutObjectCommand } from '@aws-sdk/client-s3'
import path from 'path';
import { S3Client, CopyObjectCommand } from '@aws-sdk/client-s3'

Check failure on line 4 in drizzle/backup.ts

View workflow job for this annotation

GitHub Actions / ⬣ ESLint

`@aws-sdk/client-s3` import should occur before import of `node-cron`

const s3Client = new S3Client({
region: "us-east-1", // Replace with your AWS region
Expand All @@ -28,86 +26,6 @@ export async function backupS3Object(bucket: string, sourceKey: string, destinat
}
}

/**
 * Compact the SQLite database at `dbPath` into a fresh backup file at
 * `backupPath` using `VACUUM INTO`.
 *
 * Fix over the previous version: `VACUUM INTO` takes a *filename*
 * expression, not an attached-schema name — `VACUUM INTO backup.main`
 * is a syntax error, and the ATTACH/DETACH around it was unnecessary.
 * The old `fs.copyFileSync(`${backupPath}.main`, …)` also copied a file
 * that was never created.
 *
 * @param dbPath     path of the live database (opened read/write)
 * @param backupPath destination file for the vacuumed copy
 */
const vacuumDb = (dbPath: string, backupPath: string) => {
  const db = new sqlite3.Database(dbPath, sqlite3.OPEN_READWRITE, (err) => {
    if (err) {
      console.error('Error opening the database:', err.message);
      return;
    }

    // VACUUM INTO refuses to overwrite an existing file, so clear any
    // stale backup from a previous run first.
    try {
      fs.rmSync(backupPath, { force: true });
    } catch (rmErr) {
      console.error('Error removing stale backup file:', rmErr);
    }

    // db.run (unlike db.exec) supports bound parameters; the filename in
    // VACUUM INTO is an SQL expression, so binding it avoids quoting bugs.
    db.run('VACUUM INTO ?;', [backupPath], (vacuumErr: Error | null) => {
      if (vacuumErr) {
        console.error('Error performing VACUUM:', vacuumErr.message);
      } else {
        console.log('VACUUM operation successful.');
        console.log('Backup saved to:', backupPath);
      }

      // Close the main database whether or not the vacuum succeeded.
      db.close((closeErr) => {
        if (closeErr) {
          console.error('Error closing the database:', closeErr.message);
        }
      });
    });
  });
}

/**
 * Upload every regular file found directly inside `directory` to the
 * configured S3 bucket, keyed as `${prefix}/${fileName}`.
 *
 * Subdirectories are skipped (no recursion). Failures are logged per
 * file and do not abort the remaining uploads.
 *
 * @param directory local directory whose files are uploaded
 * @param prefix    S3 key prefix prepended to each file name
 */
async function uploadFilesToS3(directory: string, prefix: string) {
  for (const entryName of fs.readdirSync(directory)) {
    const entryPath = path.join(directory, entryName);

    // Only plain files are uploaded; skip nested directories.
    if (fs.statSync(entryPath).isDirectory()) {
      continue;
    }

    const uploadParams = {
      Bucket: envConfig.S3_BUCKET,
      Key: `${prefix}/${entryName}`, // Include the prefix in the S3 key
      Body: fs.readFileSync(entryPath),
    };

    try {
      const uploadResult = await s3Client.send(new PutObjectCommand(uploadParams));
      console.log(`File uploaded successfully: ${entryName} - ETag: ${uploadResult.ETag}`);
    } catch (error) {
      console.error(`Error uploading file ${entryName} to S3:`, error);
    }
  }
}

// Scheduled backup pipeline: vacuum the DB locally, upload to S3, then
// rotate the S3-side copy.
// NOTE(review): '* * * * *' fires every minute — confirm that cadence is
// intended for a backup job.
cron.schedule('* * * * *', () => {
  // Compact the live database into a local backup file.
  vacuumDb('/drizzle/data.db', 'drizzle/backup')
  // NOTE(review): uploadFilesToS3 calls fs.readdirSync on its first
  // argument, i.e. it expects a *directory* — 'drizzle/data.db' looks like
  // a file path and would throw ENOTDIR; verify the intended argument.
  uploadFilesToS3('drizzle/data.db', '/db-backup/data.db')
  // NOTE(review): these three calls mix absolute ('/drizzle/...') and
  // relative ('drizzle/...') paths for what appears to be the same file —
  // confirm which form is correct for the runtime working directory.
  backupS3Object(envConfig.S3_BUCKET, 'data.db', '/drizzle/data.db')
})

0 comments on commit 3fcc5fb

Please sign in to comment.