This repository has been archived by the owner on Jan 3, 2022. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
/
index.js
109 lines (95 loc) · 3.39 KB
/
index.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
/*
Exports all databases and tables. Stores in folders according to "date" in a similar format to npm logs
Dates are UTC but do not contain the timezone offset.
Uses streams to try and avoid storing massive tables in memory, but the way this is done is a bit "Hacky" so be aware of that.
(I manually write commas and array [] parts)
By default ignores the "rethinkdb" system db (see `blacklist` below).
Dependencies:
* rethinkdbdash
*/
// At a later date I'll make it read these in from passed values
// Databases that must never be exported (system db).
const blacklist = ["rethinkdb"];
const r = require("rethinkdbdash")();
const { join } = require("path");
const { stat, mkdir, createWriteStream } = require("fs");
// Folder name for this run: UTC timestamp with ":" replaced (filesystem-safe)
// and milliseconds dropped, e.g. "2022-01-03T12_34_56".
const now = new Date();
const dateStr = now.toJSON().replace(/:/g, "_").split(".")[0];
const backupBase = join(__dirname, "backup", dateStr);
/*
 Walks every non-blacklisted database, backs up each of its tables one at a
 time (so only one cursor/file is open at once), and logs a per-database
 pass/fail summary. Individual table failures are caught and counted rather
 than aborting the run.
*/
async function backup () {
  // Root folder for this run must exist before any table is written.
  await ensureExits(backupBase);
  const databases = await r.dbList();
  for (const db of databases) {
    if (blacklist.includes(db)) continue;
    const tables = await r.db(db).tableList();
    let passed = 0;
    let failed = 0;
    for (const table of tables) {
      try {
        const resp = await backupTable(db, table);
        if (resp) {
          passed++;
          console.log(`Backed up ${table}`);
        }
      } catch (e) {
        console.error(`Failed to backup table ${table}. Error: ${e.message}\n\n${e.stack}`);
        failed++;
      }
    }
    console.log(`${db} Backup Finished. ${passed} passes, ${failed} fails.`);
  }
}
/*
 Streams every row of db.tableName into <backupBase>/<db>/<tableName>.json as a
 pretty-printed JSON array without buffering the whole table in memory (the
 "[", commas and "]" are written by hand around each streamed document).
 Resolves true on success; rejects on any file or cursor error.
 Fixes vs. the original:
 - on a file error the cursor stream is now destroyed, instead of being left
   running and pushing rows into a dead write stream;
 - a failed backup no longer gets a closing "]" written, so a truncated file
   is visibly invalid JSON instead of looking complete;
 - the ensureExits promise is awaited directly rather than being re-wrapped
   in an outer new Promise.
*/
async function backupTable (db, tableName) {
  // Ensure the per-database folder exists before opening the file.
  await ensureExits(join(backupBase, db));
  const target = join(backupBase, db, tableName);
  return new Promise(function (resolve, reject) {
    const file = createWriteStream(`${target}.json`);
    const cursor = r.db(db).table(tableName).toStream();
    let isFirst = true;
    let done = false;
    function fail (err) {
      if (done) return; // only settle once, even if both streams error
      done = true;
      // NOTE(review): assumes rethinkdbdash's toStream() is a standard
      // Node readable supporting destroy() — confirm against the driver.
      cursor.destroy();
      file.destroy();
      reject(err);
    }
    file.on('error', fail);
    file.write("[");
    cursor
      .on('error', fail)
      .on('data', function (doc) {
        const val = JSON.stringify(doc, null, 2);
        // First element gets no separator; every later one is ",\n"-prefixed.
        file.write(`${isFirst ? "" : ",\n"}${val}`);
        isFirst = false;
      })
      .on('end', function () {
        if (done) return;
        done = true;
        file.write("\n]");
        file.end();
        resolve(true);
      });
  });
}
/*
 Ensures the directory at `path` exists, creating it (and any missing parent
 folders, via mkdir's recursive option) when it does not.
 Resolves true on success; rejects on any fs error other than "not found".
 Fixes vs. the original: the mkdir callback used to call resolve(true) even
 after reject(err) (missing return), and logged err.code as stray debug output.
 (Name kept as-is — "Exits" is a historical typo for "Exists" — because both
 backup() and backupTable() call it by this name.)
*/
function ensureExits (path) {
  return new Promise(function (resolve, reject) {
    stat(path, function (err) {
      if (!err) return resolve(true); // already exists
      if (err.code !== "ENOENT") return reject(err); // e.g. permission error
      mkdir(path, { recursive: true }, function (mkErr) {
        if (mkErr) return reject(mkErr);
        resolve(true);
      });
    });
  });
}
backup();