v0 -> v1 db, support adding multiple users
This commit is contained in:
parent
3f7f929972
commit
d76871d3ac
5 changed files with 120 additions and 37 deletions
20
lib/schema.js
Normal file
20
lib/schema.js
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
/**
 * Returns valid db schema.
 * @param {Object} [userList={}] - map of username -> user schema entry
 * @returns {{version: string, userList: Object}} db object at the current schema version
 */
export const dbSchema = (userList = {}) => {
  return {
    version: '1.0.0',
    userList,
  };
};
|
||||||
|
|
||||||
|
/**
 * Returns valid user schema, keeping only the fields the db stores.
 * Extra properties on `entry` (e.g. transient `logs`/`user` from a
 * download process object) are intentionally dropped.
 * @param {Object} [entry={}] - source object; may be empty for brand-new users
 * @returns {{lastError: *, lastUpdated: *, pastUsernames: *}} normalized user entry
 */
export const userSchema = (entry = {}) => {
  return {
    lastError: entry.lastError,
    lastUpdated: entry.lastUpdated,
    pastUsernames: entry.pastUsernames,
  };
};
|
56
run-convertDb.js
Normal file
56
run-convertDb.js
Normal file
|
@ -0,0 +1,56 @@
|
||||||
|
import { readFile, writeFile } from 'fs/promises';
|
||||||
|
|
||||||
|
import { getArg } from './lib/args.js';
|
||||||
|
import { error, log } from './lib/log.js';
|
||||||
|
import { userSchema, dbSchema } from './lib/schema.js';
|
||||||
|
|
||||||
|
const ctx = 'convertDb.js';
|
||||||
|
|
||||||
|
/**
 * Converts from array (v0) to current schema.
 * Each v0 entry becomes a `userList` record keyed by its `user` field.
 */
const fromV0 = (old) => {
  const userList = {};
  for (const entry of old) {
    userList[entry.user] = userSchema(entry);
  }
  return dbSchema(userList);
};
|
||||||
|
|
||||||
|
/**
 * Upgrades db versions.
 * Reads `db.json` from the `--path` directory, converts a v0 array db to
 * the current schema, and saves the result. The original db is backed up
 * to `db.bak.json` BEFORE `db.json` is overwritten, so a failure between
 * the two writes can never lose the only copy of the old data.
 */
const convertDb = async () => {
  log(ctx, 'Grabbing db');
  let directory;
  try {
    directory = getArg('path');
  } catch (err) {
    error(ctx, err);
    return;
  }

  let db;
  try {
    const file = await readFile(`${directory}/db.json`, { encoding: 'utf8' });
    db = JSON.parse(file);
  } catch (err) {
    error(ctx, err);
    return;
  }

  log(ctx, 'Converting db');
  // v0 dbs are plain arrays of user entries; anything else is assumed
  // to already be at the current schema and is passed through unchanged.
  const updated = Array.isArray(db) ? fromV0(db) : db;

  log(ctx, 'Saving db');
  try {
    // Write the backup first so a failure here never clobbers the original.
    await writeFile(`${directory}/db.bak.json`, JSON.stringify(db, null, 2));
    await writeFile(`${directory}/db.json`, JSON.stringify(updated, null, 2));
  } catch (err) {
    error(ctx, err);
    return;
  }
  log(ctx, 'Done');
};

convertDb();
|
|
@ -4,6 +4,7 @@ import { getArg } from './lib/args.js';
|
||||||
import { getMany } from './lib/dl.js';
|
import { getMany } from './lib/dl.js';
|
||||||
import { error, log } from './lib/log.js';
|
import { error, log } from './lib/log.js';
|
||||||
import { initDb } from './run-initDb.js';
|
import { initDb } from './run-initDb.js';
|
||||||
|
import { userSchema } from './lib/schema.js';
|
||||||
|
|
||||||
const ctx = 'downloadDb.js';
|
const ctx = 'downloadDb.js';
|
||||||
|
|
||||||
|
@ -54,17 +55,21 @@ const downloadDb = async () => {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let processes = db.map(entry => ({
|
let processes = [];
|
||||||
...entry,
|
Object.keys(db.userList).forEach(user => {
|
||||||
|
processes.push({
|
||||||
|
...db.userList[user],
|
||||||
|
user,
|
||||||
lastUpdated: Date.now(),
|
lastUpdated: Date.now(),
|
||||||
logs: [],
|
logs: [],
|
||||||
}));
|
})
|
||||||
|
});
|
||||||
|
|
||||||
log(ctx, `Downloading media using /<user>/media for ${processes.length} users`);
|
log(ctx, `Downloading media using /<user>/media for ${processes.length} users`);
|
||||||
await getMany(processes, threadMax, directory, 'media');
|
await getMany(processes, threadMax, directory, 'media');
|
||||||
|
|
||||||
log(ctx, 'Downloading media using /search');
|
log(ctx, 'Downloading media using /search');
|
||||||
await getMany(processes, threadMax, directory, 'search');
|
//await getMany(processes, threadMax, directory, 'search');
|
||||||
|
|
||||||
processes.forEach(entry => {
|
processes.forEach(entry => {
|
||||||
entry.logs.forEach(log => {
|
entry.logs.forEach(log => {
|
||||||
|
@ -82,11 +87,13 @@ const downloadDb = async () => {
|
||||||
|
|
||||||
log(ctx, 'Updating the db');
|
log(ctx, 'Updating the db');
|
||||||
try {
|
try {
|
||||||
let updated = processes.map(entry => ({
|
let updated = {
|
||||||
user: entry.user,
|
...db,
|
||||||
lastUpdated: entry.lastUpdated,
|
userList: {
|
||||||
lastError: entry.lastError,
|
...db.userList,
|
||||||
}));
|
...Object.fromEntries(processes.map(e => [e.user, userSchema(e)])),
|
||||||
|
},
|
||||||
|
}
|
||||||
await writeFile(`${directory}/db.json`, JSON.stringify(updated, null, 2));
|
await writeFile(`${directory}/db.json`, JSON.stringify(updated, null, 2));
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
error(ctx, err);
|
error(ctx, err);
|
||||||
|
|
|
@ -3,25 +3,32 @@ import { readFile, writeFile } from 'fs/promises';
|
||||||
import { getArg } from './lib/args.js';
|
import { getArg } from './lib/args.js';
|
||||||
import { getMany } from './lib/dl.js';
|
import { getMany } from './lib/dl.js';
|
||||||
import { error, log } from './lib/log.js';
|
import { error, log } from './lib/log.js';
|
||||||
|
import { userSchema } from './lib/schema.js';
|
||||||
|
|
||||||
const ctx = 'downloadUser.js';
|
const ctx = 'downloadUser.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Downloads all media possible for a user stored/to-be-stored in db.json at the specified `--path`.
|
* Downloads all media possible for comma-separated `--users` stored/to-be-stored in db.json at the specified `--path`.
|
||||||
* Useful for first run or for augmenting existing media
|
* Useful for first run or for augmenting existing media
|
||||||
* if it may be only partially archived in an uncertain state.
|
* if it may be only partially archived in an uncertain state.
|
||||||
* Safely checks if the db.json doesn't yet exist and/or the user isn't stored in the db.json at the directory provided.
|
* Safely checks if the db.json doesn't yet exist and/or the user(s) isn't stored in the db.json at the directory provided.
|
||||||
*/
|
*/
|
||||||
const downloadUser = async () => {
|
const downloadUsers = async () => {
|
||||||
log(ctx, 'Grabbing db');
|
log(ctx, 'Grabbing db');
|
||||||
let directory, threadMax = 1, user, db;
|
let directory, threadMax = 1, users, db;
|
||||||
try {
|
try {
|
||||||
directory = getArg('path');
|
directory = getArg('path');
|
||||||
user = getArg('user');
|
users = getArg('users').split(',');
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
error(ctx, err);
|
error(ctx, err);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
try {
|
||||||
|
threadMax = getArg('threads');
|
||||||
|
log(ctx, `Using ${threadMax} threads`);
|
||||||
|
} catch (err) {
|
||||||
|
log(ctx, 'Using 1 thread');
|
||||||
|
}
|
||||||
try {
|
try {
|
||||||
let file = await readFile(`${directory}/db.json`, { encoding: 'utf8' });
|
let file = await readFile(`${directory}/db.json`, { encoding: 'utf8' });
|
||||||
db = JSON.parse(file);
|
db = JSON.parse(file);
|
||||||
|
@ -40,13 +47,9 @@ const downloadUser = async () => {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build one download process per requested user, seeded from the user's
// existing db entry when present (brand-new users start from an empty object).
let processes = users.map(user => ({
  // BUG FIX: the original `...db.userList[user] | {}` used the BITWISE OR
  // operator, which coerces both sides to a number (always 0/NaN); spreading
  // a number adds no properties, so existing user data was silently dropped.
  // `??` keeps the stored entry and only falls back to `{}` when absent.
  ...(db.userList[user] ?? {}),
  user,
  lastUpdated: Date.now(),
  logs: [],
}));
|
||||||
|
@ -73,15 +76,14 @@ const downloadUser = async () => {
|
||||||
|
|
||||||
log(ctx, 'Saving db');
|
log(ctx, 'Saving db');
|
||||||
try {
|
try {
|
||||||
processes.forEach(process => {
|
let updated = {
|
||||||
let i = db.findIndex(other => other.user === process.user);
|
...db,
|
||||||
db[i] = {
|
userList: {
|
||||||
user: process.user,
|
...db.userList,
|
||||||
lastUpdated: process.lastUpdated,
|
...Object.fromEntries(processes.map(e => [e.user, userSchema(e)])),
|
||||||
lastError: process.lastError,
|
},
|
||||||
}
|
}
|
||||||
});
|
await writeFile(`${directory}/db.json`, JSON.stringify(updated, null, 2));
|
||||||
await writeFile(`${directory}/db.json`, JSON.stringify(db, null, 2));
|
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
error(ctx, err);
|
error(ctx, err);
|
||||||
return;
|
return;
|
||||||
|
@ -89,4 +91,4 @@ const downloadUser = async () => {
|
||||||
log(ctx, 'Done');
|
log(ctx, 'Done');
|
||||||
}
|
}
|
||||||
|
|
||||||
downloadUser();
|
downloadUsers();
|
|
@ -3,6 +3,7 @@ import { writeFile } from 'fs/promises';
|
||||||
import { getArg } from './lib/args.js';
|
import { getArg } from './lib/args.js';
|
||||||
import { getChildDirectories } from './lib/io.js';
|
import { getChildDirectories } from './lib/io.js';
|
||||||
import { error, log } from './lib/log.js';
|
import { error, log } from './lib/log.js';
|
||||||
|
import { userSchema, dbSchema } from './lib/schema.js';
|
||||||
|
|
||||||
const ctx = 'initDb.js';
|
const ctx = 'initDb.js';
|
||||||
|
|
||||||
|
@ -21,10 +22,7 @@ export const initDb = async () => {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const children = await getChildDirectories(directory);
|
const children = await getChildDirectories(directory);
|
||||||
const db = children.map(child => ({
|
const db = dbSchema(Object.fromEntries(children.map(e => [e, userSchema({})])));
|
||||||
'user': child,
|
|
||||||
'lastUpdated': 'never',
|
|
||||||
}));
|
|
||||||
log(ctx, 'Writing database');
|
log(ctx, 'Writing database');
|
||||||
await writeFile(`${directory}/db.json`, JSON.stringify(db, null, 2));
|
await writeFile(`${directory}/db.json`, JSON.stringify(db, null, 2));
|
||||||
log(ctx, 'Writing complete!');
|
log(ctx, 'Writing complete!');
|
||||||
|
|
Loading…
Add table
Reference in a new issue