
v0 -> v1 db, support adding multiple users

lightling 2024-02-18 23:50:08 -05:00
parent 3f7f929972
commit d76871d3ac
5 changed files with 120 additions and 37 deletions
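
In rough terms, the migration replaces the v0 layout, a bare array of user entries, with a versioned v1 object that keys users by name. A sketch of the two shapes (the "alice" entry is illustrative):

// v0: a flat array, one entry per user
[
  { "user": "alice", "lastUpdated": "never" }
]

// v1: a versioned wrapper keying users by name under userList
{
  "version": "1.0.0",
  "userList": {
    "alice": { "lastUpdated": "never" }
  }
}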

lib/schema.js Normal file (+20)

@@ -0,0 +1,20 @@
/**
* Returns valid db schema
*/
export const dbSchema = (userList) => {
return {
version: '1.0.0',
userList,
}
}
/**
* Returns valid user schema
*/
export const userSchema = (entry) => {
return {
lastError: entry.lastError,
lastUpdated: entry.lastUpdated,
pastUsernames: entry.pastUsernames,
}
}
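
For illustration, both helpers act as whitelists: userSchema copies only the three known fields off an entry (anything else, such as runtime logs, is dropped), and dbSchema stamps a version onto a userList. A hypothetical call:

import { userSchema, dbSchema } from './lib/schema.js';
// hypothetical entry carrying an extra runtime field
const entry = { lastUpdated: 1708300000000, logs: [] };
userSchema(entry);
// -> { lastError: undefined, lastUpdated: 1708300000000, pastUsernames: undefined }
dbSchema({ alice: userSchema(entry) });
// -> { version: '1.0.0', userList: { alice: { ... } } }

Fields absent from the entry come back as undefined, which JSON.stringify later drops, so a brand-new user serializes as an empty object.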

run-convertDb.js Normal file (+56)

@@ -0,0 +1,56 @@
import { readFile, writeFile } from 'fs/promises';
import { getArg } from './lib/args.js';
import { error, log } from './lib/log.js';
import { userSchema, dbSchema } from './lib/schema.js';
const ctx = 'convertDb.js';
/**
* Converts from array (v0) to current schema
*/
const fromV0 = (old) => {
const userList = Object.fromEntries(old.map(e => [e.user, userSchema(e)]));
return dbSchema(userList);
}
/**
* Upgrades db versions
*/
const convertDb = async () => {
log(ctx, 'Grabbing db');
let directory, db, updated;
try {
directory = getArg('path');
} catch (err) {
error(ctx, err);
return;
}
try {
let file = await readFile(`${directory}/db.json`, { encoding: 'utf8' });
db = JSON.parse(file);
} catch (err) {
error(ctx, err);
return;
}
log(ctx, 'Converting db');
if (Array.isArray(db)) {
updated = fromV0(db);
} else {
updated = db;
}
log(ctx, 'Saving db');
try {
// back up the original first so it survives a failed overwrite
await writeFile(`${directory}/db.bak.json`, JSON.stringify(db, null, 2));
await writeFile(`${directory}/db.json`, JSON.stringify(updated, null, 2));
} catch (err) {
error(ctx, err);
return;
}
log(ctx, 'Done');
}
convertDb();
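
Assuming getArg reads double-dash flags, as the `--path` and `--users` references elsewhere in this commit suggest, a one-off conversion would look roughly like:

node run-convertDb.js --path ./archive

where ./archive is a stand-in for the directory holding db.json. Already-converted (non-array) databases pass through unchanged, and the pre-conversion contents are kept alongside as db.bak.json.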

run-downloadDb.js

@@ -4,6 +4,7 @@ import { getArg } from './lib/args.js';
import { getMany } from './lib/dl.js';
import { error, log } from './lib/log.js';
import { initDb } from './run-initDb.js';
import { userSchema } from './lib/schema.js';
const ctx = 'downloadDb.js';
@@ -54,17 +55,21 @@ const downloadDb = async () => {
}
}
let processes = db.map(entry => ({
...entry,
lastUpdated: Date.now(),
logs: [],
}));
let processes = [];
Object.keys(db.userList).forEach(user => {
processes.push({
...db.userList[user],
user,
lastUpdated: Date.now(),
logs: [],
})
});
log(ctx, `Downloading media using /<user>/media for ${processes.length} users`);
await getMany(processes, threadMax, directory, 'media');
log(ctx, 'Downloading media using /search');
await getMany(processes, threadMax, directory, 'search');
//await getMany(processes, threadMax, directory, 'search');
processes.forEach(entry => {
entry.logs.forEach(log => {
@@ -82,11 +87,13 @@ const downloadDb = async () => {
log(ctx, 'Updating the db');
try {
let updated = processes.map(entry => ({
user: entry.user,
lastUpdated: entry.lastUpdated,
lastError: entry.lastError,
}));
let updated = {
...db,
userList: {
...db.userList,
...Object.fromEntries(processes.map(e => [e.user, userSchema(e)])),
},
}
await writeFile(`${directory}/db.json`, JSON.stringify(updated, null, 2));
} catch (err) {
error(ctx, err);
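
The spread order in the new update block is what lets a partial run coexist with the rest of the database: the existing userList is laid down first, then each processed user's fresh userSchema overwrites its stale copy, leaving unprocessed users untouched. A minimal self-contained sketch of that merge (hypothetical users, userSchema inlined):

const db = { version: '1.0.0', userList: { alice: { lastUpdated: 1 }, bob: { lastUpdated: 1 } } };
const processes = [{ user: 'alice', lastUpdated: 2 }];
const updated = {
  ...db,
  userList: {
    ...db.userList,
    ...Object.fromEntries(processes.map(e => [e.user, { lastUpdated: e.lastUpdated }])),
  },
};
// -> alice is refreshed (lastUpdated: 2); bob keeps its old entry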

run-downloadUser.js

@@ -3,25 +3,32 @@ import { readFile, writeFile } from 'fs/promises';
import { getArg } from './lib/args.js';
import { getMany } from './lib/dl.js';
import { error, log } from './lib/log.js';
import { userSchema } from './lib/schema.js';
const ctx = 'downloadUser.js';
/**
* Downloads all media possible for a user stored/to-be-stored in db.json at the specified `--path`.
* Downloads all media possible for comma-separated `--users` stored/to-be-stored in db.json at the specified `--path`.
* Useful for a first run, or for augmenting existing media
* that may be only partially archived in an uncertain state.
* Safely checks if the db.json doesn't yet exist and/or the user isn't stored in the db.json at the directory provided.
* Safely handles cases where db.json doesn't yet exist and/or the user(s) aren't stored in the db.json at the directory provided.
*/
const downloadUser = async () => {
const downloadUsers = async () => {
log(ctx, 'Grabbing db');
let directory, threadMax = 1, user, db;
let directory, threadMax = 1, users, db;
try {
directory = getArg('path');
user = getArg('user');
users = getArg('users').split(',');
} catch (err) {
error(ctx, err);
return;
}
try {
threadMax = getArg('threads');
log(ctx, `Using ${threadMax} threads`);
} catch (err) {
log(ctx, 'Using 1 thread');
}
try {
let file = await readFile(`${directory}/db.json`, { encoding: 'utf8' });
db = JSON.parse(file);
@@ -40,13 +47,9 @@ const downloadUser = async () => {
}
}
let index = db.findIndex(other => other.user === user);
if (index < 0) {
index = db.push({ user }) - 1;
}
let processes = db.filter((other) => other.user === user).map(entry => ({
...entry,
let processes = users.map(user => ({
...(db.userList[user] || {}),
user,
lastUpdated: Date.now(),
logs: [],
}));
@@ -73,15 +76,14 @@ const downloadUser = async () => {
log(ctx, 'Saving db');
try {
processes.forEach(process => {
let i = db.findIndex(other => other.user === process.user);
db[i] = {
user: process.user,
lastUpdated: process.lastUpdated,
lastError: process.lastError,
}
});
await writeFile(`${directory}/db.json`, JSON.stringify(db, null, 2));
let updated = {
...db,
userList: {
...db.userList,
...Object.fromEntries(processes.map(e => [e.user, userSchema(e)])),
},
}
await writeFile(`${directory}/db.json`, JSON.stringify(updated, null, 2));
} catch (err) {
error(ctx, err);
return;
@@ -89,4 +91,4 @@ const downloadUser = async () => {
log(ctx, 'Done');
}
downloadUser();
downloadUsers();
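
With the rename, a run now takes a comma-separated list instead of a single name; assuming the same flag conventions, an invocation would look something like:

node run-downloadUser.js --path ./archive --users alice,bob --threads 4

where the usernames and path are placeholders. Each listed user falls back to an empty record when absent from userList, so new and existing users can be mixed in one run.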

run-initDb.js

@@ -3,6 +3,7 @@ import { writeFile } from 'fs/promises';
import { getArg } from './lib/args.js';
import { getChildDirectories } from './lib/io.js';
import { error, log } from './lib/log.js';
import { userSchema, dbSchema } from './lib/schema.js';
const ctx = 'initDb.js';
@@ -21,10 +22,7 @@ export const initDb = async () => {
return;
}
const children = await getChildDirectories(directory);
const db = children.map(child => ({
'user': child,
'lastUpdated': 'never',
}));
const db = dbSchema(Object.fromEntries(children.map(e => [e, userSchema({})])));
log(ctx, 'Writing database');
await writeFile(`${directory}/db.json`, JSON.stringify(db, null, 2));
log(ctx, 'Writing complete!');
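
Under the new schema, initDb seeds one empty userSchema per child directory instead of the old { user, lastUpdated: 'never' } entries. Since userSchema({}) produces only undefined fields, which JSON.stringify drops, a media directory containing the hypothetical folders alice/ and bob/ would initialize db.json as:

{
  "version": "1.0.0",
  "userList": {
    "alice": {},
    "bob": {}
  }
}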