Server: rename tiddler storage fields to align with current usage

Val Packett 2023-07-25 20:42:59 -03:00
parent 6f29875e8c
commit 728ec0c70e
5 changed files with 58 additions and 56 deletions
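The new names used on the wire and in server storage match the field names the client plugin already uses internally (ct, iv, sbct, sbiv), replacing the older title, tiv, data, iv. A rough sketch of the correspondence follows; the field names are taken from the diff below, but the type names and the helper function are purely illustrative and not part of the codebase:

type TidJsonOld = {
  thash: string;
  title?: string; // becomes ct
  tiv?: string; // becomes iv
  data?: string; // becomes sbct
  iv?: string; // becomes sbiv
  mtime?: Date;
  deleted?: boolean;
};

type TidJsonNew = {
  thash: string;
  iv?: string;
  ct?: string;
  sbiv?: string;
  sbct?: string;
  mtime?: Date;
  deleted?: boolean;
};

// Hypothetical adapter that spells out the old -> new mapping.
function renameTiddlerFields({ thash, title, tiv, data, iv, mtime, deleted }: TidJsonOld): TidJsonNew {
  return { thash, ct: title, iv: tiv, sbct: data, sbiv: iv, mtime, deleted };
}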


@@ -529,7 +529,9 @@ Formatted with `deno fmt`.
 const seemsLikeDocs = $tw.wiki.getTiddlersWithTag('TiddlyPWA Docs').length > 0;
 if (!seemsLikeDocs) modal.showWrapper();
 if (freshDb) {
-  modal.setFeedback('<p>No wiki data found in the browser storage for this URL. Wait a second, looking around the server..</p>');
+  modal.setFeedback(
+    '<p>No wiki data found in the browser storage for this URL. Wait a second, looking around the server..</p>',
+  );
   const giveUp = new AbortController();
   modal.showGiveUpButtonDelayed(6900, () => giveUp.abort());
   modal.showModalDelayed(seemsLikeDocs ? 6900 : 1000);
@@ -972,10 +974,10 @@ Formatted with `deno fmt`.
 }
 const tidjson = {
   thash: await b64enc(thash),
-  title: await b64enc(ct),
-  tiv: await b64enc(iv),
-  data: sbct && await b64enc(sbct),
-  iv: sbiv && await b64enc(sbiv),
+  ct: ct && await b64enc(ct),
+  iv: iv && await b64enc(iv),
+  sbct: sbct && await b64enc(sbct),
+  sbiv: sbiv && await b64enc(sbiv),
   mtime,
   deleted,
 };
@@ -1013,15 +1015,15 @@ Formatted with `deno fmt`.
 const toDecrypt = [];
 const titleHashesToDelete = new Set();
 const txn = this.db.transaction('tiddlers', 'readwrite');
-for (const { thash, title, tiv, data, iv, mtime, deleted } of serverChanges) {
+for (const { thash, iv, ct, sbiv, sbct, mtime, deleted } of serverChanges) {
   const dhash = b64dec(thash);
   if (!dhash || arrayEq(dhash, this.storyListHash)) continue;
   const tid = {
     thash: dhash.buffer,
-    ct: title && b64dec(title).buffer,
-    iv: tiv && b64dec(tiv).buffer,
-    sbct: data && b64dec(data).buffer,
-    sbiv: iv && b64dec(iv).buffer,
+    ct: ct && b64dec(ct).buffer,
+    iv: iv && b64dec(iv).buffer,
+    sbct: sbct && b64dec(sbct).buffer,
+    sbiv: sbiv && b64dec(sbiv).buffer,
     mtime: new Date(mtime),
     deleted,
   };


@@ -36,10 +36,10 @@ const uploadAppFile = (token: string, body: string, extra?: Record<string, unkno
 type tidjson = {
   thash: string;
-  title?: string;
-  tiv?: string;
-  data?: string;
   iv?: string;
+  ct?: string;
+  sbiv?: string;
+  sbct?: string;
   mtime?: Date;
   deleted?: boolean;
 };
@@ -52,8 +52,8 @@ Deno.test('basic syncing works', async () => {
 // Basic write, with and without an mtime
 assertEquals(
   await sync(tok, 'test', s1date, new Date(0), [
-    { thash: 'T3dP', data: '1111' },
-    { thash: 'VXdV', data: '11111111', mtime: new Date(69) },
+    { thash: 'T3dP', ct: '1111' },
+    { thash: 'VXdV', ct: '11111111', mtime: new Date(69) },
   ]),
   { appEtag: null, serverChanges: [] },
 );
@@ -65,19 +65,19 @@ Deno.test('basic syncing works', async () => {
 serverChanges: [
   {
     thash: 'T3dP',
-    title: null,
-    tiv: null,
-    data: '1111',
     iv: null,
+    ct: '1111',
+    sbiv: null,
+    sbct: null,
     mtime: s1date.toISOString(),
     deleted: false,
   },
   {
     thash: 'VXdV',
-    title: null,
-    tiv: null,
-    data: '11111111',
     iv: null,
+    ct: '11111111',
+    sbiv: null,
+    sbct: null,
     mtime: new Date(69).toISOString(),
     deleted: false,
   },
@@ -88,10 +88,10 @@ Deno.test('basic syncing works', async () => {
 serverChanges: [
   {
     thash: 'T3dP',
-    title: null,
-    tiv: null,
-    data: '1111',
     iv: null,
+    ct: '1111',
+    sbiv: null,
+    sbct: null,
     mtime: s1date.toISOString(),
     deleted: false,
   },
@@ -109,7 +109,7 @@ Deno.test('syncing a ton of tiddlers works', async () => {
   'test',
   s1date,
   new Date(0),
-  [...Array(20).keys()].map((_, i) => ({ thash: btoa(i.toString()), data: 'T3dp' })),
+  [...Array(20).keys()].map((_, i) => ({ thash: btoa(i.toString()), ct: 'T3dp' })),
 ),
 { appEtag: null, serverChanges: [] },
 );
@@ -118,10 +118,10 @@ Deno.test('syncing a ton of tiddlers works', async () => {
 serverChanges: [...Array(20).keys()].map((_, i) => (
   {
     thash: btoa(i.toString()),
-    title: null,
-    tiv: null,
-    data: 'T3dp',
     iv: null,
+    ct: 'T3dp',
+    sbiv: null,
+    sbct: null,
     mtime: s1date.toISOString(),
     deleted: false,
   }
@@ -136,7 +136,7 @@ Deno.test('storing large data works', async () => {
 const bigdata = Array(5592407).join('A') + '==';
 assertEquals(
   await sync(tok, 'test', s1date, new Date(0), [
-    { thash: 'T3dP', data: bigdata },
+    { thash: 'T3dP', ct: bigdata },
   ]),
   { appEtag: null, serverChanges: [] },
 );
@@ -145,10 +145,10 @@ Deno.test('storing large data works', async () => {
 serverChanges: [
   {
     thash: 'T3dP',
-    title: null,
-    tiv: null,
-    data: bigdata,
     iv: null,
+    ct: bigdata,
+    sbiv: null,
+    sbct: null,
     mtime: s1date.toISOString(),
     deleted: false,
   },


@@ -115,26 +115,26 @@ export class TiddlyPWASyncApp {
 this.db.transaction(() => {
   if (!(wiki as Wiki).authcode && authcode) this.db.updateWikiAuthcode(token, authcode);
   if (!(wiki as Wiki).salt && salt) this.db.updateWikiSalt(token, salt as string);
-  for (const { thash, title, tiv, data, iv, mtime, deleted } of this.db.tiddlersChangedSince(token, modsince)) {
+  for (const { thash, iv, ct, sbiv, sbct, mtime, deleted } of this.db.tiddlersChangedSince(token, modsince)) {
     // console.log('ServHas', base64nourl.encode(thash as Uint8Array), mtime, modsince, mtime < modsince);
     serverChanges.push({
       thash: thash ? base64nourl.encode(thash as Uint8Array) : null,
-      title: title ? base64nourl.encode(title as Uint8Array) : null,
-      tiv: tiv ? base64nourl.encode(tiv as Uint8Array) : null,
-      data: data ? base64nourl.encode(data as Uint8Array) : null,
       iv: iv ? base64nourl.encode(iv as Uint8Array) : null,
+      ct: ct ? base64nourl.encode(ct as Uint8Array) : null,
+      sbiv: sbiv ? base64nourl.encode(sbiv as Uint8Array) : null,
+      sbct: sbct ? base64nourl.encode(sbct as Uint8Array) : null,
       mtime,
       deleted,
     });
   }
   // console.log('ClntChg', clientChanges);
-  for (const { thash, title, tiv, data, iv, mtime, deleted } of clientChanges) {
+  for (const { thash, iv, ct, sbiv, sbct, mtime, deleted } of clientChanges) {
     this.db.upsertTiddler(token, {
       thash: base64nourl.decode(thash),
-      title: title && base64nourl.decode(title),
-      tiv: tiv && base64nourl.decode(tiv),
-      data: data && base64nourl.decode(data),
       iv: iv && base64nourl.decode(iv),
+      ct: ct && base64nourl.decode(ct),
+      sbiv: sbiv && base64nourl.decode(sbiv),
+      sbct: sbct && base64nourl.decode(sbct),
       mtime: new Date(mtime || now),
       deleted: deleted || false,
     });

server/data.d.ts (vendored), 6 lines changed

@@ -16,10 +16,10 @@ export type File = {
 export type Tiddler = {
   thash: Uint8Array;
-  title?: Uint8Array;
-  tiv?: Uint8Array;
-  data?: Uint8Array;
   iv?: Uint8Array;
+  ct?: Uint8Array;
+  sbiv?: Uint8Array;
+  sbct?: Uint8Array;
   mtime: Date;
   deleted: boolean;
 };


@@ -48,10 +48,10 @@ export class SQLiteDatastore extends DB implements Datastore {
   END;
   CREATE TABLE tiddlers (
     thash BLOB PRIMARY KEY NOT NULL,
-    title BLOB,
-    tiv BLOB,
-    data BLOB,
     iv BLOB,
+    ct BLOB,
+    sbiv BLOB,
+    sbct BLOB,
     mtime INTEGER NOT NULL,
     deleted INTEGER NOT NULL DEFAULT 0,
     token TEXT NOT NULL,
@@ -86,7 +86,7 @@ export class SQLiteDatastore extends DB implements Datastore {
   { token: string; authcode?: string; salt?: string; note?: string; tidsize: number; appsize: number }
 >(sql`
   SELECT token, authcode, salt, note, (
-    SELECT sum(length(thash) + length(title) + length(tiv) + length(data) + length(iv))
+    SELECT sum(length(thash) + length(iv) + length(ct) + length(sbiv) + length(sbct))
     FROM tiddlers
     WHERE tiddlers.token = wikis.token
   ) AS tidsize, (
@@ -145,15 +145,15 @@ export class SQLiteDatastore extends DB implements Datastore {
   [],
   {
     thash: Uint8Array;
-    title?: Uint8Array;
-    tiv?: Uint8Array;
-    data?: Uint8Array;
     iv?: Uint8Array;
+    ct?: Uint8Array;
+    sbiv?: Uint8Array;
+    sbct?: Uint8Array;
     mtime: number;
     deleted: number;
   }
 >(sql`
-  SELECT thash, title, tiv, data, iv, mtime, deleted
+  SELECT thash, iv, ct, sbiv, sbct, mtime, deleted
   FROM tiddlers WHERE mtime > :modsince AND token = :token
 `);
 tiddlersChangedSince(token: string, since: Date) {
@@ -165,13 +165,13 @@ export class SQLiteDatastore extends DB implements Datastore {
 }
 #upsertQuery = this.prepareQuery(sql`
-  INSERT INTO tiddlers (thash, title, tiv, data, iv, mtime, deleted, token)
-  VALUES (:thash, :title, :tiv, :data, :iv, :mtime, :deleted, :token)
+  INSERT INTO tiddlers (thash, iv, ct, sbiv, sbct, mtime, deleted, token)
+  VALUES (:thash, :iv, :ct, :sbiv, :sbct, :mtime, :deleted, :token)
   ON CONFLICT (thash) DO UPDATE SET
-    title = excluded.title,
-    tiv = excluded.tiv,
-    data = excluded.data,
     iv = excluded.iv,
+    ct = excluded.ct,
+    sbiv = excluded.sbiv,
+    sbct = excluded.sbct,
     mtime = excluded.mtime,
     deleted = excluded.deleted
   WHERE excluded.mtime > mtime
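
None of the five changed files appears to migrate a database created with the old column names, so an existing tiddlers table would still carry title/tiv/data/iv. If such a migration were wanted, a one-off rename could look roughly like the sketch below. This is an assumption, not part of this commit: it presumes SQLite 3.25+ (for ALTER TABLE ... RENAME COLUMN), the deno.land/x/sqlite DB API that SQLiteDatastore builds on, and a guessed database path. The order matters because the old iv column has to be moved aside before tiv can take its name.

// Sketch only: one-off rename of the old columns to the new names used above.
import { DB } from 'https://deno.land/x/sqlite/mod.ts';

const db = new DB('data.db'); // hypothetical path; point this at the server's actual database file
db.execute(`
  ALTER TABLE tiddlers RENAME COLUMN iv TO sbiv;
  ALTER TABLE tiddlers RENAME COLUMN data TO sbct;
  ALTER TABLE tiddlers RENAME COLUMN tiv TO iv;
  ALTER TABLE tiddlers RENAME COLUMN title TO ct;
`);
db.close();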