Compare commits

...

7 commits

Author SHA1 Message Date
Magnus Åhall
02a8e10d11 Better progress updates on sync 2025-01-21 18:46:00 +01:00
Magnus Åhall
3453dffb53 Sync progress bar somewhat working 2025-01-21 18:20:50 +01:00
Magnus Åhall
f33e5d54af Client UUID nodes from server sync count 2025-01-12 20:57:49 +01:00
Magnus Åhall
dc010df448 Client UUID added to JWT 2025-01-12 17:35:29 +01:00
Magnus Åhall
dfd6260a7a Clean queue after sending 2025-01-12 16:54:21 +01:00
Magnus Åhall
1c3116d9dc More sync 2025-01-12 16:06:28 +01:00
Magnus Åhall
25179ffd15 Sync from and to server 2025-01-12 12:21:49 +01:00
15 changed files with 730 additions and 138 deletions

View file

@ -2,8 +2,9 @@ package authentication
import (
// External
_ "git.gibonuddevalla.se/go/wrappederror"
werr "git.gibonuddevalla.se/go/wrappederror"
"github.com/golang-jwt/jwt/v5"
"github.com/google/uuid"
"github.com/jmoiron/sqlx"
"github.com/lib/pq"
@ -146,6 +147,14 @@ func (mngr *Manager) AuthenticationHandler(w http.ResponseWriter, r *http.Reques
data["uid"] = user.ID
data["login"] = user.Username
data["name"] = user.Name
data["cid"], err = mngr.NewClientUUID(user)
if err != nil {
mngr.log.Error("authentication", "error", err)
httpError(w, err)
return
}
token, err = mngr.GenerateToken(data)
if err != nil {
mngr.log.Error("authentication", "error", err)
@ -269,3 +278,31 @@ func (mngr *Manager) ChangePassword(username, currentPassword, newPassword strin
changed = (rowsAffected == 1)
return
} // }}}
// NewClientUUID creates and records a new client-session UUID for the
// given user and returns it.
//
// Each client session has its own UUID; the unique index on
// public.client(client_uuid) guarantees no duplicate is ever stored.
// The insert uses ON CONFLICT DO NOTHING, so a collision shows up as
// "0 rows affected" and simply triggers another proposal. This closes
// the check-then-insert race of the previous COUNT(*)-then-INSERT
// implementation, where two concurrent logins could both see a count
// of zero and then race on the insert.
func (mngr *Manager) NewClientUUID(user User) (clientUUID string, err error) { // {{{
	for {
		proposed := uuid.NewString()
		var res sql.Result
		res, err = mngr.db.Exec(
			`INSERT INTO public.client(user_id, client_uuid)
			 VALUES($1, $2)
			 ON CONFLICT (client_uuid) DO NOTHING`,
			user.ID, proposed,
		)
		if err != nil {
			err = werr.Wrap(err).WithData(proposed)
			return
		}
		var rows int64
		rows, err = res.RowsAffected()
		if err != nil {
			err = werr.Wrap(err).WithData(proposed)
			return
		}
		if rows == 1 {
			clientUUID = proposed
			return
		}
		// The proposed UUID already existed - propose a new one.
	}
} // }}}

74
main.go
View file

@ -124,7 +124,9 @@ func main() { // {{{
http.HandleFunc("/user/authenticate", AuthManager.AuthenticationHandler)
http.HandleFunc("/sync/node/{sequence}/{offset}", authenticated(actionSyncNode))
http.HandleFunc("/sync/from_server/count/{sequence}", authenticated(actionSyncFromServerCount))
http.HandleFunc("/sync/from_server/{sequence}/{offset}", authenticated(actionSyncFromServer))
http.HandleFunc("/sync/to_server", authenticated(actionSyncToServer))
http.HandleFunc("/node/retrieve/{uuid}", authenticated(actionNodeRetrieve))
@ -166,7 +168,7 @@ func authenticated(fn func(http.ResponseWriter, *http.Request)) func(http.Respon
user := NewUser(claims)
r = r.WithContext(context.WithValue(r.Context(), CONTEXT_USER, user))
Log.Info("webserver", "op", "request", "method", r.Method, "url", r.URL.String(), "username", user.Username)
Log.Debug("webserver", "op", "request", "method", r.Method, "url", r.URL.String(), "username", user.Username, "client", user.ClientUUID)
fn(w, r)
}
} // }}}
@ -241,33 +243,22 @@ func pageSync(w http.ResponseWriter, r *http.Request) { // {{{
}
} // }}}
func actionSyncNode(w http.ResponseWriter, r *http.Request) { // {{{
func actionSyncFromServer(w http.ResponseWriter, r *http.Request) { // {{{
// The purpose of the Client UUID is to avoid
// sending nodes back once again to a client that
// just created or modified it.
request := struct {
ClientUUID string
}{}
body, _ := io.ReadAll(r.Body)
err := json.Unmarshal(body, &request)
if err != nil {
Log.Error("/node/tree", "error", err)
httpError(w, err)
return
}
user := getUser(r)
changedFrom, _ := strconv.Atoi(r.PathValue("sequence"))
offset, _ := strconv.Atoi(r.PathValue("offset"))
nodes, maxSeq, moreRowsExist, err := Nodes(user.ID, offset, uint64(changedFrom), request.ClientUUID)
nodes, maxSeq, moreRowsExist, err := Nodes(user.UserID, offset, uint64(changedFrom), user.ClientUUID)
if err != nil {
Log.Error("/node/tree", "error", err)
Log.Error("/sync/from_server", "error", err)
httpError(w, err)
return
}
Log.Debug("/node/tree", "num_nodes", len(nodes), "maxSeq", maxSeq)
Log.Debug("/sync/from_server", "num_nodes", len(nodes), "maxSeq", maxSeq)
foo, _ := json.Marshal(nodes)
os.WriteFile(fmt.Sprintf("/tmp/nodes-%d.json", offset), foo, 0644)
@ -279,12 +270,36 @@ func actionSyncNode(w http.ResponseWriter, r *http.Request) { // {{{
}{true, nodes, maxSeq, moreRowsExist})
w.Write(j)
} // }}}
// actionSyncFromServerCount reports how many nodes the server would send
// to this client in a subsequent /sync/from_server transfer, given the
// client's last synced sequence number (the {sequence} path value).
//
// The client UUID from the session is passed along so that nodes created
// or modified by this very client are excluded from the count.
func actionSyncFromServerCount(w http.ResponseWriter, r *http.Request) { // {{{
	user := getUser(r)
	changedFrom, _ := strconv.Atoi(r.PathValue("sequence"))
	count, err := NodesCount(user.UserID, uint64(changedFrom), user.ClientUUID)
	if err != nil {
		Log.Error("/sync/from_server/count", "error", err)
		httpError(w, err)
		return
	}
	// Replaces a leftover placeholder debug line (`Log.Debug("FOO", ...)`)
	// with a properly labeled message that also includes the result.
	Log.Debug("/sync/from_server/count", "client", user.ClientUUID, "changedFrom", changedFrom, "count", count)
	j, _ := json.Marshal(struct {
		OK    bool
		Count int
	}{
		true,
		count,
	})
	w.Write(j)
} // }}}
func actionNodeRetrieve(w http.ResponseWriter, r *http.Request) { // {{{
user := getUser(r)
var err error
uuid := r.PathValue("uuid")
node, err := RetrieveNode(user.ID, uuid)
node, err := RetrieveNode(user.UserID, uuid)
if err != nil {
responseError(w, err)
return
@ -295,6 +310,25 @@ func actionNodeRetrieve(w http.ResponseWriter, r *http.Request) { // {{{
"Node": node,
})
} // }}}
// actionSyncToServer receives a batch of nodes from a client and stores
// them through the add_nodes stored procedure, which decides per node
// whether it is new, already recorded, current, or historical.
//
// The request body is JSON with a single field, NodeData, itself a JSON
// array of node objects passed verbatim to add_nodes as jsonb.
func actionSyncToServer(w http.ResponseWriter, r *http.Request) { // {{{
	user := getUser(r)
	body, err := io.ReadAll(r.Body)
	if err != nil {
		Log.Error("/sync/to_server", "error", err)
		httpError(w, err)
		return
	}
	var request struct {
		NodeData string
	}
	err = json.Unmarshal(body, &request)
	if err != nil {
		httpError(w, err)
		return
	}
	// The Exec error was previously discarded, which reported OK to the
	// client even when nothing was stored - the client would then wrongly
	// clear its send queue. Report the failure instead.
	_, err = db.Exec(`CALL add_nodes($1, $2, $3::jsonb)`, user.UserID, user.ClientUUID, request.NodeData)
	if err != nil {
		Log.Error("/sync/to_server", "error", err)
		httpError(w, err)
		return
	}
	responseData(w, map[string]interface{}{
		"OK": true,
	})
} // }}}
func createNewUser(username string) { // {{{
reader := bufio.NewReader(os.Stdin)
@ -338,7 +372,7 @@ func changePassword(username string) { // {{{
fmt.Printf("\nPassword changed\n")
} // }}}
func getUser(r *http.Request) User { // {{{
user, _ := r.Context().Value(CONTEXT_USER).(User)
func getUser(r *http.Request) UserSession { // {{{
user, _ := r.Context().Value(CONTEXT_USER).(UserSession)
return user
} // }}}

44
node.go
View file

@ -2,6 +2,7 @@ package main
import (
// External
werr "git.gibonuddevalla.se/go/wrappederror"
"github.com/jmoiron/sqlx"
// Standard
@ -183,6 +184,39 @@ func Nodes(userID, offset int, synced uint64, clientUUID string) (nodes []Node,
return
} // }}}
// NodesCount returns the number of live (non-history) nodes belonging to
// userID whose created/updated/deleted sequence lies above `synced` -
// i.e. how many nodes a /sync/from_server run would transfer.
// Nodes whose last write came from clientUUID are excluded, so a client
// is not sent back changes it made itself.
func NodesCount(userID int, synced uint64, clientUUID string) (count int, err error) { // {{{
	row := db.QueryRow(`
SELECT
COUNT(*)
FROM
public.node
WHERE
user_id = $1 AND
client != $3 AND
NOT history AND (
created_seq > $2 OR
updated_seq > $2 OR
deleted_seq > $2
)
`,
		userID,
		synced,
		clientUUID,
	)
	err = row.Scan(&count)
	if err != nil {
		// Attach all query parameters to the wrapped error for diagnostics.
		err = werr.Wrap(err).WithData(
			struct {
				UserID     int
				Synced     uint64
				ClientUUID string
			}{
				userID, synced, clientUUID,
			},
		)
	}
	return
} // }}}
func RetrieveNode(userID int, nodeUUID string) (node Node, err error) { // {{{
var rows *sqlx.Row
rows = db.QueryRowx(`
@ -228,7 +262,7 @@ func NodeCrumbs(nodeUUID string) (nodes []Node, err error) { // {{{
SELECT
n.uuid,
COALESCE(n.parent_uuid, 0) AS parent_uuid,
COALESCE(n.parent_uuid, '') AS parent_uuid,
n.name
FROM node n
INNER JOIN nodes nr ON n.uuid = nr.parent_uuid
@ -252,13 +286,13 @@ func NodeCrumbs(nodeUUID string) (nodes []Node, err error) { // {{{
} // }}}
func TestData() (err error) {
for range 10 {
for range 8 {
hash1, name1, _ := generateOneTestNode("", "G")
for range 10 {
for range 8 {
hash2, name2, _ := generateOneTestNode(hash1, name1)
for range 10 {
for range 8 {
hash3, name3, _ := generateOneTestNode(hash2, name2)
for range 10 {
for range 8 {
generateOneTestNode(hash3, name3)
}
}

16
sql/00006.sql Normal file
View file

@ -0,0 +1,16 @@
/* node_history keeps superseded node versions. Rows are written by the
 * add_nodes sync procedure, either when incoming data is older than the
 * current row or when the current row is about to be replaced. */
CREATE TABLE public.node_history (
id serial4 NOT NULL,
user_id int4 NOT NULL,
uuid bpchar(36) NOT NULL,
parents varchar[] NULL, /* ancestor names of the node when archived */
created timestamptz NOT NULL,
updated timestamptz NOT NULL,
name varchar(256) NOT NULL,
"content" text NOT NULL,
content_encrypted text NOT NULL,
markdown bool DEFAULT false NOT NULL,
client bpchar(36) DEFAULT ''::bpchar NOT NULL, /* client session that produced this version */
CONSTRAINT node_history_pk PRIMARY KEY (id),
CONSTRAINT node_history_user_fk FOREIGN KEY (user_id) REFERENCES public."user"(id) ON DELETE RESTRICT ON UPDATE RESTRICT
);
/* Fixed: this index was created ON public.node although its name and this
 * migration clearly target node_history, leaving uuid lookups on the
 * history table unindexed. */
CREATE INDEX node_history_uuid_idx ON public.node_history USING btree (uuid);

162
sql/00007.sql Normal file
View file

@ -0,0 +1,162 @@
/* json_ancestor_array exists so jsonb_populate_record() can pull the
 * "Ancestors" varchar[] field out of an incoming node JSON object. */
CREATE TYPE json_ancestor_array as ("Ancestors" varchar[]);
/* add_nodes ingests a batch of nodes (p_nodes, a jsonb array) uploaded by
 * client session p_client_uuid on behalf of user p_user_id. Per node it
 * picks one of four cases (numbers match the RAISE NOTICE debug output):
 *   01 - uuid unknown: insert as a new row in public.node.
 *   04 - identical updated/client/client_sequence already stored: the
 *        client re-sent a batch whose OK it never received; skip.
 *   02 - database row is newer: incoming data goes straight to node_history.
 *   03 - incoming data is newer: the current row is archived to
 *        node_history, then updated in place.
 * History inserts are made idempotent by the unique index on
 * node_history (client, client_sequence) via ON CONFLICT ... DO NOTHING. */
CREATE OR REPLACE PROCEDURE add_nodes(p_user_id int4, p_client_uuid varchar, p_nodes jsonb)
LANGUAGE PLPGSQL AS $$
DECLARE
node_data jsonb; /* current element of p_nodes */
node_updated timestamptz; /* 'Updated' of the incoming node */
db_updated timestamptz; /* updated of the existing row, if any */
db_uuid bpchar; /* uuid of the existing row; NULL when the node is new */
db_client bpchar; /* client that last wrote the existing row */
db_client_seq int; /* client_sequence of the existing row */
node_uuid bpchar; /* 'UUID' of the incoming node */
BEGIN
/* Debug separator in the server log. */
RAISE NOTICE '--------------------------';
FOR node_data IN SELECT * FROM jsonb_array_elements(p_nodes)
LOOP
node_uuid = (node_data->>'UUID')::bpchar;
node_updated = (node_data->>'Updated')::timestamptz;
/* Retrieve the current modified timestamp for this node from the database. */
SELECT
uuid, updated, client, client_sequence
INTO
db_uuid, db_updated, db_client, db_client_seq
FROM public."node"
WHERE
user_id = p_user_id AND
uuid = node_uuid;
/* Is the node not in database? It needs to be created. */
IF db_uuid IS NULL THEN
RAISE NOTICE '01 New node %', node_uuid;
INSERT INTO public."node" (
user_id, "uuid", parent_uuid, created, updated,
"name", "content", markdown, "content_encrypted",
client, client_sequence
)
VALUES(
p_user_id,
node_uuid,
(node_data->>'ParentUUID')::bpchar,
(node_data->>'Created')::timestamptz,
(node_data->>'Updated')::timestamptz,
(node_data->>'Name')::varchar,
(node_data->>'Content')::text,
(node_data->>'Markdown')::bool,
'', /* content_encrypted */
p_client_uuid,
(node_data->>'ClientSequence')::int
);
CONTINUE;
END IF;
/* The client could send a specific node again if it didn't receive the OK from this procedure before. */
IF db_updated = node_updated AND db_client = p_client_uuid AND db_client_seq = (node_data->>'ClientSequence')::int THEN
RAISE NOTICE '04, already recorded, %, %', db_client, db_client_seq;
CONTINUE;
END IF;
/* Determine if the incoming node data is to go into history or replace the current node. */
IF db_updated > node_updated THEN
RAISE NOTICE '02 DB newer, % > % (%))', db_updated, node_updated, node_uuid;
/* Incoming node is going straight to history since it is older than the current node. */
/* Here the client supplies its own ancestor list via the "Ancestors"
 * field, extracted through the json_ancestor_array composite type. */
INSERT INTO node_history(
user_id, "uuid", parents, created, updated,
"name", "content", markdown, "content_encrypted",
client, client_sequence
)
VALUES(
p_user_id,
node_uuid,
(jsonb_populate_record(null::json_ancestor_array, node_data))."Ancestors",
(node_data->>'Created')::timestamptz,
(node_data->>'Updated')::timestamptz,
(node_data->>'Name')::varchar,
(node_data->>'Content')::text,
(node_data->>'Markdown')::bool,
'', /* content_encrypted */
p_client_uuid,
(node_data->>'ClientSequence')::int
)
ON CONFLICT (client, client_sequence)
DO NOTHING;
ELSE
RAISE NOTICE '03 Client newer, % > % (%, %)', node_updated, db_updated, node_uuid, (node_data->>'ClientSequence');
/* Incoming node is newer and will replace the current node.
 *
 * The current node is copied to the node_history table and then modified in place
 * with the incoming data. */
INSERT INTO node_history(
user_id, "uuid", parents,
created, updated, "name", "content", markdown, "content_encrypted",
client, client_sequence
)
SELECT
user_id,
"uuid",
(
/* Ancestor name list of the row being archived: walk parent_uuid upward
 * from the node itself (depth 0) to the root.
 * NOTE(review): with ORDER BY depth DESC the node itself sorts LAST,
 * so OFFSET 1 appears to drop the outermost ancestor rather than
 * "itself" as the comment below claims - verify intended order. */
WITH RECURSIVE nodes AS (
SELECT
uuid,
COALESCE(parent_uuid, '') AS parent_uuid,
name,
0 AS depth
FROM node
WHERE
uuid = node_uuid
UNION
SELECT
n.uuid,
COALESCE(n.parent_uuid, '') AS parent_uuid,
n.name,
nr.depth+1 AS depth
FROM node n
INNER JOIN nodes nr ON n.uuid = nr.parent_uuid
)
SELECT ARRAY (
SELECT name
FROM nodes
ORDER BY depth DESC
OFFSET 1 /* discard itself */
)
),
created,
updated,
name,
content,
markdown,
content_encrypted,
client,
client_sequence
FROM public."node"
WHERE
user_id = p_user_id AND
uuid = node_uuid
ON CONFLICT (client, client_sequence)
DO NOTHING;
/* Current node in database is updated with incoming data. */
UPDATE public."node"
SET
updated = (node_data->>'Updated')::timestamptz,
updated_seq = nextval('node_updates'), /* bump sync sequence so other clients see the change */
name = (node_data->>'Name')::varchar,
content = (node_data->>'Content')::text,
markdown = (node_data->>'Markdown')::bool,
client = p_client_uuid,
client_sequence = (node_data->>'ClientSequence')::int
WHERE
user_id = p_user_id AND
uuid = node_uuid;
END IF;
END LOOP;
END
$$;

2
sql/00008.sql Normal file
View file

@ -0,0 +1,2 @@
/* client_sequence mirrors the client-side send-queue key (an IndexedDB
 * autoIncrement value); together with the client UUID it identifies one
 * queued change, letting add_nodes deduplicate re-sent batches. */
/* Fixed: the column was spelled "Client_sequence". Unquoted identifiers
 * fold to lower case in PostgreSQL, so behavior is identical, but the
 * spelling now matches how every query references the column. */
ALTER TABLE node ADD COLUMN client_sequence int NULL;
ALTER TABLE node_history ADD COLUMN client_sequence int NULL;

1
sql/00009.sql Normal file
View file

@ -0,0 +1 @@
/* One history row per (client, client_sequence) pair. The add_nodes
 * procedure relies on this index via ON CONFLICT (client, client_sequence)
 * DO NOTHING to make re-sent sync batches idempotent. */
CREATE UNIQUE INDEX node_history_client_idx ON public.node_history (client,client_sequence);

10
sql/00010.sql Normal file
View file

@ -0,0 +1,10 @@
/* One row per client session. Authentication creates a fresh session UUID
 * per login (see authentication NewClientUUID, which inserts here). */
CREATE TABLE public.client (
id serial NOT NULL,
user_id int4 NOT NULL,
client_uuid bpchar(36) DEFAULT '' NOT NULL,
created timestamptz DEFAULT NOW() NOT NULL,
description varchar DEFAULT '' NOT NULL, /* free-text label; defaults to empty */
CONSTRAINT client_pk PRIMARY KEY (id)
);
/* NewClientUUID depends on this unique index to guarantee a session UUID
 * is never used twice. */
CREATE UNIQUE INDEX client_uuid_idx ON public.client (client_uuid);

View file

@ -4,12 +4,12 @@ html {
#notes2 {
min-height: 100vh;
display: grid;
grid-template-areas: "tree crumbs" "tree name" "tree content" "tree blank";
grid-template-areas: "tree crumbs" "tree sync" "tree name" "tree content" "tree blank";
grid-template-columns: min-content 1fr;
}
@media only screen and (max-width: 600px) {
#notes2 {
grid-template-areas: "crumbs" "name" "content" "blank";
grid-template-areas: "crumbs" "sync" "name" "content" "blank";
grid-template-columns: 1fr;
}
#notes2 #tree {
@ -75,6 +75,52 @@ html {
justify-items: center;
margin: 16px;
}
#sync-progress {
grid-area: sync;
display: grid;
justify-items: center;
justify-self: center;
width: 100%;
max-width: 900px;
height: 56px;
position: relative;
}
#sync-progress.hidden {
visibility: hidden;
opacity: 0;
transition: visibility 0s 500ms, opacity 500ms linear;
}
#sync-progress progress {
width: calc(100% - 16px);
height: 16px;
border-radius: 4px;
}
#sync-progress progress[value]::-webkit-progress-bar {
background-color: #eee;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.25) inset;
border-radius: 4px;
}
#sync-progress progress[value]::-moz-progress-bar {
background-color: #eee;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.25) inset;
border-radius: 4px;
}
#sync-progress progress[value]::-webkit-progress-value {
background: #ba5f59;
background: linear-gradient(180deg, #ba5f59 0%, #fe5f55 50%, #ba5f59 100%);
border-radius: 4px;
}
#sync-progress progress[value]::-moz-progress-value {
background: #ba5f59;
background: linear-gradient(180deg, #ba5f59 0%, #fe5f55 50%, #ba5f59 100%);
border-radius: 4px;
}
#sync-progress .count {
margin-top: 0px;
color: #888;
position: absolute;
top: 22px;
}
.crumbs {
display: flex;
flex-wrap: wrap;
@ -109,11 +155,11 @@ html {
margin-left: 0px;
}
#name {
color: #666;
color: #333;
font-weight: bold;
text-align: center;
font-size: 1.15em;
margin-top: 32px;
margin-top: 0px;
margin-bottom: 16px;
}
/* ============================================================= *

View file

@ -2,6 +2,7 @@ import { h, Component, createRef } from 'preact'
import htm from 'htm'
import { signal } from 'preact/signals'
import { ROOT_NODE } from 'node_store'
import { SyncProgress } from 'sync'
const html = htm.bind(h)
export class NodeUI extends Component {
@ -15,6 +16,7 @@ export class NodeUI extends Component {
this.keys = signal([])
this.page = signal('node')
this.crumbs = []
this.syncProgress = createRef()
window.addEventListener('popstate', evt => {
if (evt.state?.hasOwnProperty('nodeUUID'))
_notes2.current.goToNode(evt.state.nodeUUID, true)
@ -37,7 +39,7 @@ export class NodeUI extends Component {
const crumbDivs = [
html`<div class="crumb" onclick=${() => _notes2.current.goToNode(ROOT_NODE)}>Start</div>`
]
for (let i = this.crumbs.length-1; i >= 0; i--) {
for (let i = this.crumbs.length - 1; i >= 0; i--) {
const crumbNode = this.crumbs[i]
crumbDivs.push(html`<div class="crumb" onclick=${() => _notes2.current.goToNode(crumbNode.UUID)}>${crumbNode.get('Name')}</div>`)
}
@ -52,6 +54,7 @@ export class NodeUI extends Component {
${crumbDivs}
</div>
</div>
<${SyncProgress} ref=${this.syncProgress} />
<div id="name">${node.get('Name')}</div>
<${NodeContent} key=${node.UUID} node=${node} ref=${this.nodeContent} />
<div id="blank"></div>
@ -167,13 +170,32 @@ export class NodeUI extends Component {
if (!this.nodeModified.value)
return
await nodeStore.copyToNodesHistory(this.node.value)
/* The node history is a local store for node history.
* This could be provisioned from the server or cleared if
* deemed unnecessary.
*
* The send queue is what will be sent back to the server
* to have a recorded history of the notes.
*
* A setting to be implemented in the future could be to
* not save the history locally at all. */
const node = this.node.value
// The node is still in its old state and will present
// the unmodified content to the node store.
const history = nodeStore.nodesHistory.add(node)
// Prepares the node object for saving.
// Sets Updated value to current date and time.
const node = this.node.value
node.save()
await nodeStore.add([node])
await node.save()
// Updated node is added to the send queue to be stored on server.
const sendQueue = nodeStore.sendQueue.add(this.node.value)
// Updated node is saved to the primary node store.
const nodeStoreAdding = nodeStore.add([node])
await Promise.all([history, sendQueue, nodeStoreAdding])
this.nodeModified.value = false
}//}}}
@ -315,6 +337,7 @@ export class Node {
this._children_fetched = false
this.Children = []
this.Ancestors = []
this._content = this.data.Content
this._modified = false
@ -372,10 +395,15 @@ export class Node {
this._decrypted = true
*/
}//}}}
save() {//{{{
async save() {//{{{
this.data.Content = this._content
this.data.Updated = new Date().toISOString()
this._modified = false
// When stored into database and ancestry was changed,
// the ancestry path could be interesting.
const ancestors = await nodeStore.getNodeAncestry(this)
this.data.Ancestors = ancestors.map(a => a.get('Name')).reverse()
}//}}}
}

View file

@ -10,6 +10,8 @@ export class NodeStore {
this.db = null
this.nodes = {}
this.sendQueue = null
this.nodesHistory = null
}//}}}
async initializeDB() {//{{{
return new Promise((resolve, reject) => {
@ -48,7 +50,7 @@ export class NodeStore {
break
case 5:
sendQueue = db.createObjectStore('send_queue', { keyPath: ['UUID', 'Updated'] })
sendQueue = db.createObjectStore('send_queue', { keyPath: 'ClientSequence', autoIncrement: true })
sendQueue.createIndex('updated', 'Updated', { unique: false })
break
@ -65,8 +67,9 @@ export class NodeStore {
req.onsuccess = (event) => {
this.db = event.target.result
this.sendQueue = new SimpleNodeStore(this.db, 'send_queue')
this.nodesHistory = new SimpleNodeStore(this.db, 'nodes_history')
this.initializeRootNode()
.then(() => this.initializeClientUUID())
.then(() => resolve())
}
@ -106,13 +109,6 @@ export class NodeStore {
getRequest.onerror = (event) => reject(event.target.error)
})
}//}}}
async initializeClientUUID() {//{{{
let clientUUID = await this.getAppState('client_uuid')
if (clientUUID !== null)
return
clientUUID = crypto.randomUUID()
return this.setAppState('client_uuid', clientUUID)
}//}}}
node(uuid, dataIfUndefined, newLevel) {//{{{
let n = this.nodes[uuid]
@ -160,64 +156,6 @@ export class NodeStore {
})
}//}}}
async moveToSendQueue(nodeToMove, replaceWithNode) {//{{{
return new Promise((resolve, reject) => {
const t = this.db.transaction(['nodes', 'send_queue'], 'readwrite')
const nodeStore = t.objectStore('nodes')
const sendQueue = t.objectStore('send_queue')
t.onerror = (event) => {
console.log('transaction error', event.target.error)
reject(event.target.error)
}
t.oncomplete = () => {
resolve()
}
// Node to be moved is first stored in the new queue.
const queueReq = sendQueue.put(nodeToMove.data)
queueReq.onsuccess = () => {
// When added to the send queue, the node is either deleted
// or replaced with a new node.
console.debug(`Queueing ${nodeToMove.UUID} (${nodeToMove.get('Name')})`)
let nodeReq
if (replaceWithNode)
nodeReq = nodeStore.put(replaceWithNode.data)
else
nodeReq = nodeStore.delete(nodeToMove.UUID)
nodeReq.onsuccess = () => {
resolve()
}
nodeReq.onerror = (event) => {
console.log(`Error moving ${nodeToMove.UUID}`, event.target.error)
reject(event.target.error)
}
}
queueReq.onerror = (event) => {
console.log(`Error queueing ${nodeToMove.UUID}`, event.target.error)
reject(event.target.error)
}
})
}//}}}
async copyToNodesHistory(nodeToCopy) {//{{{
return new Promise((resolve, reject) => {
const t = this.db.transaction('nodes_history', 'readwrite')
const nodesHistory = t.objectStore('nodes_history')
t.oncomplete = () => {
resolve()
}
t.onerror = (event) => {
console.log('transaction error', event.target.error)
reject(event.target.error)
}
const historyReq = nodesHistory.put(nodeToCopy.data)
historyReq.onerror = (event) => {
console.log(`Error copying ${nodeToCopy.UUID}`, event.target.error)
reject(event.target.error)
}
})
}//}}}
async storeNode(node) {//{{{
return new Promise((resolve, reject) => {
const t = this.db.transaction('nodes', 'readwrite')
@ -308,9 +246,6 @@ export class NodeStore {
console.log('transaction error', event.target.error)
reject(event.target.error)
}
t.oncomplete = () => {
console.log('OK')
}
// records is an object, not an array.
const promises = []
@ -397,4 +332,86 @@ export class NodeStore {
}//}}}
}
/* SimpleNodeStore is a thin wrapper around one IndexedDB object store,
 * used for the send queue ('send_queue') and the local node history
 * ('nodes_history'). */
class SimpleNodeStore {
  constructor(db, storeName) {//{{{
    this.db = db
    this.storeName = storeName
  }//}}}
  async add(node) {//{{{
    // Stores node.data in the wrapped store (put = insert-or-overwrite).
    return new Promise((resolve, reject) => {
      const t = this.db.transaction(['nodes', this.storeName], 'readwrite')
      const store = t.objectStore(this.storeName)
      t.onerror = (event) => {
        console.log('transaction error', event.target.error)
        reject(event.target.error)
      }
      const req = store.put(node.data)
      req.onsuccess = () => {
        resolve()
      }
      req.onerror = (event) => {
        console.log(`Error adding ${node.UUID}`, event.target.error)
        reject(event.target.error)
      }
    })
  }//}}}
  async retrieve(limit) {//{{{
    // Returns up to `limit` raw records, iterated via the 'updated' index.
    return new Promise((resolve, reject) => {
      const cursorReq = this.db
        .transaction(['nodes', this.storeName], 'readonly')
        .objectStore(this.storeName)
        .index('updated')
        .openCursor()
      let retrieved = 0
      const nodes = []
      cursorReq.onsuccess = (event) => {
        const cursor = event.target.result
        if (!cursor) {
          // Store exhausted before the limit was reached.
          resolve(nodes)
          return
        }
        retrieved++
        nodes.push(cursor.value)
        if (retrieved === limit) {
          resolve(nodes)
          return
        }
        cursor.continue()
      }
      // Fixed: this handler was missing, so the returned promise never
      // settled when the cursor request failed.
      cursorReq.onerror = (event) => {
        console.log(`Error retrieving from ${this.storeName}`, event.target.error)
        reject(event.target.error)
      }
    })
  }//}}}
  async delete(keys) {//{{{
    // Deletes the given primary keys; resolves when every delete finished.
    const store = this.db
      .transaction(['nodes', this.storeName], 'readwrite')
      .objectStore(this.storeName)
    const promises = []
    for (const key of keys) {
      const p = new Promise((resolve, reject) => {
        // TODO - implement a way to add an error to a page-global error log.
        const request = store.delete(key)
        request.onsuccess = (event) => resolve(event)
        request.onerror = (event) => reject(event)
      })
      promises.push(p)
    }
    return Promise.all(promises)
  }//}}}
  async count() {//{{{
    // Resolves with the number of records currently in the store.
    const store = this.db
      .transaction(['nodes', this.storeName], 'readonly')
      .objectStore(this.storeName)
    return new Promise((resolve, reject) => {
      const request = store.count()
      request.onsuccess = (event) => resolve(event.target.result)
      request.onerror = (event) => reject(event.target.error)
    })
  }//}}}
}
// vim: foldmethod=marker

View file

@ -13,9 +13,8 @@ export class Notes2 extends Component {
startNode: null,
}
Sync.nodes().then(durationNodes =>
console.log(`Total time: ${Math.round(100 * durationNodes) / 100}s`)
)
window._sync = new Sync()
window._sync.run()
this.getStartNode()
}//}}}

View file

@ -1,28 +1,73 @@
import { API } from 'api'
import { Node } from 'node'
import { h, Component, createRef } from 'preact'
import htm from 'htm'
const html = htm.bind(h)
const SYNC_COUNT = 1
const SYNC_HANDLED = 2
const SYNC_DONE = 3
export class Sync {
constructor() {
this.foo = ''
}
constructor() {//{{{
this.listeners = []
this.messagesReceived = []
}//}}}
addListener(fn, runMessageQueue) {//{{{
// Registers fn to receive every future sync message.
// Some handlers won't be added until a time after sync messages have been added to the queue.
// This is an opportunity for the handler to receive the old messages in order.
if (runMessageQueue)
for (const msg of this.messagesReceived)
fn(msg)
this.listeners.push(fn)
}//}}}
pushMessage(msg) {//{{{
// Records msg so late listeners can replay it (see addListener), then
// fans it out to every currently registered listener.
this.messagesReceived.push(msg)
for (const fn of this.listeners)
fn(msg)
}//}}}
static async nodes() {
let duration = 0
const syncStart = Date.now()
async run() {//{{{
try {
let duration = 0 // in ms
// The latest sync node value is used to retrieve the changes
// from the backend.
const state = await nodeStore.getAppState('latest_sync_node')
const clientUUID = await nodeStore.getAppState('client_uuid')
const oldMax = (state?.value ? state.value : 0)
let currMax = oldMax
let nodeCount = await this.getNodeCount(oldMax)
nodeCount += await nodeStore.sendQueue.count()
const msg = { op: SYNC_COUNT, count: nodeCount }
this.pushMessage(msg)
await this.nodesFromServer(oldMax)
.then(durationNodes => {
duration = durationNodes // in ms
console.log(`Total time: ${Math.round(1000 * durationNodes) / 1000}s`)
})
await this.nodesToServer()
} finally {
this.pushMessage({ op: SYNC_DONE })
}
}//}}}
async getNodeCount(oldMax) {//{{{
// Retrieve the amount of values the server will send us.
// Asks the backend how many nodes changed after sequence oldMax; the
// server excludes this client's own changes based on the session UUID.
const res = await API.query('POST', `/sync/from_server/count/${oldMax}`)
return res?.Count
}//}}}
async nodesFromServer(oldMax) {//{{{
const syncStart = Date.now()
let syncEnd
try {
let currMax = oldMax
let offset = 0
let res = { Continue: false }
let batch = 0
do {
batch++
res = await API.query('POST', `/sync/node/${oldMax}/${offset}`, { ClientUUID: clientUUID.value })
res = await API.query('POST', `/sync/from_server/${oldMax}/${offset}`)
if (res.Nodes.length > 0)
console.log(`Node sync batch #${batch}`)
offset += res.Nodes.length
@ -40,7 +85,7 @@ export class Sync {
let backendNode = null
for (const i in res.Nodes) {
backendNode = new Node(res.Nodes[i], -1)
await Sync.handleNode(backendNode)
await window._sync.handleNode(backendNode)
}
} while (res.Continue)
@ -49,14 +94,14 @@ export class Sync {
} catch (e) {
console.log('sync node tree', e)
} finally {
const syncEnd = Date.now()
duration = (syncEnd - syncStart) / 1000
syncEnd = Date.now()
const duration = (syncEnd - syncStart) / 1000
const count = await nodeStore.nodeCount()
console.log(`Node sync took ${duration}s`, count)
}
return duration
}
static async handleNode(backendNode) {
return (syncEnd - syncStart)
}//}}}
async handleNode(backendNode) {//{{{
try {
/* Retrieving the local copy of this node from IndexedDB.
* The backend node can be discarded if it is older than
@ -69,16 +114,117 @@ export class Sync {
return
}
// local node is older than the backend node
// and moved into the send_queue table for later sync to backend.
return nodeStore.moveToSendQueue(localNode, backendNode)
/* If the local node hasn't seen unsynchronized change,
* it can be replaced without anything else being done
* since it is already on the backend server.
*
* If the local node has seen change, the change is already
* placed into the send_queue anyway. */
return nodeStore.add([backendNode])
})
.catch(async e => {
.catch(async () => {
// Not found in IndexedDB - OK to just insert since it only exists in backend.
return nodeStore.add([backendNode])
})
} catch (e) {
console.error(e)
} finally {
this.pushMessage({ op: SYNC_HANDLED, count: 1 })
}
}
}//}}}
async nodesToServer() {//{{{
// Drains the local send queue to the server in batches of BATCH_SIZE,
// looping until the queue is empty or a request fails.
const BATCH_SIZE = 32
while (true) {
try {
// Send nodes in batches until everything is sent, or an error has occured.
const nodesToSend = await nodeStore.sendQueue.retrieve(BATCH_SIZE)
if (nodesToSend.length === 0)
break
console.debug(`Sending ${nodesToSend.length} node(s) to server`)
// The backend expects the batch as one JSON string in NodeData.
const request = {
NodeData: JSON.stringify(nodesToSend),
}
const res = await API.query('POST', '/sync/to_server', request)
if (!res.OK) {
// TODO - implement better error management here.
console.log(res)
alert(res)
return
}
// Nodes are archived on server and can now be deleted from the send queue.
// ClientSequence is the send queue's autoIncrement primary key.
const keys = nodesToSend.map(node => node.ClientSequence)
await nodeStore.sendQueue.delete(keys)
// Tell progress listeners this many nodes were handled.
this.pushMessage({ op: SYNC_HANDLED, count: nodesToSend.length })
} catch (e) {
console.trace(e)
alert(e)
return
}
}
}//}}}
}
/* SyncProgress renders the #sync-progress bar. It subscribes to the global
 * Sync instance (window._sync) and reacts to three message ops:
 * SYNC_COUNT (total nodes to handle), SYNC_HANDLED (some nodes were
 * processed) and SYNC_DONE. */
export class SyncProgress extends Component {
constructor() {//{{{
super()
// Pending setTimeout id used to throttle re-renders; null when none is scheduled.
this.forceUpdateRequest = null
this.state = {
nodesToSync: 0, // total expected, set by SYNC_COUNT
nodesSynced: 0, // handled so far, accumulated from SYNC_HANDLED
syncedDone: false, // set by SYNC_DONE
}
}//}}}
componentDidMount() {//{{{
// The second argument replays messages queued before this component
// mounted, so a sync that started earlier is not missed.
window._sync.addListener(msg => this.progressHandler(msg), true)
}//}}}
getSnapshotBeforeUpdate(_, prevState) {//{{{
// On the transition to done, fade the bar out after 750 ms
// (the .hidden class transitions visibility/opacity in CSS).
if (!prevState.syncedDone && this.state.syncedDone)
setTimeout(() => document.getElementById('sync-progress')?.classList.add('hidden'), 750)
}//}}}
componentDidUpdate() {//{{{
// SYNC_HANDLED mutates this.state.nodesSynced directly (no setState),
// so while the sync is running a re-render is forced at most every
// 50 ms to pick those mutations up without rendering per message.
if (!this.state.syncedDone) {
if (this.forceUpdateRequest !== null)
clearTimeout(this.forceUpdateRequest)
this.forceUpdateRequest = setTimeout(
() => {
this.forceUpdateRequest = null
this.forceUpdate()
},
50
)
}
}//}}}
progressHandler(msg) {//{{{
switch (msg.op) {
case SYNC_COUNT:
this.setState({ nodesToSync: msg.count })
break
case SYNC_HANDLED:
// Deliberate direct mutation; rendering is throttled in
// componentDidUpdate instead of going through setState here.
this.state.nodesSynced += msg.count
break
case SYNC_DONE:
this.setState({ syncedDone: true })
break
}
}//}}}
render(_, { nodesToSync, nodesSynced }) {//{{{
// Keep the grid area in place but show nothing until the count is known.
if (nodesToSync === 0)
return html`<div id="sync-progress"></div>`
return html`
<div id="sync-progress">
<progress min=0 max=${nodesToSync} value=${nodesSynced}></progress>
<div class="count">${nodesSynced} / ${nodesToSync}</div>
</div>
`
}//}}}
}

View file

@ -10,6 +10,7 @@ html {
display: grid;
grid-template-areas:
"tree crumbs"
"tree sync"
"tree name"
"tree content"
//"tree checklist"
@ -22,6 +23,7 @@ html {
@media only screen and (max-width: 600px) {
grid-template-areas:
"crumbs"
"sync"
"name"
"content"
//"checklist"
@ -110,6 +112,61 @@ html {
margin: 16px;
}
#sync-progress {
grid-area: sync;
display: grid;
justify-items: center;
justify-self: center;
width: 100%;
max-width: 900px;
height: 56px;
position: relative;
&.hidden {
visibility: hidden;
opacity: 0;
transition: visibility 0s 500ms, opacity 500ms linear;
}
progress {
width: calc(100% - 16px);
height: 16px;
border-radius: 4px;
}
progress[value]::-webkit-progress-bar {
background-color: #eee;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.25) inset;
border-radius: 4px;
}
progress[value]::-moz-progress-bar {
background-color: #eee;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.25) inset;
border-radius: 4px;
}
progress[value]::-webkit-progress-value {
background: rgb(186,95,89);
background: linear-gradient(180deg, rgba(186,95,89,1) 0%, rgba(254,95,85,1) 50%, rgba(186,95,89,1) 100%);
border-radius: 4px;
}
// TODO: style the progress value for Firefox
progress[value]::-moz-progress-value {
background: rgb(186,95,89);
background: linear-gradient(180deg, rgba(186,95,89,1) 0%, rgba(254,95,85,1) 50%, rgba(186,95,89,1) 100%);
border-radius: 4px;
}
.count {
margin-top: 0px;
color: #888;
position: absolute;
top: 22px;
}
}
.crumbs {
background: #e4e4e4;
display: flex;
@ -151,11 +208,11 @@ html {
}
#name {
color: @color3;
color: #333;
font-weight: bold;
text-align: center;
font-size: 1.15em;
margin-top: 32px;
margin-top: 0px;
margin-bottom: 16px;
}

17
user.go
View file

@ -5,20 +5,23 @@ import (
"github.com/golang-jwt/jwt/v5"
)
type User struct {
ID int
Username string
Password string
Name string
type UserSession struct {
UserID int
Username string
Password string
Name string
ClientUUID string
}
func NewUser(claims jwt.MapClaims) (u User) {
func NewUser(claims jwt.MapClaims) (u UserSession) {
uid, _ := claims["uid"].(float64)
name, _ := claims["name"].(string)
username, _ := claims["login"].(string)
clientUUID, _ := claims["cid"].(string)
u.ID = int(uid)
u.UserID = int(uid)
u.Username = username
u.Name = name
u.ClientUUID = clientUUID
return
}