2024-03-08 00:22:51 -07:00
|
|
|
package graphvent
|
|
|
|
|
|
|
|
import (
|
2024-03-10 15:41:13 -06:00
|
|
|
"encoding/binary"
|
|
|
|
"fmt"
|
2024-03-30 15:42:06 -06:00
|
|
|
"reflect"
|
|
|
|
"sync"
|
2024-03-10 15:41:13 -06:00
|
|
|
|
2024-03-08 00:22:51 -07:00
|
|
|
badger "github.com/dgraph-io/badger/v3"
|
|
|
|
)
|
|
|
|
|
2024-03-30 15:42:06 -06:00
|
|
|
// Database is the persistence interface for graphvent nodes. An
// implementation must be able to write a node's initial snapshot, write
// incremental per-extension changes, and load a node back by its ID.
type Database interface {
	// WriteNodeInit persists a complete initial snapshot of a node
	// (node value, signal queue, extension list, and extension values).
	WriteNodeInit(*Context, *Node) error
	// WriteNodeChanges persists the changed extensions (and, when dirty,
	// the signal queue) of an already-written node.
	WriteNodeChanges(*Context, *Node, map[ExtType]Changes) error
	// LoadNode reads a node and all of its extensions back from storage.
	LoadNode(*Context, NodeID) (*Node, error)
}
|
|
|
|
|
|
|
|
// WRITE_BUFFER_SIZE is the size in bytes of the scratch buffer each
// BadgerDB keeps for serializing values before handing them to badger.
// NOTE(review): Go convention is MixedCaps (WriteBufferSize), but renaming
// this exported constant would break callers.
const WRITE_BUFFER_SIZE = 1000000

// BadgerDB implements Database on top of an embedded badger key/value
// store. The embedded Mutex guards the shared scratch buffer, so a
// BadgerDB must not be copied after first use and should always be
// handled through a pointer.
type BadgerDB struct {
	*badger.DB
	sync.Mutex
	// buffer is scratch space for Serialize/SerializeValue output; slices
	// of it are passed to badger transactions, so all access must happen
	// while holding the embedded Mutex.
	buffer [WRITE_BUFFER_SIZE]byte
}
|
2024-03-28 21:23:22 -06:00
|
|
|
|
2024-03-30 15:42:06 -06:00
|
|
|
func (db *BadgerDB) WriteNodeInit(ctx *Context, node *Node) error {
|
2024-03-10 15:41:13 -06:00
|
|
|
if node == nil {
|
|
|
|
return fmt.Errorf("Cannot serialize nil *Node")
|
|
|
|
}
|
|
|
|
|
2024-03-30 15:42:06 -06:00
|
|
|
return db.Update(func(tx *badger.Txn) error {
|
|
|
|
db.Lock()
|
|
|
|
defer db.Unlock()
|
2024-03-28 21:23:22 -06:00
|
|
|
|
2024-03-10 15:41:13 -06:00
|
|
|
// Get the base key bytes
|
|
|
|
id_ser, err := node.ID.MarshalBinary()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2024-03-30 15:42:06 -06:00
|
|
|
cur := 0
|
|
|
|
|
2024-03-10 15:41:13 -06:00
|
|
|
// Write Node value
|
2024-03-30 15:42:06 -06:00
|
|
|
written, err := Serialize(ctx, node, db.buffer[cur:])
|
2024-03-10 15:41:13 -06:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2024-03-30 15:42:06 -06:00
|
|
|
|
|
|
|
err = tx.Set(id_ser, db.buffer[cur:cur+written])
|
2024-03-10 15:41:13 -06:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2024-03-30 15:42:06 -06:00
|
|
|
|
|
|
|
cur += written
|
2024-03-10 15:41:13 -06:00
|
|
|
|
|
|
|
// Write empty signal queue
|
|
|
|
sigqueue_id := append(id_ser, []byte(" - SIGQUEUE")...)
|
2024-03-30 15:42:06 -06:00
|
|
|
written, err = Serialize(ctx, node.SignalQueue, db.buffer[cur:])
|
2024-03-10 15:41:13 -06:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2024-03-30 15:42:06 -06:00
|
|
|
|
|
|
|
err = tx.Set(sigqueue_id, db.buffer[cur:cur+written])
|
2024-03-08 14:35:23 -07:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2024-03-30 15:42:06 -06:00
|
|
|
cur += written
|
|
|
|
|
2024-03-08 00:22:51 -07:00
|
|
|
// Write node extension list
|
2024-03-10 15:41:13 -06:00
|
|
|
ext_list := []ExtType{}
|
|
|
|
for ext_type := range(node.Extensions) {
|
|
|
|
ext_list = append(ext_list, ext_type)
|
|
|
|
}
|
2024-03-30 15:42:06 -06:00
|
|
|
written, err = Serialize(ctx, ext_list, db.buffer[cur:])
|
2024-03-10 15:41:13 -06:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
ext_list_id := append(id_ser, []byte(" - EXTLIST")...)
|
2024-03-30 15:42:06 -06:00
|
|
|
err = tx.Set(ext_list_id, db.buffer[cur:cur+written])
|
2024-03-10 15:41:13 -06:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2024-03-30 15:42:06 -06:00
|
|
|
cur += written
|
2024-03-10 15:41:13 -06:00
|
|
|
|
2024-03-08 00:22:51 -07:00
|
|
|
// For each extension:
|
2024-03-10 15:41:13 -06:00
|
|
|
for ext_type, ext := range(node.Extensions) {
|
2024-03-08 00:22:51 -07:00
|
|
|
// Write each extension's current value
|
2024-03-10 15:41:13 -06:00
|
|
|
ext_id := binary.BigEndian.AppendUint64(id_ser, uint64(ext_type))
|
2024-03-30 15:42:06 -06:00
|
|
|
written, err := SerializeValue(ctx, reflect.ValueOf(ext).Elem(), db.buffer[cur:])
|
2024-03-10 15:41:13 -06:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2024-03-30 15:42:06 -06:00
|
|
|
err = tx.Set(ext_id, db.buffer[cur:cur+written])
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
cur += written
|
2024-03-10 15:41:13 -06:00
|
|
|
}
|
2024-03-08 00:22:51 -07:00
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2024-03-30 15:42:06 -06:00
|
|
|
func (db *BadgerDB) WriteNodeChanges(ctx *Context, node *Node, changes map[ExtType]Changes) error {
|
|
|
|
return db.Update(func(tx *badger.Txn) error {
|
|
|
|
db.Lock()
|
|
|
|
defer db.Unlock()
|
2024-03-28 21:23:22 -06:00
|
|
|
|
2024-03-10 15:41:13 -06:00
|
|
|
// Get the base key bytes
|
2024-03-30 15:42:06 -06:00
|
|
|
id_bytes := ([16]byte)(node.ID)
|
|
|
|
|
|
|
|
cur := 0
|
2024-03-10 15:41:13 -06:00
|
|
|
|
2024-03-08 00:22:51 -07:00
|
|
|
// Write the signal queue if it needs to be written
|
2024-03-08 14:35:23 -07:00
|
|
|
if node.writeSignalQueue {
|
|
|
|
node.writeSignalQueue = false
|
2024-03-10 15:41:13 -06:00
|
|
|
|
2024-03-30 15:42:06 -06:00
|
|
|
sigqueue_id := append(id_bytes[:], []byte(" - SIGQUEUE")...)
|
|
|
|
written, err := Serialize(ctx, node.SignalQueue, db.buffer[cur:])
|
2024-03-10 15:41:13 -06:00
|
|
|
if err != nil {
|
2024-03-28 20:28:07 -06:00
|
|
|
return fmt.Errorf("SignalQueue Serialize Error: %+v, %w", node.SignalQueue, err)
|
2024-03-10 15:41:13 -06:00
|
|
|
}
|
2024-03-30 15:42:06 -06:00
|
|
|
err = tx.Set(sigqueue_id, db.buffer[cur:cur+written])
|
2024-03-10 15:41:13 -06:00
|
|
|
if err != nil {
|
2024-03-28 20:28:07 -06:00
|
|
|
return fmt.Errorf("SignalQueue set error: %+v, %w", node.SignalQueue, err)
|
2024-03-10 15:41:13 -06:00
|
|
|
}
|
2024-03-30 15:42:06 -06:00
|
|
|
cur += written
|
2024-03-08 14:35:23 -07:00
|
|
|
}
|
|
|
|
|
2024-03-08 00:22:51 -07:00
|
|
|
// For each ext in changes
|
2024-03-10 15:41:13 -06:00
|
|
|
for ext_type := range(changes) {
|
|
|
|
// Write each ext
|
|
|
|
ext, exists := node.Extensions[ext_type]
|
|
|
|
if exists == false {
|
|
|
|
return fmt.Errorf("%s is not an extension in %s", ext_type, node.ID)
|
|
|
|
}
|
2024-03-30 15:42:06 -06:00
|
|
|
ext_id := binary.BigEndian.AppendUint64(id_bytes[:], uint64(ext_type))
|
|
|
|
written, err := SerializeValue(ctx, reflect.ValueOf(ext).Elem(), db.buffer[cur:])
|
2024-03-10 15:41:13 -06:00
|
|
|
if err != nil {
|
2024-03-28 20:28:07 -06:00
|
|
|
return fmt.Errorf("Extension serialize err: %s, %w", reflect.TypeOf(ext), err)
|
2024-03-10 15:41:13 -06:00
|
|
|
}
|
|
|
|
|
2024-03-30 15:42:06 -06:00
|
|
|
err = tx.Set(ext_id, db.buffer[cur:cur+written])
|
2024-03-10 15:41:13 -06:00
|
|
|
if err != nil {
|
2024-03-28 20:28:07 -06:00
|
|
|
return fmt.Errorf("Extension set err: %s, %w", reflect.TypeOf(ext), err)
|
2024-03-10 15:41:13 -06:00
|
|
|
}
|
2024-03-30 15:42:06 -06:00
|
|
|
cur += written
|
2024-03-10 15:41:13 -06:00
|
|
|
}
|
2024-03-08 00:22:51 -07:00
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2024-03-30 15:42:06 -06:00
|
|
|
func (db *BadgerDB) LoadNode(ctx *Context, id NodeID) (*Node, error) {
|
2024-03-10 15:41:13 -06:00
|
|
|
var node *Node = nil
|
2024-03-30 15:42:06 -06:00
|
|
|
|
|
|
|
err := db.View(func(tx *badger.Txn) error {
|
2024-03-10 15:41:13 -06:00
|
|
|
// Get the base key bytes
|
|
|
|
id_ser, err := id.MarshalBinary()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
// Get the node value
|
|
|
|
node_item, err := tx.Get(id_ser)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
err = node_item.Value(func(val []byte) error {
|
2024-03-30 15:42:06 -06:00
|
|
|
ctx.Log.Logf("db", "DESERIALIZE_NODE(%d bytes): %+v", len(val), val)
|
2024-03-10 15:41:13 -06:00
|
|
|
node, err = Deserialize[*Node](ctx, val)
|
|
|
|
return err
|
|
|
|
})
|
|
|
|
|
|
|
|
if err != nil {
|
2024-03-30 15:42:06 -06:00
|
|
|
return fmt.Errorf("Failed to deserialize Node %s - %w", id, err)
|
2024-03-10 15:41:13 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
// Get the signal queue
|
|
|
|
sigqueue_id := append(id_ser, []byte(" - SIGQUEUE")...)
|
|
|
|
sigqueue_item, err := tx.Get(sigqueue_id)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
err = sigqueue_item.Value(func(val []byte) error {
|
|
|
|
node.SignalQueue, err = Deserialize[[]QueuedSignal](ctx, val)
|
|
|
|
return err
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
// Get the extension list
|
|
|
|
ext_list_id := append(id_ser, []byte(" - EXTLIST")...)
|
|
|
|
ext_list_item, err := tx.Get(ext_list_id)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
var ext_list []ExtType
|
|
|
|
ext_list_item.Value(func(val []byte) error {
|
|
|
|
ext_list, err = Deserialize[[]ExtType](ctx, val)
|
|
|
|
return err
|
|
|
|
})
|
|
|
|
|
|
|
|
// Get the extensions
|
|
|
|
for _, ext_type := range(ext_list) {
|
|
|
|
ext_id := binary.BigEndian.AppendUint64(id_ser, uint64(ext_type))
|
|
|
|
ext_item, err := tx.Get(ext_id)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2024-03-28 20:28:07 -06:00
|
|
|
ext_info, exists := ctx.Extensions[ext_type]
|
|
|
|
if exists == false {
|
|
|
|
return fmt.Errorf("Extension %s not in context", ext_type)
|
|
|
|
}
|
|
|
|
|
2024-03-10 15:41:13 -06:00
|
|
|
var ext Extension
|
2024-03-28 20:28:07 -06:00
|
|
|
var ok bool
|
2024-03-10 15:41:13 -06:00
|
|
|
err = ext_item.Value(func(val []byte) error {
|
2024-03-28 20:28:07 -06:00
|
|
|
value, _, err := DeserializeValue(ctx, val, ext_info.Type)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
ext, ok = value.Addr().Interface().(Extension)
|
|
|
|
if ok == false {
|
|
|
|
return fmt.Errorf("Parsed value %+v is not extension", value.Type())
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
2024-03-10 15:41:13 -06:00
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
node.Extensions[ext_type] = ext
|
|
|
|
}
|
|
|
|
|
2024-03-08 00:22:51 -07:00
|
|
|
return nil
|
|
|
|
})
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
2024-03-30 15:42:06 -06:00
|
|
|
} else if node == nil {
|
|
|
|
return nil, fmt.Errorf("Tried to return nil *Node from BadgerDB.LoadNode without error")
|
2024-03-08 00:22:51 -07:00
|
|
|
}
|
|
|
|
|
2024-03-10 15:41:13 -06:00
|
|
|
return node, nil
|
2024-03-08 00:22:51 -07:00
|
|
|
}
|