package graphvent

import (
  "bytes"
  "crypto/sha512"
  "encoding"
  "encoding/binary"
  "encoding/gob"
  "fmt"
  "math"
  "reflect"
  "sort"
)

const (
  TagBase            = "GraphventTag"
  ExtTypeBase        = "ExtType"
  NodeTypeBase       = "NodeType"
  SignalTypeBase     = "SignalType"
  PolicyTypeBase     = "PolicyType"
  SerializedTypeBase = "SerializedType"
  FieldNameBase      = "FieldName"
)

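// Hash derives a stable 64-bit identifier from a namespace prefix and a name:
// it hashes base + 0x00 + name with SHA-512 and keeps the first 8 bytes as a
// big-endian uint64, so the same (base, name) pair always yields the same
// SerializedType. Illustrative (hypothetical) call:
//
//   gqlNode := NewNodeType("GQL") // == NodeType(Hash(NodeTypeBase, "GQL"))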
func Hash(base string, name string) SerializedType {
  digest := append([]byte(base), 0x00)
  digest = append(digest, []byte(name)...)
  hash := sha512.Sum512(digest)
  return SerializedType(binary.BigEndian.Uint64(hash[0:8]))
}

type SerializedType uint64

func (t SerializedType) String() string {
  return fmt.Sprintf("0x%x", uint64(t))
}

type ExtType SerializedType

func (t ExtType) String() string {
  return fmt.Sprintf("0x%x", uint64(t))
}

type NodeType SerializedType

func (t NodeType) String() string {
  return fmt.Sprintf("0x%x", uint64(t))
}

type SignalType SerializedType

func (t SignalType) String() string {
  return fmt.Sprintf("0x%x", uint64(t))
}

type PolicyType SerializedType

func (t PolicyType) String() string {
  return fmt.Sprintf("0x%x", uint64(t))
}

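// Chunk and Chunks form a singly linked list of byte slices used to accumulate
// serialized output without copying every intermediate buffer; Chunks.Slice
// flattens the list into a single []byte once serialization is complete.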
type Chunk struct {
  Data []byte
  Next *Chunk
}

type Chunks struct {
  First *Chunk
  Last *Chunk
}

func (chunks Chunks) String() string {
  cur := chunks.First
  str := "Chunks("
  for cur != nil {
    str = fmt.Sprintf("%s%+v, ", str, cur)
    cur = cur.Next
  }

  return fmt.Sprintf("%s)", str)
}

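// NewChunks builds a Chunks list from zero or more byte slices in order. A
// minimal usage sketch (hypothetical data):
//
//   header := []byte{0x00, 0x01}
//   payload := []byte("hello")
//   wire := NewChunks(header).AddDataToEnd(payload).Slice() // header then payload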
func NewChunks(datas ...[]byte) Chunks {
  var first *Chunk = nil
  var last *Chunk = nil

  if len(datas) >= 1 {
    first = &Chunk{
      Data: datas[0],
      Next: nil,
    }
    last = first

    for _, data := range(datas[1:]) {
      last.Next = &Chunk{
        Data: data,
        Next: nil,
      }
      last = last.Next
    }
  }

  if (first == nil || last == nil) && (first != last) {
    panic(fmt.Sprintf("Attempted to construct invalid Chunks with NewChunks %+v - %+v", first, last))
  }
  return Chunks{
    First: first,
    Last: last,
  }
}

func (chunks Chunks) AddDataToEnd(datas ...[]byte) Chunks {
  if chunks.First == nil && chunks.Last == nil {
    return NewChunks(datas...)
  } else if chunks.First == nil || chunks.Last == nil {
    panic(fmt.Sprintf("Invalid chunks %+v", chunks))
  }

  for _, data := range(datas) {
    chunks.Last.Next = &Chunk{
      Data: data,
      Next: nil,
    }
    chunks.Last = chunks.Last.Next
  }

  return chunks
}

func (chunks Chunks) AddChunksToEnd(new_chunks Chunks) Chunks {
  if chunks.Last == nil && chunks.First == nil {
    return new_chunks
  } else if chunks.Last == nil || chunks.First == nil {
    panic(fmt.Sprintf("Invalid chunks %+v", chunks))
  } else if new_chunks.Last == nil && new_chunks.First == nil {
    return chunks
  } else if new_chunks.Last == nil || new_chunks.First == nil {
    panic(fmt.Sprintf("Invalid new_chunks %+v", new_chunks))
  } else {
    chunks.Last.Next = new_chunks.First
    chunks.Last = new_chunks.Last
    return chunks
  }
}

func (chunks Chunks) GetSerializedSize() int {
  total_size := 0
  cur := chunks.First

  for cur != nil {
    total_size += len(cur.Data)
    cur = cur.Next
  }
  return total_size
}

func (chunks Chunks) Slice() []byte {
  total_size := chunks.GetSerializedSize()
  data := make([]byte, total_size)
  data_ptr := 0

  cur := chunks.First
  for cur != nil {
    copy(data[data_ptr:], cur.Data)
    data_ptr += len(cur.Data)
    cur = cur.Next
  }

  return data
}

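// The four function types below are the hooks a Context entry can provide:
// TypeSerializeFn/TypeDeserializeFn translate between a reflect.Type and a
// stack of SerializedType identifiers, while SerializeFn/DeserializeFn
// translate between a reflect.Value and raw bytes.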
type TypeSerializeFn func(*Context, reflect.Type) ([]SerializedType, error)
type SerializeFn func(*Context, reflect.Value) (Chunks, error)
type TypeDeserializeFn func(*Context, []SerializedType) (reflect.Type, []SerializedType, error)
type DeserializeFn func(*Context, reflect.Type, []byte) (reflect.Value, []byte, error)

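// The New* constructors are thin wrappers around Hash with the matching
// namespace constant, e.g. NewExtType("LOCKABLE") is Hash(ExtTypeBase, "LOCKABLE")
// converted to ExtType.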
func NewExtType(name string) ExtType {
  return ExtType(Hash(ExtTypeBase, name))
}

func NewNodeType(name string) NodeType {
  return NodeType(Hash(NodeTypeBase, name))
}

func NewSignalType(name string) SignalType {
  return SignalType(Hash(SignalTypeBase, name))
}

func NewPolicyType(name string) PolicyType {
  return PolicyType(Hash(PolicyTypeBase, name))
}

func NewSerializedType(name string) SerializedType {
  return Hash(SerializedTypeBase, name)
}

var (
  ListenerExtType = NewExtType("LISTENER")
  LockableExtType = NewExtType("LOCKABLE")
  GQLExtType = NewExtType("GQL")
  GroupExtType = NewExtType("GROUP")
  ACLExtType = NewExtType("ACL")
  EventExtType = NewExtType("EVENT")

  GQLNodeType = NewNodeType("GQL")
  BaseNodeType = NewNodeType("BASE")
  GroupNodeType = NewNodeType("GROUP")

  StopSignalType = NewSignalType("STOP")
  CreateSignalType = NewSignalType("CREATE")
  StartSignalType = NewSignalType("START")
  StatusSignalType = NewSignalType("STATUS")
  LinkSignalType = NewSignalType("LINK")
  LockSignalType = NewSignalType("LOCK")
  TimeoutSignalType = NewSignalType("TIMEOUT")
  ReadSignalType = NewSignalType("READ")
  ACLTimeoutSignalType = NewSignalType("ACL_TIMEOUT")
  ErrorSignalType = NewSignalType("ERROR")
  SuccessSignalType = NewSignalType("SUCCESS")
  ReadResultSignalType = NewSignalType("READ_RESULT")
  RemoveMemberSignalType = NewSignalType("REMOVE_MEMBER")
  AddMemberSignalType = NewSignalType("ADD_MEMBER")
  ACLSignalType = NewSignalType("ACL")
  AddSubGroupSignalType = NewSignalType("ADD_SUBGROUP")
  RemoveSubGroupSignalType = NewSignalType("REMOVE_SUBGROUP")
  StoppedSignalType = NewSignalType("STOPPED")
  EventControlSignalType = NewSignalType("EVENT_CONTORL")
  EventStateSignalType = NewSignalType("VEX_MATCH_STATUS")

  MemberOfPolicyType = NewPolicyType("USER_OF")
  RequirementOfPolicyType = NewPolicyType("REQUIEMENT_OF")
  PerNodePolicyType = NewPolicyType("PER_NODE")
  AllNodesPolicyType = NewPolicyType("ALL_NODES")
  ACLProxyPolicyType = NewPolicyType("ACL_PROXY")

  ErrorType = NewSerializedType("ERROR")
  PointerType = NewSerializedType("POINTER")
  SliceType = NewSerializedType("SLICE")
  StructType = NewSerializedType("STRUCT")
  IntType = NewSerializedType("INT")
  UIntType = NewSerializedType("UINT")
  BoolType = NewSerializedType("BOOL")
  Float64Type = NewSerializedType("FLOAT64")
  Float32Type = NewSerializedType("FLOAT32")
  UInt8Type = NewSerializedType("UINT8")
  UInt16Type = NewSerializedType("UINT16")
  UInt32Type = NewSerializedType("UINT32")
  UInt64Type = NewSerializedType("UINT64")
  Int8Type = NewSerializedType("INT8")
  Int16Type = NewSerializedType("INT16")
  Int32Type = NewSerializedType("INT32")
  Int64Type = NewSerializedType("INT64")
  StringType = NewSerializedType("STRING")
  ArrayType = NewSerializedType("ARRAY")
  InterfaceType = NewSerializedType("INTERFACE")
  MapType = NewSerializedType("MAP")

  ReqStateType = NewSerializedType("REQ_STATE")
  WaitInfoType = NewSerializedType("WAIT_INFO")
  SignalDirectionType = NewSerializedType("SIGNAL_DIRECTION")
  NodeStructType = NewSerializedType("NODE_STRUCT")
  QueuedSignalType = NewSerializedType("QUEUED_SIGNAL")
  NodeTypeSerialized = NewSerializedType("NODE_TYPE")
  ChangesSerialized = NewSerializedType("CHANGES")
  ExtTypeSerialized = NewSerializedType("EXT_TYPE")
  PolicyTypeSerialized = NewSerializedType("POLICY_TYPE")
  ExtSerialized = NewSerializedType("EXTENSION")
  PolicySerialized = NewSerializedType("POLICY")
  SignalSerialized = NewSerializedType("SIGNAL")
  NodeIDType = NewSerializedType("NODE_ID")
  UUIDType = NewSerializedType("UUID")
  PendingACLType = NewSerializedType("PENDING_ACL")
  PendingACLSignalType = NewSerializedType("PENDING_ACL_SIGNAL")
  TimeType = NewSerializedType("TIME")
  DurationType = NewSerializedType("DURATION")
  ResponseType = NewSerializedType("RESPONSE")
  StatusType = NewSerializedType("STATUS")
  TreeType = NewSerializedType("TREE")
  SerializedTypeSerialized = NewSerializedType("SERIALIZED_TYPE")
)

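// FieldInfo caches, per tagged struct field, its index path, the serialized
// type stack for its type, and the reflect.Type itself; StructInfo bundles the
// per-struct field table plus whether the type implements Deserializable.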
type FieldInfo struct {
  Index []int
  TypeStack []SerializedType
  Type reflect.Type
}

type StructInfo struct {
  Type reflect.Type
  FieldOrder []SerializedType
  FieldMap map[SerializedType]FieldInfo
  PostDeserialize bool
  PostDeserializeIdx int
}

type Deserializable interface {
  PostDeserialize(*Context) error
}

var deserializable_zero Deserializable = nil
var DeserializableType = reflect.TypeOf(&deserializable_zero).Elem()

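// GetStructInfo scans the visible fields of struct_type for a `gv:"..."` tag;
// untagged fields are ignored and a repeated tag is an error. The field hashes
// are sorted so that field order is deterministic. A hypothetical tagged
// struct:
//
//   type Point struct {
//     X int `gv:"x"`
//     Y int `gv:"y"`
//   }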
func GetStructInfo(ctx *Context, struct_type reflect.Type) (StructInfo, error) {
  field_order := []SerializedType{}
  field_map := map[SerializedType]FieldInfo{}
  for _, field := range reflect.VisibleFields(struct_type) {
    gv_tag, tagged_gv := field.Tag.Lookup("gv")
    if tagged_gv == false {
      continue
    } else {
      field_hash := Hash(FieldNameBase, gv_tag)
      _, exists := field_map[field_hash]
      if exists == true {
        return StructInfo{}, fmt.Errorf("gv tag %s is repeated", gv_tag)
      } else {
        field_type_stack, err := SerializeType(ctx, field.Type)
        if err != nil {
          return StructInfo{}, err
        }
        field_map[field_hash] = FieldInfo{
          field.Index,
          field_type_stack,
          field.Type,
        }
        field_order = append(field_order, field_hash)
      }
    }
  }

  sort.Slice(field_order, func(i, j int) bool {
    return uint64(field_order[i]) < uint64(field_order[j])
  })

  post_deserialize := false
  post_deserialize_idx := 0
  ptr_type := reflect.PointerTo(struct_type)
  if ptr_type.Implements(DeserializableType) {
    post_deserialize = true
    for i := 0; i < ptr_type.NumMethod(); i += 1 {
      method := ptr_type.Method(i)
      if method.Name == "PostDeserialize" {
        post_deserialize_idx = i
        break
      }
    }
  }

  return StructInfo{
    struct_type,
    field_order,
    field_map,
    post_deserialize,
    post_deserialize_idx,
  }, nil
}

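// SerializeStruct/DeserializeStruct close over a StructInfo and implement the
// struct wire format: an 8-byte big-endian field count, then for each field its
// 8-byte field-name hash followed by the field's serialized value.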
func SerializeStruct(info StructInfo) func(*Context, reflect.Value) (Chunks, error) {
  return func(ctx *Context, value reflect.Value) (Chunks, error) {
    struct_chunks := Chunks{}
    for _, field_hash := range(info.FieldOrder) {
      field_hash_bytes := make([]byte, 8)
      binary.BigEndian.PutUint64(field_hash_bytes, uint64(field_hash))

      field_info := info.FieldMap[field_hash]
      field_value := value.FieldByIndex(field_info.Index)

      field_chunks, err := SerializeValue(ctx, field_value)
      if err != nil {
        return Chunks{}, err
      }

      struct_chunks = struct_chunks.AddDataToEnd(field_hash_bytes).AddChunksToEnd(field_chunks)
      ctx.Log.Logf("serialize", "STRUCT_FIELD_CHUNKS: %+v", field_chunks)
    }

    size_data := make([]byte, 8)
    binary.BigEndian.PutUint64(size_data, uint64(len(info.FieldOrder)))
    return NewChunks(size_data).AddChunksToEnd(struct_chunks), nil
  }
}

func DeserializeStruct(info StructInfo) func(*Context, reflect.Type, []byte) (reflect.Value, []byte, error) {
  return func(ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
    if len(data) < 8 {
      return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize struct %d/8", len(data))
    }

    num_field_bytes := data[:8]
    data = data[8:]

    num_fields := binary.BigEndian.Uint64(num_field_bytes)

    struct_value := reflect.New(reflect_type).Elem()
    for i := uint64(0); i < num_fields; i++ {
      field_hash_bytes := data[:8]
      data = data[8:]
      field_hash := SerializedType(binary.BigEndian.Uint64(field_hash_bytes))
      field_info, exists := info.FieldMap[field_hash]
      if exists == false {
        return reflect.Value{}, nil, fmt.Errorf("%+v is not a field in %+v", field_hash, info.Type)
      }

      var field_value reflect.Value
      var err error
      field_value, data, err = DeserializeValue(ctx, field_info.Type, data)
      if err != nil {
        return reflect.Value{}, nil, err
      }

      field_reflect := struct_value.FieldByIndex(field_info.Index)
      field_reflect.Set(field_value)
    }

    if info.PostDeserialize == true {
      post_deserialize_method := struct_value.Addr().Method(info.PostDeserializeIdx)
      results := post_deserialize_method.Call([]reflect.Value{reflect.ValueOf(ctx)})
      err_if := results[0].Interface()
      if err_if != nil {
        return reflect.Value{}, nil, err_if.(error)
      }
    }

    return struct_value, data, nil
  }
}

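// SerializeGob/DeserializeGob are the fallback for types that implement
// gob.GobEncoder/gob.GobDecoder: the gob payload is written with an 8-byte
// big-endian length prefix.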
func SerializeGob(ctx *Context, value reflect.Value) (Chunks, error) {
  data := make([]byte, 8)
  gob_ser, err := value.Interface().(gob.GobEncoder).GobEncode()
  if err != nil {
    return Chunks{}, err
  }

  binary.BigEndian.PutUint64(data, uint64(len(gob_ser)))
  return NewChunks(data, gob_ser), nil
}

func DeserializeGob[T any, PT interface{gob.GobDecoder; *T}](ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  if len(data) < 8 {
    return reflect.Value{}, nil, fmt.Errorf("Not enough bytes to deserialize gob %d/8", len(data))
  }

  size_bytes := data[:8]
  size := binary.BigEndian.Uint64(size_bytes)
  gob_data := data[8:8+size]
  data = data[8+size:]

  gob_ptr := reflect.New(reflect_type)
  err := gob_ptr.Interface().(gob.GobDecoder).GobDecode(gob_data)
  if err != nil {
    return reflect.Value{}, nil, err
  }

  return gob_ptr.Elem(), data, nil
}

func SerializeInt8(ctx *Context, value reflect.Value) (Chunks, error) {
  data := []byte{byte(value.Int())}

  return NewChunks(data), nil
}

func SerializeInt16(ctx *Context, value reflect.Value) (Chunks, error) {
  data := make([]byte, 2)
  binary.BigEndian.PutUint16(data, uint16(value.Int()))

  return NewChunks(data), nil
}

func SerializeInt32(ctx *Context, value reflect.Value) (Chunks, error) {
  data := make([]byte, 4)
  binary.BigEndian.PutUint32(data, uint32(value.Int()))

  return NewChunks(data), nil
}

func SerializeInt64(ctx *Context, value reflect.Value) (Chunks, error) {
  data := make([]byte, 8)
  binary.BigEndian.PutUint64(data, uint64(value.Int()))

  return NewChunks(data), nil
}

func SerializeUint8(ctx *Context, value reflect.Value) (Chunks, error) {
  data := []byte{byte(value.Uint())}

  return NewChunks(data), nil
}

func SerializeUint16(ctx *Context, value reflect.Value) (Chunks, error) {
  data := make([]byte, 2)
  binary.BigEndian.PutUint16(data, uint16(value.Uint()))

  return NewChunks(data), nil
}

func SerializeUint32(ctx *Context, value reflect.Value) (Chunks, error) {
  data := make([]byte, 4)
  binary.BigEndian.PutUint32(data, uint32(value.Uint()))

  return NewChunks(data), nil
}

func SerializeUint64(ctx *Context, value reflect.Value) (Chunks, error) {
  data := make([]byte, 8)
  binary.BigEndian.PutUint64(data, value.Uint())

  return NewChunks(data), nil
}

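// The generic DeserializeUintN helpers decode a fixed-width big-endian integer
// into a freshly constructed value of reflect_type; the type parameter pins
// down which Go kinds each width is allowed to decode into.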
func DeserializeUint64[T ~uint64 | ~int64](ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  uint_size := 8
  if len(data) < uint_size {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize uint %d/%d", len(data), uint_size)
  }

  uint_bytes := data[:uint_size]
  data = data[uint_size:]
  uint_value := reflect.New(reflect_type).Elem()

  typed_value := T(binary.BigEndian.Uint64(uint_bytes))
  uint_value.Set(reflect.ValueOf(typed_value))

  return uint_value, data, nil
}

func DeserializeUint32[T ~uint32 | ~uint | ~int32 | ~int](ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  uint_size := 4
  if len(data) < uint_size {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize uint %d/%d", len(data), uint_size)
  }

  uint_bytes := data[:uint_size]
  data = data[uint_size:]
  uint_value := reflect.New(reflect_type).Elem()

  typed_value := T(binary.BigEndian.Uint32(uint_bytes))
  uint_value.Set(reflect.ValueOf(typed_value))

  return uint_value, data, nil
}

func DeserializeUint16[T ~uint16 | ~int16](ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  uint_size := 2
  if len(data) < uint_size {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize uint %d/%d", len(data), uint_size)
  }

  uint_bytes := data[:uint_size]
  data = data[uint_size:]
  uint_value := reflect.New(reflect_type).Elem()

  typed_value := T(binary.BigEndian.Uint16(uint_bytes))
  uint_value.Set(reflect.ValueOf(typed_value))

  return uint_value, data, nil
}

func DeserializeUint8[T ~uint8 | ~int8](ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  uint_size := 1
  if len(data) < uint_size {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize uint %d/%d", len(data), uint_size)
  }

  uint_bytes := data[:uint_size]
  data = data[uint_size:]
  uint_value := reflect.New(reflect_type).Elem()

  typed_value := T(uint_bytes[0])
  uint_value.Set(reflect.ValueOf(typed_value))

  return uint_value, data, nil
}

func SerializeFloat64(ctx *Context, value reflect.Value) (Chunks, error) {
  data := make([]byte, 8)
  float_representation := math.Float64bits(value.Float())
  binary.BigEndian.PutUint64(data, float_representation)
  return NewChunks(data), nil
}

func DeserializeFloat64[T ~float64](ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  if len(data) < 8 {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize float64 %d/8", len(data))
  }

  float_bytes := data[0:8]
  data = data[8:]

  float_representation := binary.BigEndian.Uint64(float_bytes)
  float := math.Float64frombits(float_representation)

  float_value := reflect.New(reflect_type).Elem()
  float_value.Set(reflect.ValueOf(T(float)))

  return float_value, data, nil
}

func SerializeFloat32(ctx *Context, value reflect.Value) (Chunks, error) {
  data := make([]byte, 4)
  float_representation := math.Float32bits(float32(value.Float()))
  binary.BigEndian.PutUint32(data, float_representation)
  return NewChunks(data), nil
}

func DeserializeFloat32[T ~float32](ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  if len(data) < 4 {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize float32 %d/4", len(data))
  }

  float_bytes := data[0:4]
  data = data[4:]

  float_representation := binary.BigEndian.Uint32(float_bytes)
  float := math.Float32frombits(float_representation)

  float_value := reflect.New(reflect_type).Elem()
  float_value.Set(reflect.ValueOf(T(float)))

  return float_value, data, nil
}

func SerializeString(ctx *Context, value reflect.Value) (Chunks, error) {
  data := make([]byte, 8)
  binary.BigEndian.PutUint64(data, uint64(value.Len()))

  return NewChunks(data, []byte(value.String())), nil
}

func DeserializeString[T ~string](ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  if len(data) < 8 {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize string %d/8", len(data))
  }

  size_bytes := data[0:8]
  data = data[8:]

  size := binary.BigEndian.Uint64(size_bytes)
  if len(data) < int(size) {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize string of len %d, %d/%d", size, len(data), size)
  }

  string_value := reflect.New(reflect_type).Elem()
  string_value.Set(reflect.ValueOf(T(string(data[:size]))))
  data = data[size:]

  return string_value, data, nil
}

func SerializeBool(ctx *Context, value reflect.Value) (Chunks, error) {
  if value.Bool() == true {
    return NewChunks([]byte{0xFF}), nil
  } else {
    return NewChunks([]byte{0x00}), nil
  }
}

func DeserializeBool[T ~bool](ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  if len(data) < 1 {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize bool %d/1", len(data))
  }
  flag := data[0]
  data = data[1:]

  bool_value := reflect.New(reflect_type).Elem()
  if flag == 0x00 {
    bool_value.Set(reflect.ValueOf(T(false)))
  } else {
    bool_value.Set(reflect.ValueOf(T(true)))
  }

  return bool_value, data, nil
}

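// Pointers are encoded with a one-byte flag: 0x00 for a nil pointer, 0x01
// followed by the serialized pointee otherwise.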
func DeserializeTypePointer(ctx *Context, type_stack []SerializedType) (reflect.Type, []SerializedType, error) {
  elem_type, remaining, err := DeserializeType(ctx, type_stack)
  if err != nil {
    return nil, nil, err
  }

  return reflect.PointerTo(elem_type), remaining, nil
}

func SerializePointer(ctx *Context, value reflect.Value) (Chunks, error) {
  if value.IsZero() {
    return NewChunks([]byte{0x00}), nil
  } else {
    flags := NewChunks([]byte{0x01})

    elem_chunks, err := SerializeValue(ctx, value.Elem())
    if err != nil {
      return Chunks{}, err
    }

    return flags.AddChunksToEnd(elem_chunks), nil
  }
}

func DeserializePointer(ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  if len(data) < 1 {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize pointer %d/1", len(data))
  }

  flags := data[0]
  data = data[1:]

  pointer_value := reflect.New(reflect_type).Elem()

  if flags != 0x00 {
    var element_value reflect.Value
    var err error
    element_value, data, err = DeserializeValue(ctx, reflect_type.Elem(), data)
    if err != nil {
      return reflect.Value{}, nil, err
    }

    pointer_value.Set(element_value.Addr())
  }

  return pointer_value, data, nil
}

func SerializeTypeStub(ctx *Context, reflect_type reflect.Type) ([]SerializedType, error) {
  return nil, nil
}

func DeserializeTypeStub[T any](ctx *Context, type_stack []SerializedType) (reflect.Type, []SerializedType, error) {
  var zero T
  return reflect.TypeOf(zero), type_stack, nil
}

func SerializeTypeElem(ctx *Context, reflect_type reflect.Type) ([]SerializedType, error) {
  return SerializeType(ctx, reflect_type.Elem())
}

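// Slices are length-prefixed with an 8-byte big-endian count; a nil slice is
// written as the sentinel 0xFFFFFFFFFFFFFFFF and an empty slice as 0, with the
// elements following in order.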
func SerializeSlice(ctx *Context, value reflect.Value) (Chunks, error) {
  if value.IsZero() {
    return NewChunks([]byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}), nil
  } else if value.Len() == 0 {
    return NewChunks([]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}), nil
  } else {
    slice_chunks := Chunks{}
    for i := 0; i < value.Len(); i += 1 {
      val := value.Index(i)
      element_chunks, err := SerializeValue(ctx, val)
      if err != nil {
        return Chunks{}, err
      }
      slice_chunks = slice_chunks.AddChunksToEnd(element_chunks)
    }

    size_data := make([]byte, 8)
    binary.BigEndian.PutUint64(size_data, uint64(value.Len()))

    return NewChunks(size_data).AddChunksToEnd(slice_chunks), nil
  }
}

func DeserializeTypeSlice(ctx *Context, type_stack []SerializedType) (reflect.Type, []SerializedType, error) {
  elem_type, remaining, err := DeserializeType(ctx, type_stack)
  if err != nil {
    return nil, nil, err
  }

  reflect_type := reflect.SliceOf(elem_type)
  return reflect_type, remaining, nil
}

func DeserializeSlice(ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  if len(data) < 8 {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize slice %d/8", len(data))
  }

  slice_size := binary.BigEndian.Uint64(data[0:8])
  slice_value := reflect.New(reflect_type).Elem()
  data = data[8:]

  if slice_size != 0xFFFFFFFFFFFFFFFF {
    slice_unaddr := reflect.MakeSlice(reflect_type, int(slice_size), int(slice_size))
    slice_value.Set(slice_unaddr)
    for i := uint64(0); i < slice_size; i += 1 {
      var element_value reflect.Value
      var err error
      element_value, data, err = DeserializeValue(ctx, reflect_type.Elem(), data)
      if err != nil {
        return reflect.Value{}, nil, err
      }

      slice_elem := slice_value.Index(int(i))
      slice_elem.Set(element_value)
    }
  }

  return slice_value, data, nil
}

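// Maps are written as an 8-byte big-endian entry count (0xFFFFFFFFFFFFFFFF for
// a nil map) followed by key/value pairs; the pairs are sorted by the leading
// bytes of each serialized key so equal maps always produce identical output.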
func SerializeTypeMap(ctx *Context, reflect_type reflect.Type) ([]SerializedType, error) {
  key_stack, err := SerializeType(ctx, reflect_type.Key())
  if err != nil {
    return nil, err
  }

  elem_stack, err := SerializeType(ctx, reflect_type.Elem())
  if err != nil {
    return nil, err
  }

  return append(key_stack, elem_stack...), nil
}

func DeserializeTypeMap(ctx *Context, type_stack []SerializedType) (reflect.Type, []SerializedType, error) {
  key_type, after_key, err := DeserializeType(ctx, type_stack)
  if err != nil {
    return nil, nil, err
  }

  elem_type, after_elem, err := DeserializeType(ctx, after_key)
  if err != nil {
    return nil, nil, err
  }

  map_type := reflect.MapOf(key_type, elem_type)
  return map_type, after_elem, nil
}

func SerializeMap(ctx *Context, value reflect.Value) (Chunks, error) {
  if value.IsZero() == true {
    return NewChunks([]byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}), nil
  }

  map_chunks := []Chunks{}
  map_size := uint64(0)
  map_iter := value.MapRange()
  for map_iter.Next() {
    map_size = map_size + 1
    key := map_iter.Key()
    val := map_iter.Value()

    key_chunks, err := SerializeValue(ctx, key)
    if err != nil {
      return Chunks{}, err
    }

    val_chunks, err := SerializeValue(ctx, val)
    if err != nil {
      return Chunks{}, err
    }

    chunks := key_chunks.AddChunksToEnd(val_chunks)
    map_chunks = append(map_chunks, chunks)
  }

  // Sort the entries by their first chunk so map serialization is deterministic
  sort.Slice(map_chunks, func(i, j int) bool {
    return bytes.Compare(map_chunks[i].First.Data, map_chunks[j].First.Data) < 0
  })
  chunks := Chunks{}
  for _, chunk := range(map_chunks) {
    chunks = chunks.AddChunksToEnd(chunk)
  }

  size_data := make([]byte, 8)
  binary.BigEndian.PutUint64(size_data, map_size)

  return NewChunks(size_data).AddChunksToEnd(chunks), nil
}

func DeserializeMap(ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  if len(data) < 8 {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize map %d/8", len(data))
  }

  size_bytes := data[:8]
  data = data[8:]

  size := binary.BigEndian.Uint64(size_bytes)

  map_value := reflect.New(reflect_type).Elem()
  if size == 0xFFFFFFFFFFFFFFFF {
    return map_value, data, nil
  }

  map_unaddr := reflect.MakeMapWithSize(reflect_type, int(size))
  map_value.Set(map_unaddr)

  for i := uint64(0); i < size; i++ {
    var err error
    var key_value reflect.Value
    key_value, data, err = DeserializeValue(ctx, reflect_type.Key(), data)
    if err != nil {
      return reflect.Value{}, nil, err
    }

    var val_value reflect.Value
    val_value, data, err = DeserializeValue(ctx, reflect_type.Elem(), data)
    if err != nil {
      return reflect.Value{}, nil, err
    }

    map_value.SetMapIndex(key_value, val_value)
  }

  return map_value, data, nil
}

func SerializeTypeArray(ctx *Context, reflect_type reflect.Type) ([]SerializedType, error) {
  size := SerializedType(reflect_type.Len())
  elem_stack, err := SerializeType(ctx, reflect_type.Elem())
  if err != nil {
    return nil, err
  }

  return append([]SerializedType{size}, elem_stack...), nil
}

func SerializeUUID(ctx *Context, value reflect.Value) (Chunks, error) {
  uuid_ser, err := value.Interface().(encoding.BinaryMarshaler).MarshalBinary()
  if err != nil {
    return Chunks{}, err
  }

  if len(uuid_ser) != 16 {
    return Chunks{}, fmt.Errorf("Wrong length of uuid: %d/16", len(uuid_ser))
  }

  return NewChunks(uuid_ser), nil
}

func DeserializeUUID[T ~[16]byte](ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  if len(data) < 16 {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize UUID %d/16", len(data))
  }

  uuid_bytes := data[:16]
  data = data[16:]

  uuid_value := reflect.New(reflect_type).Elem()
  uuid_value.Set(reflect.ValueOf(T(uuid_bytes)))

  return uuid_value, data, nil
}

func SerializeArray(ctx *Context, value reflect.Value) (Chunks, error) {
  data := Chunks{}
  for i := 0; i < value.Len(); i += 1 {
    element := value.Index(i)
    element_chunks, err := SerializeValue(ctx, element)
    if err != nil {
      return Chunks{}, err
    }
    data = data.AddChunksToEnd(element_chunks)
  }

  return data, nil
}

func DeserializeTypeArray(ctx *Context, type_stack []SerializedType) (reflect.Type, []SerializedType, error) {
  if len(type_stack) < 1 {
    return nil, nil, fmt.Errorf("Not enough values in type stack to deserialize array")
  }

  size := int(type_stack[0])
  element_type, remaining, err := DeserializeType(ctx, type_stack[1:])
  if err != nil {
    return nil, nil, err
  }

  array_type := reflect.ArrayOf(size, element_type)
  return array_type, remaining, nil
}

func DeserializeArray(ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  array_value := reflect.New(reflect_type).Elem()
  for i := 0; i < array_value.Len(); i += 1 {
    var element_value reflect.Value
    var err error
    element_value, data, err = DeserializeValue(ctx, reflect_type.Elem(), data)
    if err != nil {
      return reflect.Value{}, nil, err
    }

    element := array_value.Index(i)
    element.Set(element_value)
  }

  return array_value, data, nil
}

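// Interface values are encoded as a one-byte flag (0xFF for a nil interface,
// 0x00 otherwise) followed by a nested SerializedValue carrying the concrete
// element's type stack and bytes, so the element type can be recovered before
// its data is decoded.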
func SerializeInterface(ctx *Context, value reflect.Value) (Chunks, error) {
  if value.IsZero() == true {
    return NewChunks([]byte{0xFF}), nil
  }

  type_stack, err := SerializeType(ctx, value.Elem().Type())
  if err != nil {
    return Chunks{}, err
  }

  elem_chunks, err := SerializeValue(ctx, value.Elem())
  if err != nil {
    return Chunks{}, err
  }

  data := elem_chunks.Slice()

  serialized_chunks, err := SerializedValue{type_stack, data}.Chunks()
  if err != nil {
    return Chunks{}, err
  }

  return NewChunks([]byte{0x00}).AddChunksToEnd(serialized_chunks), nil
}

func DeserializeInterface(ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  if len(data) < 1 {
    return reflect.Value{}, nil, fmt.Errorf("Not enough data to deserialize interface %d/1", len(data))
  }

  flags := data[0]
  data = data[1:]
  if flags == 0xFF {
    return reflect.New(reflect_type).Elem(), data, nil
  }

  serialized_value, remaining, err := ParseSerializedValue(data)
  if err != nil {
    return reflect.Value{}, nil, err
  }

  elem_type, types_remaining, err := DeserializeType(ctx, serialized_value.TypeStack)
  if err != nil {
    return reflect.Value{}, nil, err
  } else if len(types_remaining) > 0 {
    return reflect.Value{}, nil, fmt.Errorf("Types remaining in interface stack after deserializing")
  }

  elem_value, data_remaining, err := DeserializeValue(ctx, elem_type, serialized_value.Data)
  if err != nil {
    return reflect.Value{}, nil, err
  } else if len(data_remaining) > 0 {
    return reflect.Value{}, nil, fmt.Errorf("Data remaining in interface data after deserializing")
  }

  interface_value := reflect.New(reflect_type).Elem()
  interface_value.Set(elem_value)

  return interface_value, remaining, nil
}

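// SerializedValue is the self-describing envelope used for interface payloads
// and top-level values: a type stack identifying the concrete type plus the
// raw serialized bytes. On the wire it is two 8-byte big-endian counts (number
// of stack entries, number of data bytes), then the stack entries, then the
// data.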
type SerializedValue struct {
  TypeStack []SerializedType
  Data []byte
}

func SerializeAny[T any](ctx *Context, value T) (SerializedValue, error) {
  reflect_value := reflect.ValueOf(value)
  type_stack, err := SerializeType(ctx, reflect_value.Type())
  if err != nil {
    return SerializedValue{}, err
  }
  data, err := SerializeValue(ctx, reflect_value)
  if err != nil {
    return SerializedValue{}, err
  }

  return SerializedValue{type_stack, data.Slice()}, nil
}

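// SerializeType and SerializeValue dispatch first on an exact reflect.Type
// registration in ctx.TypeReflects and fall back to the kind-level handlers in
// ctx.Kinds; DeserializeType/DeserializeValue mirror that lookup on the way
// back in.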
func SerializeType(ctx *Context, reflect_type reflect.Type) ([]SerializedType, error) {
  ctx.Log.Logf("serialize", "Serializing type %+v", reflect_type)

  type_info, type_exists := ctx.TypeReflects[reflect_type]
  var serialize_type TypeSerializeFn = nil
  var ctx_type SerializedType
  if type_exists == true {
    serialize_type = type_info.TypeSerialize
    ctx_type = type_info.Type
  }

  if serialize_type == nil {
    kind_info, handled := ctx.Kinds[reflect_type.Kind()]
    if handled == true {
      if type_exists == false {
        ctx_type = kind_info.Type
      }
      serialize_type = kind_info.TypeSerialize
    }
  }

  type_stack := []SerializedType{ctx_type}
  if serialize_type != nil {
    extra_types, err := serialize_type(ctx, reflect_type)
    if err != nil {
      return nil, err
    }
    return append(type_stack, extra_types...), nil
  } else {
    return type_stack, nil
  }
}

func SerializeValue(ctx *Context, value reflect.Value) (Chunks, error) {
  type_info, type_exists := ctx.TypeReflects[value.Type()]
  var serialize SerializeFn = nil
  if type_exists == true {
    if type_info.Serialize != nil {
      serialize = type_info.Serialize
    }
  }

  if serialize == nil {
    kind_info, handled := ctx.Kinds[value.Kind()]
    if handled {
      serialize = kind_info.Serialize
    } else {
      return Chunks{}, fmt.Errorf("Don't know how to serialize %+v", value.Type())
    }
  }

  return serialize(ctx, value)
}

func ExtField(ctx *Context, ext Extension, field_name string) (reflect.Value, error) {
  if ext == nil {
    return reflect.Value{}, fmt.Errorf("Cannot get fields on nil Extension")
  }

  ext_value := reflect.ValueOf(ext).Elem()
  for _, field := range reflect.VisibleFields(ext_value.Type()) {
    gv_tag, tagged := field.Tag.Lookup("gv")
    if tagged == true && gv_tag == field_name {
      return ext_value.FieldByIndex(field.Index), nil
    }
  }

  return reflect.Value{}, fmt.Errorf("%s is not a field in %+v", field_name, reflect.TypeOf(ext))
}

func SerializeField(ctx *Context, ext Extension, field_name string) (SerializedValue, error) {
  field_value, err := ExtField(ctx, ext, field_name)
  if err != nil {
    return SerializedValue{}, err
  }
  type_stack, err := SerializeType(ctx, field_value.Type())
  if err != nil {
    return SerializedValue{}, err
  }
  data, err := SerializeValue(ctx, field_value)
  if err != nil {
    return SerializedValue{}, err
  }
  return SerializedValue{type_stack, data.Slice()}, nil
}

func (value SerializedValue) Chunks() (Chunks, error) {
  header_data := make([]byte, 16)
  binary.BigEndian.PutUint64(header_data[0:8], uint64(len(value.TypeStack)))
  binary.BigEndian.PutUint64(header_data[8:16], uint64(len(value.Data)))

  type_stack_bytes := make([][]byte, len(value.TypeStack))
  for i, ctx_type := range(value.TypeStack) {
    type_stack_bytes[i] = make([]byte, 8)
    binary.BigEndian.PutUint64(type_stack_bytes[i], uint64(ctx_type))
  }

  return NewChunks(header_data).AddDataToEnd(type_stack_bytes...).AddDataToEnd(value.Data), nil
}

func ParseSerializedValue(data []byte) (SerializedValue, []byte, error) {
  if len(data) < 16 {
    return SerializedValue{}, nil, fmt.Errorf("SerializedValue required to have at least 16 bytes when serialized")
  }
  num_types := int(binary.BigEndian.Uint64(data[0:8]))
  data_size := int(binary.BigEndian.Uint64(data[8:16]))
  type_stack := make([]SerializedType, num_types)
  for i := 0; i < num_types; i += 1 {
    type_start := (i + 2) * 8
    type_end := (i + 3) * 8
    type_stack[i] = SerializedType(binary.BigEndian.Uint64(data[type_start:type_end]))
  }

  types_end := 8 * (num_types + 2)
  data_end := types_end + data_size
  return SerializedValue{
    type_stack,
    data[types_end:data_end],
  }, data[data_end:], nil
}

func DeserializeValue(ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
  ctx.Log.Logf("serialize", "Deserializing %+v with %d bytes", reflect_type, len(data))
  var deserialize DeserializeFn = nil

  type_info, type_exists := ctx.TypeReflects[reflect_type]
  if type_exists == true {
    deserialize = type_info.Deserialize
  } else {
    kind_info, exists := ctx.Kinds[reflect_type.Kind()]
    if exists == false {
      return reflect.Value{}, nil, fmt.Errorf("Cannot deserialize %+v/%+v: unknown type/kind", reflect_type, reflect_type.Kind())
    }
    deserialize = kind_info.Deserialize
  }

  return deserialize(ctx, reflect_type, data)
}

func DeserializeType(ctx *Context, type_stack []SerializedType) (reflect.Type, []SerializedType, error) {
  ctx.Log.Logf("deserialize_types", "Deserializing type stack %+v", type_stack)
  var deserialize_type TypeDeserializeFn = nil
  var reflect_type reflect.Type = nil

  if len(type_stack) < 1 {
    return nil, nil, fmt.Errorf("No elements in type stack to deserialize(DeserializeType)")
  }

  ctx_type := type_stack[0]
  type_stack = type_stack[1:]

  type_info, type_exists := ctx.Types[SerializedType(ctx_type)]
  if type_exists == true {
    deserialize_type = type_info.TypeDeserialize
    reflect_type = type_info.Reflect
  } else {
    kind_info, exists := ctx.KindTypes[SerializedType(ctx_type)]
    if exists == false {
      return nil, nil, fmt.Errorf("Cannot deserialize 0x%x: unknown type/kind", ctx_type)
    }
    deserialize_type = kind_info.TypeDeserialize
    reflect_type = kind_info.Base
  }

  if deserialize_type == nil {
    return reflect_type, type_stack, nil
  } else {
    return deserialize_type(ctx, type_stack)
  }
}