Sort map entries when serializing so maps are serialized consistently

gql_cataclysm
noah metz 2023-10-30 13:23:08 -06:00
parent dbe819fd05
commit 2081771135
2 changed files with 17 additions and 5 deletions
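
For context: the Go runtime deliberately randomizes map iteration order, so a serializer that emits entries in iteration order can produce different bytes on different runs for the same map. A minimal standalone demonstration, independent of this repo's code:

package main

import "fmt"

func main() {
	m := map[string]int{"a": 1, "b": 2, "c": 3}
	// The Go runtime randomizes map iteration order, so the printed
	// order can differ from one run of this program to the next.
	for k, v := range m {
		fmt.Println(k, v)
	}
}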

@@ -293,7 +293,7 @@ func nodeLoop(ctx *Context, node *Node) error {
 		}
 		validated := ed25519.Verify(msg.Source, sig_data, msg.Signature)
 		if validated == false {
-			ctx.Log.Logf("signal", "SIGNAL_VERIFY_ERR: %s - %s", node.ID, reflect.TypeOf(msg.Signal))
+			ctx.Log.Logf("signal_verify", "SIGNAL_VERIFY_ERR: %s - %s", node.ID, reflect.TypeOf(msg.Signal))
 			continue
 		}

@@ -9,6 +9,7 @@ import (
 	"math"
 	"reflect"
 	"sort"
+	"bytes"
 )

 const (
@@ -826,7 +827,7 @@ func SerializeMap(ctx *Context, value reflect.Value) (Chunks, error) {
 		return NewChunks([]byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}), nil
 	}
-	map_chunks := Chunks{}
+	map_chunks := []Chunks{}
 	map_size := uint64(0)
 	map_iter := value.MapRange()
 	for map_iter.Next() {
@@ -838,19 +839,30 @@ func SerializeMap(ctx *Context, value reflect.Value) (Chunks, error) {
 		if err != nil {
 			return Chunks{}, err
 		}
-		map_chunks = map_chunks.AddChunksToEnd(key_chunks)
 		val_chunks, err := SerializeValue(ctx, val)
 		if err != nil {
 			return Chunks{}, err
 		}
-		map_chunks = map_chunks.AddChunksToEnd(val_chunks)
+		chunks := key_chunks.AddChunksToEnd(val_chunks)
+		map_chunks = append(map_chunks, chunks)
 	}
+	// Sort map_chunks
+	sort.Slice(map_chunks, func(i, j int) bool {
+		return bytes.Compare(map_chunks[i].First.Data, map_chunks[j].First.Data) < 0
+	})
+
+	chunks := Chunks{}
+	for _, chunk := range(map_chunks) {
+		chunks = chunks.AddChunksToEnd(chunk)
+	}
 	size_data := make([]byte, 8)
 	binary.BigEndian.PutUint64(size_data, map_size)
-	return NewChunks(size_data).AddChunksToEnd(map_chunks), nil
+	return NewChunks(size_data).AddChunksToEnd(chunks), nil
 }

 func DeserializeMap(ctx *Context, reflect_type reflect.Type, data []byte) (reflect.Value, []byte, error) {
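
For illustration, here is the same technique as a self-contained sketch using only the standard library. The repo's Chunks, SerializeValue, and AddChunksToEnd helpers are not reproduced; the length-prefixed string encoding below is an assumption chosen only to make the example runnable. Each key/value pair is encoded to bytes, the encoded pairs are sorted with bytes.Compare, and the sorted pairs are concatenated behind an entry-count prefix, so equal maps always serialize to identical bytes.

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"sort"
)

// serializeStringMap encodes a map deterministically: equal maps yield
// identical byte slices even though Go randomizes map iteration order.
func serializeStringMap(m map[string]string) []byte {
	// Encode each key/value pair as length-prefixed bytes.
	entries := make([][]byte, 0, len(m))
	for k, v := range m {
		var buf bytes.Buffer
		binary.Write(&buf, binary.BigEndian, uint64(len(k)))
		buf.WriteString(k)
		binary.Write(&buf, binary.BigEndian, uint64(len(v)))
		buf.WriteString(v)
		entries = append(entries, buf.Bytes())
	}

	// Sort the encoded pairs byte-wise so the output never depends on
	// the order the map happened to be iterated in.
	sort.Slice(entries, func(i, j int) bool {
		return bytes.Compare(entries[i], entries[j]) < 0
	})

	// Prefix with the entry count, then append the sorted pairs.
	out := make([]byte, 8)
	binary.BigEndian.PutUint64(out, uint64(len(m)))
	for _, e := range entries {
		out = append(out, e...)
	}
	return out
}

func main() {
	a := map[string]string{"x": "1", "y": "2", "z": "3"}
	b := map[string]string{"z": "3", "y": "2", "x": "1"}
	fmt.Println(bytes.Equal(serializeStringMap(a), serializeStringMap(b))) // always true
}

The commit applies the same sort.Slice/bytes.Compare comparison to the per-entry chunk lists before concatenating them, which is what makes SerializeMap's output independent of iteration order.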