Sfoglia il codice sorgente

codec: unsafe mapRange iterator optimized

MapIter will return an invalid Key or Value if it has already written the result
into the Key or Value that was passed into it.
Ugorji Nwoke 6 anni fa
parent
commit
20da2f131b
4 ha cambiato i file con 62 aggiunte e 48 eliminazioni
  1. 19 12
      codec/codec_test.go
  2. 11 5
      codec/encode.go
  3. 4 4
      codec/helper.go
  4. 28 27
      codec/helper_unsafe.go

+ 19 - 12
codec/codec_test.go

@@ -3262,14 +3262,22 @@ func TestMapRangeIndex(t *testing.T) {
 		m1c[k] = *v
 	}
 
+	fnrv := func(r1, r2 reflect.Value) reflect.Value {
+		if r1.IsValid() {
+			return r1
+		}
+		return r2
+	}
+
+	// var vx reflect.Value
+
 	mt := reflect.TypeOf(m1)
-	it := mapRange(rv4i(m1),
-		mapAddressableRV(mt.Key(), mt.Key().Kind()),
-		mapAddressableRV(mt.Elem(), mt.Elem().Kind()),
-		true)
+	rvk := mapAddressableRV(mt.Key(), mt.Key().Kind())
+	rvv := mapAddressableRV(mt.Elem(), mt.Elem().Kind())
+	it := mapRange(rv4i(m1), rvk, rvv, true)
 	for it.Next() {
-		k := it.Key().Interface().(string)
-		v := it.Value().Interface().(*T)
+		k := fnrv(it.Key(), rvk).Interface().(string)
+		v := fnrv(it.Value(), rvv).Interface().(*T)
 		testDeepEqualErr(m1[k], v, t, "map-key-eq-it-key")
 		if _, ok := m1c[k]; ok {
 			delete(m1c, k)
@@ -3293,13 +3301,12 @@ func TestMapRangeIndex(t *testing.T) {
 	}
 
 	mt = reflect.TypeOf(m2)
-	it = mapRange(rv4i(m2),
-		mapAddressableRV(mt.Key(), mt.Key().Kind()),
-		mapAddressableRV(mt.Elem(), mt.Elem().Kind()),
-		true)
+	rvk = mapAddressableRV(mt.Key(), mt.Key().Kind())
+	rvv = mapAddressableRV(mt.Elem(), mt.Elem().Kind())
+	it = mapRange(rv4i(m2), rvk, rvv, true)
 	for it.Next() {
-		k := it.Key().Interface().(*T)
-		v := it.Value().Interface().(T)
+		k := fnrv(it.Key(), rvk).Interface().(*T)
+		v := fnrv(it.Value(), rvv).Interface().(T)
 		testDeepEqualErr(m2[k], v, t, "map-key-eq-it-key")
 		if _, ok := m2c[k]; ok {
 			delete(m2c, k)

+ 11 - 5
codec/encode.go

@@ -666,20 +666,26 @@ func (e *Encoder) kMap(f *codecFnInfo, rv reflect.Value) {
 	var rvk = mapAddressableRV(f.ti.key, ktypeKind)
 
 	it := mapRange(rv, rvk, rvv, true)
+	var vx reflect.Value
 	for it.Next() {
 		e.mapElemKey()
+		if vx = it.Key(); !vx.IsValid() {
+			vx = rvk
+		}
 		if keyTypeIsString {
 			if e.h.StringToRaw {
-				e.e.EncodeStringBytesRaw(bytesView(it.Key().String()))
+				e.e.EncodeStringBytesRaw(bytesView(vx.String()))
 			} else {
-				e.e.EncodeStringEnc(cUTF8, it.Key().String())
+				e.e.EncodeStringEnc(cUTF8, vx.String())
 			}
 		} else {
-			e.encodeValue(it.Key(), keyFn)
+			e.encodeValue(vx, keyFn)
 		}
 		e.mapElemValue()
-		iv := it.Value()
-		e.encodeValue(iv, valFn)
+		if vx = it.Value(); !vx.IsValid() {
+			vx = rvv
+		}
+		e.encodeValue(vx, valFn)
 	}
 	it.Done()
 

+ 4 - 4
codec/helper.go

@@ -151,10 +151,10 @@ const (
 	//    runtime.SetFinalizer(d, (*Decoder).Release)
 	useFinalizers = false
 
-	// usePool controls whether we use sync.Pool or not.
-	//
-	// sync.Pool can help manage memory use, but it may come at a performance cost.
-	usePool = false
+	// // usePool controls whether we use sync.Pool or not.
+	// //
+	// // sync.Pool can help manage memory use, but it may come at a performance cost.
+	// usePool = false
 
 	// xdebug controls whether xdebugf prints any output
 	xdebug = true

+ 28 - 27
codec/helper_unsafe.go

@@ -9,7 +9,6 @@ package codec
 
 import (
 	"reflect"
-	"sync"
 	"sync/atomic"
 	"time"
 	"unsafe"
@@ -684,8 +683,8 @@ type unsafeMapHashIter struct {
 // }
 
 type unsafeMapIter struct {
-	it               *unsafeMapHashIter
-	k, v             reflect.Value
+	it *unsafeMapHashIter
+	// k, v             reflect.Value
 	mtyp, ktyp, vtyp unsafe.Pointer
 	mptr, kptr, vptr unsafe.Pointer
 	kisref, visref   bool
@@ -695,15 +694,15 @@ type unsafeMapIter struct {
 	// _ [2]uint64 // padding (cache-aligned)
 }
 
-// pprof show that 13% of cbor encode time taken in
-// allocation of unsafeMapIter.
-// Options are to try to alloc on stack, or pool it.
-// Easiest to pool it.
-const unsafeMapIterUsePool = false
+// // pprof show that 13% of cbor encode time taken in
+// // allocation of unsafeMapIter.
+// // Options are to try to alloc on stack, or pool it.
+// // Easiest to pool it.
+// const unsafeMapIterUsePool = false
 
-var unsafeMapIterPool = sync.Pool{
-	New: func() interface{} { return new(unsafeMapIter) },
-}
+// var unsafeMapIterPool = sync.Pool{
+// 	New: func() interface{} { return new(unsafeMapIter) },
+// }
 
 func (t *unsafeMapIter) Next() (r bool) {
 	if t == nil || t.done {
@@ -725,22 +724,23 @@ func (t *unsafeMapIter) Next() (r bool) {
 	return true
 }
 
-func (t *unsafeMapIter) Key() reflect.Value {
-	return t.k
+func (t *unsafeMapIter) Key() (r reflect.Value) {
+	// return t.k
+	return
 }
 
 func (t *unsafeMapIter) Value() (r reflect.Value) {
-	if t.mapvalues {
-		return t.v
-	}
+	// if t.mapvalues {
+	// 	return t.v
+	// }
 	return
 }
 
 func (t *unsafeMapIter) Done() {
-	if unsafeMapIterUsePool && t != nil {
-		*t = unsafeMapIter{}
-		unsafeMapIterPool.Put(t)
-	}
+	// if unsafeMapIterUsePool && t != nil {
+	// 	*t = unsafeMapIter{}
+	// 	unsafeMapIterPool.Put(t)
+	// }
 }
 
 func unsafeMapSet(p, ptyp, p2 unsafe.Pointer, isref bool) {
@@ -763,13 +763,14 @@ func mapRange(m, k, v reflect.Value, mapvalues bool) (t *unsafeMapIter) {
 		// return &unsafeMapIter{done: true}
 		return
 	}
-	if unsafeMapIterUsePool {
-		t = unsafeMapIterPool.Get().(*unsafeMapIter)
-	} else {
-		t = new(unsafeMapIter)
-	}
-	t.k = k
-	t.v = v
+	// if unsafeMapIterUsePool {
+	// 	t = unsafeMapIterPool.Get().(*unsafeMapIter)
+	// } else {
+	//	t = new(unsafeMapIter)
+	// }
+	t = new(unsafeMapIter)
+	// t.k = k
+	// t.v = v
 	t.mapvalues = mapvalues
 
 	var urv *unsafeReflectValue