Browse Source

codec: misc refactoring, cleanup and optimizations

streamline container tracking

    Container tracking is now done strictly by the Encoder/Decoder;
    the drivers are just "dumb" helpers.
    Since only json uses element separators, we now call the
    separator methods directly on json(Enc|Dec)Driver.
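
    A minimal, self-contained sketch of the pattern (type and method names
    mirror the diff below; this is an illustration, not the actual
    implementation):

```go
package main

import "fmt"

// containerState mirrors the unexported enum in the codec package (values assumed).
type containerState uint8

const (
	containerMapStart containerState = iota + 1
	containerMapKey
	containerMapValue
)

// jsonEncDriver stands in for the only driver that needs element separators.
type jsonEncDriver struct{ out []byte }

func (j *jsonEncDriver) WriteMapElemKey()   { j.out = append(j.out, ',') }
func (j *jsonEncDriver) WriteMapElemValue() { j.out = append(j.out, ':') }

// Encoder owns container tracking; drivers are "dumb" helpers.
type Encoder struct {
	js   bool           // handle is a JsonHandle
	jenc *jsonEncDriver // set only when js is true
	c    containerState
}

// mapElemKey: only json gets the separator call; the state update is generic.
func (e *Encoder) mapElemKey() {
	if e.js {
		e.jenc.WriteMapElemKey()
	}
	e.c = containerMapKey
}

func main() {
	e := &Encoder{js: true, jenc: &jsonEncDriver{}}
	e.mapElemKey()
	fmt.Printf("state=%d sep=%q\n", e.c, e.jenc.out)
}
```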

support precise float32 handling where necessary (e.g. for json)

    Most formats will store float32 and float64 using exact bits,
    so they can be recovered with no loss of state.

    However, text-based formats MUST handle float32 explicitly,
    lest we risk losing precision when we attempt to treat
    a float32 as a float64.

    We support this by having JSON provide an explicit DecodeFloat32
    method; a helper method on Decoder dispatches to the right one.
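
    A standalone illustration (standard library only) of why a text format
    needs to know the bit size when formatting and parsing floats:

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	f := float32(0.1)

	// Formatting through float64 machinery prints the full float64
	// expansion of the nearest float32: correct, but noisy.
	fmt.Println(strconv.FormatFloat(float64(f), 'g', -1, 64)) // 0.10000000149011612

	// Telling the formatter the value is really 32-bit keeps the short form.
	fmt.Println(strconv.FormatFloat(float64(f), 'g', -1, 32)) // 0.1

	// On decode, parsing with bitSize 32 guarantees the result converts
	// to float32 without changing value.
	g, _ := strconv.ParseFloat("0.1", 32)
	fmt.Println(float32(g) == f) // true
}
```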

support [][]byte and map[X][]byte in fast-path encoding/decoding

    It is common to work with strings or bytes. Support both in fast-path,
    for folks who maintain map[string][]byte, etc.

    This required adding fast-path support for a bytes element type
    ([]byte / []uint8, referred to as "bytes" in the templates).

    Updated tests to include [][]byte and map[X][]byte.
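
    Usage is unchanged; concrete types like map[string][]byte simply hit the
    generated fast path instead of reflection. A round-trip with the public
    API (the handle choice here is arbitrary):

```go
package main

import (
	"fmt"

	"github.com/ugorji/go/codec"
)

func main() {
	var h codec.CborHandle // any Handle works; fast-path applies per concrete Go type

	in := map[string][]byte{"a": []byte("hello"), "b": {1, 2, 3}}

	var buf []byte
	if err := codec.NewEncoderBytes(&buf, &h).Encode(in); err != nil {
		panic(err)
	}

	var out map[string][]byte
	if err := codec.NewDecoderBytes(buf, &h).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Printf("%q\n", out["a"]) // "hello"
}
```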

general cleanup

    - Let encWriterSwitch and decReaderSwitch be retrieved via a trampoline call: w() or r() (see the sketch after this list)
    - eliminate extWrapper (taking up 4 words) and replace it with interfaceExtWrapper or bytesExtWrapper
    - Reduce use of stack variables where their benefit is unclear
    - move encoding of a sequence of bytes into its own function (Encoder.kSliceBytes)
    - Reduce the number of conditional calls by combining them
    - each jsonEncDriver keeps track of the bitset it uses to check whether a character is safe to write as-is, based on HTMLCharsAsIs
    - set (ci) is initialized to a 1-entry slice: this is the common case
    - json: encodeFloat32 takes bitsize into consideration
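
    On the w()/r() trampoline: the pointer fields Encoder.w and Decoder.r
    used to point back into the same struct; embedding the switch by value
    and returning its address from a tiny method the compiler can inline
    saves the field and the indirection. A simplified sketch (names mirror
    the diff; not the actual implementation):

```go
package main

import "fmt"

// encWriterSwitch stands in for the writer-dispatch struct embedded in Encoder.
type encWriterSwitch struct{ n int }

func (w *encWriterSwitch) writeb(p []byte) { w.n += len(p) }

// Encoder embeds the switch by value; no `w *encWriterSwitch` field needed.
type Encoder struct {
	encWriterSwitch
}

// w is the trampoline: a trivial accessor that keeps call sites in the
// e.w().writeb(...) shape without storing a back-pointer in the struct.
func (e *Encoder) w() *encWriterSwitch { return &e.encWriterSwitch }

func main() {
	var e Encoder
	e.w().writeb([]byte("abc"))
	fmt.Println(e.w().n) // 3
}
```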

Update benchmark scripts to be more useful
Ugorji Nwoke 6 years ago
parent
commit
86ce10f272

+ 1 - 1
README.md

@@ -289,7 +289,7 @@ some caveats. See Encode documentation.
 
 ```go
 const CborStreamBytes byte = 0x5f ...
-const GenVersion = 11
+const GenVersion = 12
 var GoRpc goRpc
 var MsgpackSpecRpc msgpackSpecRpc
 func GenHelperDecoder(d *Decoder) (gd genHelperDecoder, dd genHelperDecDriver)

+ 31 - 6
codec/bench/bench.sh

@@ -12,7 +12,7 @@ _go_get() {
        gopkg.in/mgo.v2/bson \
        gopkg.in/vmihailenco/msgpack.v2 \
        github.com/json-iterator/go \
-       github.com/mongodb/mongo-go-driver/bson \
+       go.mongodb.org/mongo-driver/bson \
        github.com/mailru/easyjson/...
 }
 
@@ -91,11 +91,35 @@ _suite_json() {
     done
 }
 
+_suite_very_quick_json_only_profile() {
+    go test -run Nothing -tags "alltests" -bench "__Json____.*${1}" \
+       -benchmem -benchtime 4s \
+       -cpuprofile cpu.out -memprofile mem.out -memprofilerate 1
+}
+
 _suite_very_quick_json() {
-    echo ">>>> bench TAGS: 'alltests x' SUITE: BenchmarkCodecQuickAllJsonSuite"
-    go test -run Nothing -tags "alltests x" -bench BenchmarkCodecVeryQuickAllJsonSuite -benchmem "$@"
+    # Quickly get numbers for json, stdjson, jsoniter and json (codecgen)"
+    echo ">>>> very quick json bench: hanging (middle) results is for codecgen"
+    local x=2
+    if [[ "$x" = 1 ]]; then
+        go test -run Nothing -tags "alltests x" -bench BenchmarkCodecVeryQuickAllJsonSuite -benchmem "$@"
+        echo
+        go test -run Nothing -tags "alltests codecgen" -bench "__Json____" -benchmem "$@"
+        return
+    fi
+    for j in "En" "De"; do
+        go test -run Nothing -tags "alltests x" -bench "__(Json|Std_Json|JsonIter).*${j}" -benchmem "$@"
+        echo
+        go test -run Nothing -tags "alltests codecgen" -bench "__Json____.*${j}" -benchmem "$@"
+        echo
+    done
 }
 
+_suite_very_quick_json_trim_output() {
+    _suite_very_quick_json  | grep -v -E "^(goos:|goarch:|pkg:|PASS|ok)"
+}
+
+
 _usage() {
     echo "usage: bench.sh -[dcsjq] for [download, code-generate, suite-of-tests, json-suite, quick-json-suite] respectively"
 }
@@ -107,10 +131,10 @@ _main() {
         return 1
     fi
     local args=()
-    while getopts "dcsjq" flag
+    while getopts "dcsjqp" flag
     do
         case "$flag" in
-            d|c|s|j|q) args+=( "$flag" ) ;;
+            d|c|s|j|q|p) args+=( "$flag" ) ;;
             *) _usage; return 1 ;;
         esac
     done
@@ -120,7 +144,8 @@ _main() {
     [[ " ${args[*]} " == *"c"*  ]] && _gen "$@"
     [[ " ${args[*]} " == *"s"* ]] && _suite "$@" && _suite_gen "$@" 
     [[ " ${args[*]} " == *"j"* ]] && _suite_json "$@"
-    [[ " ${args[*]} " == *"q"* ]] && _suite_very_quick_json "$@"
+    [[ " ${args[*]} " == *"q"* ]] && _suite_very_quick_json_trim_output "$@"
+    [[ " ${args[*]} " == *"p"* ]] && _suite_very_quick_json_only_profile "$@"
     # shift $((OPTIND-1))
 }
 

+ 2 - 0
codec/bench/shared_test.go

@@ -140,6 +140,8 @@ func init() {
 	testHandles = append(testHandles,
 		// testNoopH,
 		testMsgpackH, testBincH, testSimpleH, testCborH, testJsonH)
+	// JSON should do HTMLCharsAsIs by default
+	testJsonH.HTMLCharsAsIs = true
 	// set ExplicitRelease on each handle
 	testMsgpackH.ExplicitRelease = true
 	testBincH.ExplicitRelease = true

+ 1 - 1
codec/bench/x_bench_test.go

@@ -13,7 +13,7 @@ import (
 	"github.com/Sereal/Sereal/Go/sereal"
 	xdr "github.com/davecgh/go-xdr/xdr2"
 	jsoniter "github.com/json-iterator/go"
-	"github.com/mongodb/mongo-go-driver/bson"
+	"go.mongodb.org/mongo-driver/bson"         // "github.com/mongodb/mongo-go-driver/bson"
 	mgobson "gopkg.in/mgo.v2/bson"             //"labix.org/v2/mgo/bson"
 	vmsgpack "gopkg.in/vmihailenco/msgpack.v2" //"github.com/vmihailenco/msgpack"
 )

+ 8 - 10
codec/binc.go

@@ -100,6 +100,7 @@ func bincdesc(vd, vs byte) string {
 }
 
 type bincEncDriver struct {
+	encDriverNoopContainerWriter
 	e *Encoder
 	h *BincHandle
 	w *encWriterSwitch
@@ -107,7 +108,7 @@ type bincEncDriver struct {
 	b [16]byte          // scratch, used for encoding numbers - bigendian style
 	s uint16            // symbols sequencer
 	// c containerState
-	encDriverTrackContainerWriter
+	// encDriverTrackContainerWriter
 	noBuiltInTypes
 	// encNoSeparator
 	// _ [1]uint64 // padding
@@ -236,12 +237,10 @@ func (e *bincEncDriver) encodeExtPreamble(xtag byte, length int) {
 
 func (e *bincEncDriver) WriteArrayStart(length int) {
 	e.encLen(bincVdArray<<4, uint64(length))
-	e.c = containerArrayStart
 }
 
 func (e *bincEncDriver) WriteMapStart(length int) {
 	e.encLen(bincVdMap<<4, uint64(length))
-	e.c = containerMapStart
 }
 
 func (e *bincEncDriver) EncodeSymbol(v string) {
@@ -309,7 +308,7 @@ func (e *bincEncDriver) EncodeSymbol(v string) {
 }
 
 func (e *bincEncDriver) EncodeStringEnc(c charEncoding, v string) {
-	if e.c == containerMapKey && c == cUTF8 && (e.h.AsSymbols == 0 || e.h.AsSymbols == 1) {
+	if e.e.c == containerMapKey && c == cUTF8 && (e.h.AsSymbols == 0 || e.h.AsSymbols == 1) {
 		e.EncodeSymbol(v)
 		return
 	}
@@ -991,26 +990,25 @@ func (h *BincHandle) Name() string { return "binc" }
 
 // SetBytesExt sets an extension
 func (h *BincHandle) SetBytesExt(rt reflect.Type, tag uint64, ext BytesExt) (err error) {
-	return h.SetExt(rt, tag, &extWrapper{ext, interfaceExtFailer{}})
+	return h.SetExt(rt, tag, &bytesExtWrapper{BytesExt: ext})
 }
 
 func (h *BincHandle) newEncDriver(e *Encoder) encDriver {
-	return &bincEncDriver{e: e, h: h, w: e.w}
+	return &bincEncDriver{e: e, h: h, w: e.w()}
 }
 
 func (h *BincHandle) newDecDriver(d *Decoder) decDriver {
-	return &bincDecDriver{d: d, h: h, r: d.r, br: d.bytes}
+	return &bincDecDriver{d: d, h: h, r: d.r(), br: d.bytes}
 }
 
 func (e *bincEncDriver) reset() {
-	e.w = e.e.w
+	e.w = e.e.w()
 	e.s = 0
-	e.c = 0
 	e.m = nil
 }
 
 func (d *bincDecDriver) reset() {
-	d.r, d.br = d.d.r, d.d.bytes
+	d.r, d.br = d.d.r(), d.d.bytes
 	d.s = nil
 	d.bd, d.bdRead, d.vd, d.vs = 0, false, 0, 0
 }

+ 1 - 1
codec/build.sh

@@ -146,7 +146,7 @@ _codegenerators() {
         fi &&
         $c8 -rt codecgen -t 'codecgen generated' -o values_codecgen${c5} -d 19780 $zfin $zfin2 &&
         cp mammoth2_generated_test.go $c9 &&
-        $c8 -t '!notfastpath' -o mammoth2_codecgen${c5} -d 19781 mammoth2_generated_test.go &&
+        $c8 -t 'codecgen,!notfastpath generated,!notfastpath' -o mammoth2_codecgen${c5} -d 19781 mammoth2_generated_test.go &&
         rm -f $c9 &&
         echo "generators done!" 
 }

+ 5 - 5
codec/cbor.go

@@ -729,23 +729,23 @@ func (h *CborHandle) Name() string { return "cbor" }
 
 // SetInterfaceExt sets an extension
 func (h *CborHandle) SetInterfaceExt(rt reflect.Type, tag uint64, ext InterfaceExt) (err error) {
-	return h.SetExt(rt, tag, &extWrapper{bytesExtFailer{}, ext})
+	return h.SetExt(rt, tag, &interfaceExtWrapper{InterfaceExt: ext})
 }
 
 func (h *CborHandle) newEncDriver(e *Encoder) encDriver {
-	return &cborEncDriver{e: e, w: e.w, h: h}
+	return &cborEncDriver{e: e, w: e.w(), h: h}
 }
 
 func (h *CborHandle) newDecDriver(d *Decoder) decDriver {
-	return &cborDecDriver{d: d, h: h, r: d.r, br: d.bytes}
+	return &cborDecDriver{d: d, h: h, r: d.r(), br: d.bytes}
 }
 
 func (e *cborEncDriver) reset() {
-	e.w = e.e.w
+	e.w = e.e.w()
 }
 
 func (d *cborDecDriver) reset() {
-	d.r, d.br = d.d.r, d.d.bytes
+	d.r, d.br = d.d.r(), d.d.bytes
 	d.bd, d.bdRead = 0, false
 }
 

+ 4 - 3
codec/codec_test.go

@@ -1709,8 +1709,8 @@ func doTestSwallowAndZero(t *testing.T, h Handle) {
 	e1.MustEncode(v1)
 	d1 := NewDecoderBytes(b1, h)
 	d1.swallow()
-	if d1.r.numread() != uint(len(b1)) {
-		logT(t, "swallow didn't consume all encoded bytes: %v out of %v", d1.r.numread(), len(b1))
+	if d1.r().numread() != uint(len(b1)) {
+		logT(t, "swallow didn't consume all encoded bytes: %v out of %v", d1.r().numread(), len(b1))
 		failT(t)
 	}
 	setZero(v1)
@@ -2064,12 +2064,13 @@ func testMammoth(t *testing.T, name string, h Handle) {
 	var m, m2 TestMammoth
 	testRandomFillRV(reflect.ValueOf(&m).Elem())
 	b = testMarshalErr(&m, h, t, "mammoth-"+name)
+	// xdebugf("%s", b)
 	testUnmarshalErr(&m2, b, h, t, "mammoth-"+name)
 	testDeepEqualErr(&m, &m2, t, "mammoth-"+name)
-
 	var mm, mm2 TestMammoth2Wrapper
 	testRandomFillRV(reflect.ValueOf(&mm).Elem())
 	b = testMarshalErr(&mm, h, t, "mammoth2-"+name)
+	// os.Stderr.Write([]byte("\n\n\n\n" + string(b) + "\n\n\n\n"))
 	testUnmarshalErr(&mm2, b, h, t, "mammoth2-"+name)
 	testDeepEqualErr(&mm, &mm2, t, "mammoth2-"+name)
 	// testMammoth2(t, name, h)

+ 138 - 99
codec/decode.go

@@ -23,8 +23,8 @@ const (
 const (
 	decDefMaxDepth         = 1024 // maximum depth
 	decDefSliceCap         = 8
-	decDefChanCap          = 64                // should be large, as cap cannot be expanded
-	decScratchByteArrayLen = cacheLineSize - 4 // + (8 * 2) // - (8 * 1)
+	decDefChanCap          = 64            // should be large, as cap cannot be expanded
+	decScratchByteArrayLen = cacheLineSize // - 5 // + (8 * 2) // - (8 * 1)
 )
 
 var (
@@ -121,17 +121,20 @@ type decDriver interface {
 	DecodeTime() (t time.Time)
 
 	ReadArrayStart() int
-	ReadArrayElem()
 	ReadArrayEnd()
 	ReadMapStart() int
-	ReadMapElemKey()
-	ReadMapElemValue()
 	ReadMapEnd()
 
 	reset()
 	uncacheRead()
 }
 
+type decDriverContainerTracker interface {
+	ReadArrayElem()
+	ReadMapElemKey()
+	ReadMapElemValue()
+}
+
 type decodeError struct {
 	codecError
 	pos int
@@ -144,14 +147,15 @@ func (d decodeError) Error() string {
 type decDriverNoopContainerReader struct{}
 
 func (x decDriverNoopContainerReader) ReadArrayStart() (v int) { return }
-func (x decDriverNoopContainerReader) ReadArrayElem()          {}
 func (x decDriverNoopContainerReader) ReadArrayEnd()           {}
 func (x decDriverNoopContainerReader) ReadMapStart() (v int)   { return }
-func (x decDriverNoopContainerReader) ReadMapElemKey()         {}
-func (x decDriverNoopContainerReader) ReadMapElemValue()       {}
 func (x decDriverNoopContainerReader) ReadMapEnd()             {}
 func (x decDriverNoopContainerReader) CheckBreak() (v bool)    { return }
 
+// func (x decDriverNoopContainerReader) ReadArrayElem()          {}
+// func (x decDriverNoopContainerReader) ReadMapElemKey()         {}
+// func (x decDriverNoopContainerReader) ReadMapElemValue()       {}
+
 // func (x decNoSeparator) uncacheRead() {}
 
 // DecodeOptions captures configuration options during decode.
@@ -1402,7 +1406,6 @@ func decStructFieldKey(dd decDriver, keyType valueType, b *[decScratchByteArrayL
 func (d *Decoder) kStruct(f *codecFnInfo, rv reflect.Value) {
 	fti := f.ti
 	dd := d.d
-	elemsep := d.esep
 	sfn := structFieldNode{v: rv, update: true}
 	ctyp := dd.ContainerType()
 	var mf MissingFielder
@@ -1412,24 +1415,19 @@ func (d *Decoder) kStruct(f *codecFnInfo, rv reflect.Value) {
 		mf = rv2i(rv.Addr()).(MissingFielder)
 	}
 	if ctyp == valueTypeMap {
-		containerLen := dd.ReadMapStart()
+		containerLen := d.mapStart()
 		if containerLen == 0 {
-			dd.ReadMapEnd()
+			d.mapEnd()
 			return
 		}
-		d.depthIncr()
 		tisfi := fti.sfiSort
 		hasLen := containerLen >= 0
 
 		var rvkencname []byte
 		for j := 0; (hasLen && j < containerLen) || !(hasLen || dd.CheckBreak()); j++ {
-			if elemsep {
-				dd.ReadMapElemKey()
-			}
+			d.mapElemKey()
 			rvkencname = decStructFieldKey(dd, fti.keyType, &d.b)
-			if elemsep {
-				dd.ReadMapElemValue()
-			}
+			d.mapElemValue()
 			if k := fti.indexForEncName(rvkencname); k > -1 {
 				si := tisfi[k]
 				if dd.TryDecodeAsNil() {
@@ -1456,15 +1454,13 @@ func (d *Decoder) kStruct(f *codecFnInfo, rv reflect.Value) {
 			}
 			// keepAlive4StringView(rvkencnameB) // not needed, as reference is outside loop
 		}
-		dd.ReadMapEnd()
-		d.depthDecr()
+		d.mapEnd()
 	} else if ctyp == valueTypeArray {
-		containerLen := dd.ReadArrayStart()
+		containerLen := d.arrayStart()
 		if containerLen == 0 {
-			dd.ReadArrayEnd()
+			d.arrayEnd()
 			return
 		}
-		d.depthIncr()
 		// Not much gain from doing it two ways for array.
 		// Arrays are not used as much for structs.
 		hasLen := containerLen >= 0
@@ -1477,9 +1473,7 @@ func (d *Decoder) kStruct(f *codecFnInfo, rv reflect.Value) {
 				checkbreak = true
 				break
 			}
-			if elemsep {
-				dd.ReadArrayElem()
-			}
+			d.arrayElem()
 			if dd.TryDecodeAsNil() {
 				si.setToZeroValue(rv)
 			} else {
@@ -1492,14 +1486,11 @@ func (d *Decoder) kStruct(f *codecFnInfo, rv reflect.Value) {
 				if (hasLen && j == containerLen) || (!hasLen && dd.CheckBreak()) {
 					break
 				}
-				if elemsep {
-					dd.ReadArrayElem()
-				}
+				d.arrayElem()
 				d.structFieldNotFound(j, "")
 			}
 		}
-		dd.ReadArrayEnd()
-		d.depthDecr()
+		d.arrayEnd()
 	} else {
 		d.errorstr(errstrOnlyMapOrArrayCanDecodeIntoStruct)
 		return
@@ -1509,17 +1500,18 @@ func (d *Decoder) kStruct(f *codecFnInfo, rv reflect.Value) {
 func (d *Decoder) kSlice(f *codecFnInfo, rv reflect.Value) {
 	// A slice can be set from a map or array in stream.
 	// This way, the order can be kept (as order is lost with map).
-	ti := f.ti
-	if f.seq == seqTypeChan && ti.chandir&uint8(reflect.SendDir) == 0 {
+
+	frt := f.ti.rt
+	if f.seq == seqTypeChan && f.ti.chandir&uint8(reflect.SendDir) == 0 {
 		d.errorf("receive-only channel cannot be decoded")
 	}
 	dd := d.d
-	rtelem0 := ti.elem
+	rtelem0 := f.ti.elem
 	ctyp := dd.ContainerType()
 	if ctyp == valueTypeBytes || ctyp == valueTypeString {
 		// you can only decode bytes or string in the stream into a slice or array of bytes
-		if !(ti.rtid == uint8SliceTypId || rtelem0.Kind() == reflect.Uint8) {
-			d.errorf("bytes/string in stream must decode into slice/array of bytes, not %v", ti.rt)
+		if !(f.ti.rtid == uint8SliceTypId || rtelem0.Kind() == reflect.Uint8) {
+			d.errorf("bytes/string in stream must decode into slice/array of bytes, not %v", frt)
 		}
 		if f.seq == seqTypeChan {
 			bs2 := dd.DecodeBytes(nil, true)
@@ -1555,13 +1547,13 @@ func (d *Decoder) kSlice(f *codecFnInfo, rv reflect.Value) {
 		if rv.CanSet() {
 			if f.seq == seqTypeSlice {
 				if rv.IsNil() {
-					rv.Set(reflect.MakeSlice(ti.rt, 0, 0))
+					rv.Set(reflect.MakeSlice(frt, 0, 0))
 				} else {
 					rv.SetLen(0)
 				}
 			} else if f.seq == seqTypeChan {
 				if rv.IsNil() {
-					rv.Set(reflect.MakeChan(ti.rt, 0))
+					rv.Set(reflect.MakeChan(frt, 0))
 				}
 			}
 		}
@@ -1569,8 +1561,6 @@ func (d *Decoder) kSlice(f *codecFnInfo, rv reflect.Value) {
 		return
 	}
 
-	d.depthIncr()
-
 	rtelem0Size := int(rtelem0.Size())
 	rtElem0Kind := rtelem0.Kind()
 	rtelem0Mut := !isImmutableKind(rtElem0Kind)
@@ -1600,7 +1590,7 @@ func (d *Decoder) kSlice(f *codecFnInfo, rv reflect.Value) {
 					rv.SetLen(rvlen)
 				}
 			} else if rvCanset {
-				rv = reflect.MakeSlice(ti.rt, rvlen, rvlen)
+				rv = reflect.MakeSlice(frt, rvlen, rvlen)
 				rvcap = rvlen
 				rvChanged = true
 			} else {
@@ -1639,10 +1629,10 @@ func (d *Decoder) kSlice(f *codecFnInfo, rv reflect.Value) {
 			}
 			if rvCanset {
 				if f.seq == seqTypeSlice {
-					rv = reflect.MakeSlice(ti.rt, rvlen, rvlen)
+					rv = reflect.MakeSlice(frt, rvlen, rvlen)
 					rvChanged = true
 				} else { // chan
-					rv = reflect.MakeChan(ti.rt, rvlen)
+					rv = reflect.MakeChan(frt, rvlen)
 					rvChanged = true
 				}
 			} else {
@@ -1676,7 +1666,7 @@ func (d *Decoder) kSlice(f *codecFnInfo, rv reflect.Value) {
 					var rvcap2 int
 					var rvErrmsg2 string
 					rv9, rvcap2, rvChanged, rvErrmsg2 =
-						expandSliceRV(rv, ti.rt, rvCanset, rtelem0Size, 1, rvlen, rvcap)
+						expandSliceRV(rv, frt, rvCanset, rtelem0Size, 1, rvlen, rvcap)
 					if rvErrmsg2 != "" {
 						d.errorf(rvErrmsg2)
 					}
@@ -1722,7 +1712,7 @@ func (d *Decoder) kSlice(f *codecFnInfo, rv reflect.Value) {
 			rvlen = j
 		} else if j == 0 && rv.IsNil() {
 			if rvCanset {
-				rv = reflect.MakeSlice(ti.rt, 0, 0)
+				rv = reflect.MakeSlice(frt, 0, 0)
 				rvChanged = true
 			} // else { d.errorf("kSlice: cannot change non-settable slice") }
 		}
@@ -1733,7 +1723,6 @@ func (d *Decoder) kSlice(f *codecFnInfo, rv reflect.Value) {
 		rv0.Set(rv)
 	}
 
-	d.depthDecr()
 }
 
 // func (d *Decoder) kArray(f *codecFnInfo, rv reflect.Value) {
@@ -1743,8 +1732,7 @@ func (d *Decoder) kSlice(f *codecFnInfo, rv reflect.Value) {
 
 func (d *Decoder) kMap(f *codecFnInfo, rv reflect.Value) {
 	dd := d.d
-	containerLen := dd.ReadMapStart()
-	elemsep := d.esep
+	containerLen := d.mapStart()
 	ti := f.ti
 	if rv.IsNil() {
 		rvlen := decInferLen(containerLen, d.h.MaxInitLen, int(ti.key.Size()+ti.elem.Size()))
@@ -1752,12 +1740,10 @@ func (d *Decoder) kMap(f *codecFnInfo, rv reflect.Value) {
 	}
 
 	if containerLen == 0 {
-		dd.ReadMapEnd()
+		d.mapEnd()
 		return
 	}
 
-	d.depthIncr()
-
 	ktype, vtype := ti.key, ti.elem
 	ktypeId := rt2id(ktype)
 	vtypeKind := vtype.Kind()
@@ -1801,9 +1787,7 @@ func (d *Decoder) kMap(f *codecFnInfo, rv reflect.Value) {
 			rvkp = reflect.New(ktype)
 			rvk = rvkp.Elem()
 		}
-		if elemsep {
-			dd.ReadMapElemKey()
-		}
+		d.mapElemKey()
 		// if false && dd.TryDecodeAsNil() { // nil cannot be a map key, so disregard this block
 		// 	// Previously, if a nil key, we just ignored the mapped value and continued.
 		// 	// However, that makes the result of encoding and then decoding map[intf]intf{nil:nil}
@@ -1832,9 +1816,7 @@ func (d *Decoder) kMap(f *codecFnInfo, rv reflect.Value) {
 			}
 		}
 
-		if elemsep {
-			dd.ReadMapElemValue()
-		}
+		d.mapElemValue()
 
 		// Brittle, but OK per TryDecodeAsNil() contract.
 		// i.e. TryDecodeAsNil never shares slices with other decDriver procedures
@@ -1899,9 +1881,8 @@ func (d *Decoder) kMap(f *codecFnInfo, rv reflect.Value) {
 		// }
 	}
 
-	dd.ReadMapEnd()
+	d.mapEnd()
 
-	d.depthDecr()
 }
 
 // decNaked is used to keep track of the primitives decoded.
@@ -2282,15 +2263,16 @@ type Decoder struct {
 
 	// NOTE: Decoder shouldn't call its read methods,
 	// as the handler MAY need to do some coordination.
-	r *decReaderSwitch
+	// r *decReaderSwitch
 
 	// bi *bufioDecReader
 	// cache the mapTypeId and sliceTypeId for faster comparisons
 	mtid uintptr
 	stid uintptr
 
-	h  *BasicHandle
-	hh Handle
+	jdec *jsonDecDriver
+	h    *BasicHandle
+	hh   Handle
 
 	// ---- cpu cache line boundary?
 	decReaderSwitch
@@ -2304,14 +2286,14 @@ type Decoder struct {
 
 	is map[string]string // used for interning strings
 
-	_ uintptr // padding (so scratch is in its own cache line)
-
 	err error
 
 	// ---- cpu cache line boundary?
 	// ---- writable fields during execution --- *try* to keep in sep cache line
 	maxdepth int16
 	depth    int16
+	c        containerState
+	_        [3]byte                      // padding
 	b        [decScratchByteArrayLen]byte // scratch buffer, used by Decoder and xxxEncDrivers
 
 	// padding - false sharing help // modify 232 if Decoder struct changes.
@@ -2345,12 +2327,13 @@ func newDecoder(h Handle) *Decoder {
 		runtime.SetFinalizer(d, (*Decoder).finalize)
 		// xdebugf(">>>> new(Decoder) with finalizer")
 	}
-	d.r = &d.decReaderSwitch
+	// d.r = &d.decReaderSwitch
 	d.hh = h
 	d.be = h.isBinary()
 	// NOTE: do not initialize d.n here. It is lazily initialized in d.naked()
 	var jh *JsonHandle
 	jh, d.js = h.(*JsonHandle)
+	d.jdec = nil
 	if d.js {
 		d.jsms = jh.MapKeyAsString
 	}
@@ -2359,10 +2342,17 @@ func newDecoder(h Handle) *Decoder {
 		d.is = make(map[string]string, 32)
 	}
 	d.d = h.newDecDriver(d)
+	if d.js {
+		d.jdec = d.d.(*jsonDecDriver)
+	}
 	// d.cr, _ = d.d.(containerStateRecv)
 	return d
 }
 
+func (d *Decoder) r() *decReaderSwitch {
+	return &d.decReaderSwitch
+}
+
 func (d *Decoder) resetCommon() {
 	// d.r = &d.decReaderSwitch
 	d.d.reset()
@@ -2590,37 +2580,26 @@ func (d *Decoder) swallow() {
 	if dd.TryDecodeAsNil() {
 		return
 	}
-	elemsep := d.esep
 	switch dd.ContainerType() {
 	case valueTypeMap:
-		containerLen := dd.ReadMapStart()
-		d.depthIncr()
+		containerLen := d.mapStart()
 		hasLen := containerLen >= 0
 		for j := 0; (hasLen && j < containerLen) || !(hasLen || dd.CheckBreak()); j++ {
 			// if clenGtEqualZero {if j >= containerLen {break} } else if dd.CheckBreak() {break}
-			if elemsep {
-				dd.ReadMapElemKey()
-			}
+			d.mapElemKey()
 			d.swallow()
-			if elemsep {
-				dd.ReadMapElemValue()
-			}
+			d.mapElemValue()
 			d.swallow()
 		}
-		dd.ReadMapEnd()
-		d.depthDecr()
+		d.mapEnd()
 	case valueTypeArray:
-		containerLen := dd.ReadArrayStart()
-		d.depthIncr()
+		containerLen := d.arrayStart()
 		hasLen := containerLen >= 0
 		for j := 0; (hasLen && j < containerLen) || !(hasLen || dd.CheckBreak()); j++ {
-			if elemsep {
-				dd.ReadArrayElem()
-			}
+			d.arrayElem()
 			d.swallow()
 		}
-		dd.ReadArrayEnd()
-		d.depthDecr()
+		d.arrayEnd()
 	case valueTypeBytes:
 		dd.DecodeBytes(d.b[:], true)
 	case valueTypeString:
@@ -2732,11 +2711,7 @@ func (d *Decoder) decode(iv interface{}) {
 	case *uint64:
 		*v = d.d.DecodeUint64()
 	case *float32:
-		f64 := d.d.DecodeFloat64()
-		if chkOvf.Float32(f64) {
-			d.errorf("float32 overflow: %v", f64)
-		}
-		*v = float32(f64)
+		*v = float32(d.decodeFloat32())
 	case *float64:
 		*v = d.d.DecodeFloat64()
 	case *[]uint8:
@@ -2897,9 +2872,9 @@ func (d *Decoder) string(v []byte) (s string) {
 // nextValueBytes returns the next value in the stream as a set of bytes.
 func (d *Decoder) nextValueBytes() (bs []byte) {
 	d.d.uncacheRead()
-	d.r.track()
+	d.r().track()
 	d.swallow()
-	bs = d.r.stopTrack()
+	bs = d.r().stopTrack()
 	return
 }
 
@@ -2913,12 +2888,76 @@ func (d *Decoder) rawBytes() []byte {
 }
 
 func (d *Decoder) wrapErr(v interface{}, err *error) {
-	*err = decodeError{codecError: codecError{name: d.hh.Name(), err: v}, pos: int(d.r.numread())}
+	*err = decodeError{codecError: codecError{name: d.hh.Name(), err: v}, pos: int(d.r().numread())}
 }
 
 // NumBytesRead returns the number of bytes read
 func (d *Decoder) NumBytesRead() int {
-	return int(d.r.numread())
+	return int(d.r().numread())
+}
+
+// decodeFloat32 will delegate to an appropriate DecodeFloat32 implementation (if one exists),
+// else it will call DecodeFloat64 and ensure the value doesn't overflow.
+//
+// Note that we return float64 to reduce unnecessary conversions
+func (d *Decoder) decodeFloat32() float64 {
+	if d.js {
+		return d.jdec.DecodeFloat32() // custom implementation for 32-bit
+	}
+	return chkOvf.Float32V(d.d.DecodeFloat64())
+}
+
+// ---- container tracking
+// Note: We update the .c after calling the callback.
+// This way, the callback can know what the last status was.
+
+func (d *Decoder) mapStart() (v int) {
+	v = d.d.ReadMapStart()
+	d.depthIncr()
+	d.c = containerMapStart
+	return
+}
+
+func (d *Decoder) mapElemKey() {
+	if d.js {
+		d.jdec.ReadMapElemKey()
+	}
+	d.c = containerMapKey
+}
+
+func (d *Decoder) mapElemValue() {
+	if d.js {
+		d.jdec.ReadMapElemValue()
+	}
+	d.c = containerMapValue
+}
+
+func (d *Decoder) mapEnd() {
+	d.d.ReadMapEnd()
+	d.depthDecr()
+	d.c = containerMapEnd
+	d.c = 0
+}
+
+func (d *Decoder) arrayStart() (v int) {
+	v = d.d.ReadArrayStart()
+	d.depthIncr()
+	d.c = containerArrayStart
+	return
+}
+
+func (d *Decoder) arrayElem() {
+	if d.js {
+		d.jdec.ReadArrayElem()
+	}
+	d.c = containerArrayElem
+}
+
+func (d *Decoder) arrayEnd() {
+	d.d.ReadArrayEnd()
+	d.depthDecr()
+	d.c = containerArrayEnd
+	d.c = 0
 }
 
 // --------------------------------------------------
@@ -2937,9 +2976,9 @@ func (d *Decoder) decSliceHelperStart() (x decSliceHelper, clen int) {
 	switch ctyp {
 	case valueTypeArray:
 		x.array = true
-		clen = dd.ReadArrayStart()
+		clen = d.arrayStart()
 	case valueTypeMap:
-		clen = dd.ReadMapStart() * 2
+		clen = d.mapStart() * 2
 	default:
 		d.errorf("only encoded map or array can be decoded into a slice (%d)", ctyp)
 	}
@@ -2950,19 +2989,19 @@ func (d *Decoder) decSliceHelperStart() (x decSliceHelper, clen int) {
 
 func (x decSliceHelper) End() {
 	if x.array {
-		x.d.d.ReadArrayEnd()
+		x.d.arrayEnd()
 	} else {
-		x.d.d.ReadMapEnd()
+		x.d.mapEnd()
 	}
 }
 
 func (x decSliceHelper) ElemContainerState(index int) {
 	if x.array {
-		x.d.d.ReadArrayElem()
+		x.d.arrayElem()
 	} else if index%2 == 0 {
-		x.d.d.ReadMapElemKey()
+		x.d.mapElemKey()
 	} else {
-		x.d.d.ReadMapElemValue()
+		x.d.mapElemValue()
 	}
 }
 

+ 295 - 306
codec/encode.go

@@ -56,17 +56,20 @@ type encDriver interface {
 	//encBignum(f *big.Int)
 	//encStringRunes(c charEncoding, v []rune)
 	WriteArrayStart(length int)
-	WriteArrayElem()
 	WriteArrayEnd()
 	WriteMapStart(length int)
-	WriteMapElemKey()
-	WriteMapElemValue()
 	WriteMapEnd()
 
 	reset()
 	atEndOfEncode()
 }
 
+type encDriverContainerTracker interface {
+	WriteArrayElem()
+	WriteMapElemKey()
+	WriteMapElemValue()
+}
+
 type encDriverAsis interface {
 	EncodeAsis(v []byte)
 }
@@ -82,26 +85,27 @@ func (e encodeError) Error() string {
 type encDriverNoopContainerWriter struct{}
 
 func (encDriverNoopContainerWriter) WriteArrayStart(length int) {}
-func (encDriverNoopContainerWriter) WriteArrayElem()            {}
 func (encDriverNoopContainerWriter) WriteArrayEnd()             {}
 func (encDriverNoopContainerWriter) WriteMapStart(length int)   {}
-func (encDriverNoopContainerWriter) WriteMapElemKey()           {}
-func (encDriverNoopContainerWriter) WriteMapElemValue()         {}
 func (encDriverNoopContainerWriter) WriteMapEnd()               {}
 func (encDriverNoopContainerWriter) atEndOfEncode()             {}
 
-type encDriverTrackContainerWriter struct {
-	c containerState
-}
+// func (encDriverNoopContainerWriter) WriteArrayElem()            {}
+// func (encDriverNoopContainerWriter) WriteMapElemKey()           {}
+// func (encDriverNoopContainerWriter) WriteMapElemValue()         {}
 
-func (e *encDriverTrackContainerWriter) WriteArrayStart(length int) { e.c = containerArrayStart }
-func (e *encDriverTrackContainerWriter) WriteArrayElem()            { e.c = containerArrayElem }
-func (e *encDriverTrackContainerWriter) WriteArrayEnd()             { e.c = containerArrayEnd }
-func (e *encDriverTrackContainerWriter) WriteMapStart(length int)   { e.c = containerMapStart }
-func (e *encDriverTrackContainerWriter) WriteMapElemKey()           { e.c = containerMapKey }
-func (e *encDriverTrackContainerWriter) WriteMapElemValue()         { e.c = containerMapValue }
-func (e *encDriverTrackContainerWriter) WriteMapEnd()               { e.c = containerMapEnd }
-func (e *encDriverTrackContainerWriter) atEndOfEncode()             {}
+// type encDriverTrackContainerWriter struct {
+// 	c containerState
+// }
+
+// func (e *encDriverTrackContainerWriter) WriteArrayStart(length int) { e.c = containerArrayStart }
+// func (e *encDriverTrackContainerWriter) WriteArrayElem()            { e.c = containerArrayElem }
+// func (e *encDriverTrackContainerWriter) WriteArrayEnd()             { e.c = containerArrayEnd }
+// func (e *encDriverTrackContainerWriter) WriteMapStart(length int)   { e.c = containerMapStart }
+// func (e *encDriverTrackContainerWriter) WriteMapElemKey()           { e.c = containerMapKey }
+// func (e *encDriverTrackContainerWriter) WriteMapElemValue()         { e.c = containerMapValue }
+// func (e *encDriverTrackContainerWriter) WriteMapEnd()               { e.c = containerMapEnd }
+// func (e *encDriverTrackContainerWriter) atEndOfEncode()             {}
 
 // type ioEncWriterWriter interface {
 // 	WriteByte(c byte) error
@@ -310,6 +314,7 @@ type bufioEncWriter struct {
 	// less used fields
 	bytesBufPooler
 
+	b [40]byte // scratch buffer and padding (cache-aligned)
 	// a int
 	// b   [4]byte
 	// err
@@ -325,6 +330,8 @@ func (z *bufioEncWriter) reset(w io.Writer, bufsize int) {
 	z.sz = bufsize
 	if cap(z.buf) >= bufsize {
 		z.buf = z.buf[:cap(z.buf)]
+	} else if bufsize <= len(z.b) {
+		z.buf = z.b[:]
 	} else {
 		z.buf = z.bytesBufPooler.get(bufsize)
 		// z.buf = make([]byte, bufsize)
@@ -507,97 +514,30 @@ func (e *Encoder) kErr(f *codecFnInfo, rv reflect.Value) {
 }
 
 func (e *Encoder) kSlice(f *codecFnInfo, rv reflect.Value) {
-	ti := f.ti
-	ee := e.e
 	// array may be non-addressable, so we have to manage with care
 	//   (don't call rv.Bytes, rv.Slice, etc).
 	// E.g. type struct S{B [2]byte};
 	//   Encode(S{}) will bomb on "panic: slice of unaddressable array".
 	if f.seq != seqTypeArray {
 		if rv.IsNil() {
-			ee.EncodeNil()
+			e.e.EncodeNil()
 			return
 		}
 		// If in this method, then there was no extension function defined.
 		// So it's okay to treat as []byte.
-		if ti.rtid == uint8SliceTypId {
-			ee.EncodeStringBytesRaw(rv.Bytes())
+		if f.ti.rtid == uint8SliceTypId {
+			e.e.EncodeStringBytesRaw(rv.Bytes())
 			return
 		}
 	}
-	if f.seq == seqTypeChan && ti.chandir&uint8(reflect.RecvDir) == 0 {
+	if f.seq == seqTypeChan && f.ti.chandir&uint8(reflect.RecvDir) == 0 {
 		e.errorf("send-only channel cannot be encoded")
 	}
-	elemsep := e.esep
-	rtelem := ti.elem
-	rtelemIsByte := uint8TypId == rt2id(rtelem) // NOT rtelem.Kind() == reflect.Uint8
-	var l int
+	mbs := f.ti.mbs
+	rtelem := f.ti.elem
 	// if a slice, array or chan of bytes, treat specially
-	if rtelemIsByte {
-		switch f.seq {
-		case seqTypeSlice:
-			ee.EncodeStringBytesRaw(rv.Bytes())
-		case seqTypeArray:
-			l = rv.Len()
-			if rv.CanAddr() {
-				ee.EncodeStringBytesRaw(rv.Slice(0, l).Bytes())
-			} else {
-				var bs []byte
-				if l <= cap(e.b) {
-					bs = e.b[:l]
-				} else {
-					bs = make([]byte, l)
-				}
-				reflect.Copy(reflect.ValueOf(bs), rv)
-				ee.EncodeStringBytesRaw(bs)
-			}
-		case seqTypeChan:
-			// do not use range, so that the number of elements encoded
-			// does not change, and encoding does not hang waiting on someone to close chan.
-			// for b := range rv2i(rv).(<-chan byte) { bs = append(bs, b) }
-			// ch := rv2i(rv).(<-chan byte) // fix error - that this is a chan byte, not a <-chan byte.
-
-			if rv.IsNil() {
-				ee.EncodeNil()
-				break
-			}
-			bs := e.b[:0]
-			irv := rv2i(rv)
-			ch, ok := irv.(<-chan byte)
-			if !ok {
-				ch = irv.(chan byte)
-			}
-
-		L1:
-			switch timeout := e.h.ChanRecvTimeout; {
-			case timeout == 0: // only consume available
-				for {
-					select {
-					case b := <-ch:
-						bs = append(bs, b)
-					default:
-						break L1
-					}
-				}
-			case timeout > 0: // consume until timeout
-				tt := time.NewTimer(timeout)
-				for {
-					select {
-					case b := <-ch:
-						bs = append(bs, b)
-					case <-tt.C:
-						// close(tt.C)
-						break L1
-					}
-				}
-			default: // consume until close
-				for b := range ch {
-					bs = append(bs, b)
-				}
-			}
-
-			ee.EncodeStringBytesRaw(bs)
-		}
+	if !mbs && uint8TypId == rt2id(rtelem) { // NOT rtelem.Kind() == reflect.Uint8
+		e.kSliceBytes(rv, f.seq)
 		return
 	}
 
@@ -633,15 +573,15 @@ func (e *Encoder) kSlice(f *codecFnInfo, rv reflect.Value) {
 		rv = rvcs // TODO: ensure this doesn't mess up anywhere that rv of kind chan is expected
 	}
 
-	l = rv.Len()
-	if ti.mbs {
+	var l = rv.Len()
+	if mbs {
 		if l%2 == 1 {
 			e.errorf("mapBySlice requires even slice length, but got %v", l)
 			return
 		}
-		ee.WriteMapStart(l / 2)
+		e.mapStart(l / 2)
 	} else {
-		ee.WriteArrayStart(l)
+		e.arrayStart(l)
 	}
 
 	if l > 0 {
@@ -656,81 +596,122 @@ func (e *Encoder) kSlice(f *codecFnInfo, rv reflect.Value) {
 			fn = e.h.fn(rtelem, true, true)
 		}
 		for j := 0; j < l; j++ {
-			if elemsep {
-				if ti.mbs {
-					if j%2 == 0 {
-						ee.WriteMapElemKey()
-					} else {
-						ee.WriteMapElemValue()
-					}
+			if mbs {
+				if j%2 == 0 {
+					e.mapElemKey()
 				} else {
-					ee.WriteArrayElem()
+					e.mapElemValue()
 				}
+			} else {
+				e.arrayElem()
 			}
 			e.encodeValue(rv.Index(j), fn, true)
 		}
 	}
 
-	if ti.mbs {
-		ee.WriteMapEnd()
+	if mbs {
+		e.mapEnd()
 	} else {
-		ee.WriteArrayEnd()
+		e.arrayEnd()
 	}
 }
 
-func (e *Encoder) kStructNoOmitempty(f *codecFnInfo, rv reflect.Value) {
-	fti := f.ti
-	tisfi := fti.sfiSrc
-	toMap := !(fti.toArray || e.h.StructToArray)
-	if toMap {
-		tisfi = fti.sfiSort
-	}
+func (e *Encoder) kSliceBytes(rv reflect.Value, seq seqType) {
+	// xdebugf("kSliceBytes: seq: %d, rvType: %v", seq, rv.Type())
+	switch seq {
+	case seqTypeSlice:
+		e.e.EncodeStringBytesRaw(rv.Bytes())
+	case seqTypeArray:
+		var l = rv.Len()
+		if rv.CanAddr() {
+			e.e.EncodeStringBytesRaw(rv.Slice(0, l).Bytes())
+		} else {
+			var bs []byte
+			if l <= cap(e.b) {
+				bs = e.b[:l]
+			} else {
+				bs = make([]byte, l)
+			}
+			reflect.Copy(reflect.ValueOf(bs), rv)
+			e.e.EncodeStringBytesRaw(bs)
+		}
+	case seqTypeChan:
+		// do not use range, so that the number of elements encoded
+		// does not change, and encoding does not hang waiting on someone to close chan.
+		// for b := range rv2i(rv).(<-chan byte) { bs = append(bs, b) }
+		// ch := rv2i(rv).(<-chan byte) // fix error - that this is a chan byte, not a <-chan byte.
 
-	ee := e.e
+		if rv.IsNil() {
+			e.e.EncodeNil()
+			break
+		}
+		bs := e.b[:0]
+		irv := rv2i(rv)
+		ch, ok := irv.(<-chan byte)
+		if !ok {
+			ch = irv.(chan byte)
+		}
 
-	sfn := structFieldNode{v: rv, update: false}
-	if toMap {
-		ee.WriteMapStart(len(tisfi))
-		if e.esep {
-			for _, si := range tisfi {
-				ee.WriteMapElemKey()
-				e.kStructFieldKey(fti.keyType, si.encNameAsciiAlphaNum, si.encName)
-				ee.WriteMapElemValue()
-				e.encodeValue(sfn.field(si), nil, true)
+	L1:
+		switch timeout := e.h.ChanRecvTimeout; {
+		case timeout == 0: // only consume available
+			for {
+				select {
+				case b := <-ch:
+					bs = append(bs, b)
+				default:
+					break L1
+				}
 			}
-		} else {
-			for _, si := range tisfi {
-				e.kStructFieldKey(fti.keyType, si.encNameAsciiAlphaNum, si.encName)
-				e.encodeValue(sfn.field(si), nil, true)
+		case timeout > 0: // consume until timeout
+			tt := time.NewTimer(timeout)
+			for {
+				select {
+				case b := <-ch:
+					bs = append(bs, b)
+				case <-tt.C:
+					// close(tt.C)
+					break L1
+				}
+			}
+		default: // consume until close
+			for b := range ch {
+				bs = append(bs, b)
 			}
 		}
-		ee.WriteMapEnd()
+
+		e.e.EncodeStringBytesRaw(bs)
+	}
+}
+
+func (e *Encoder) kStructNoOmitempty(f *codecFnInfo, rv reflect.Value) {
+	sfn := structFieldNode{v: rv, update: false}
+	if f.ti.toArray || e.h.StructToArray { // toArray
+		e.arrayStart(len(f.ti.sfiSrc))
+		for _, si := range f.ti.sfiSrc {
+			e.arrayElem()
+			e.encodeValue(sfn.field(si), nil, true)
+		}
+		e.arrayEnd()
 	} else {
-		ee.WriteArrayStart(len(tisfi))
-		if e.esep {
-			for _, si := range tisfi {
-				ee.WriteArrayElem()
-				e.encodeValue(sfn.field(si), nil, true)
-			}
-		} else {
-			for _, si := range tisfi {
-				e.encodeValue(sfn.field(si), nil, true)
-			}
+		e.mapStart(len(f.ti.sfiSort))
+		for _, si := range f.ti.sfiSort {
+			e.mapElemKey()
+			e.kStructFieldKey(f.ti.keyType, si.encNameAsciiAlphaNum, si.encName)
+			e.mapElemValue()
+			e.encodeValue(sfn.field(si), nil, true)
 		}
-		ee.WriteArrayEnd()
+		e.mapEnd()
 	}
 }
 
 func (e *Encoder) kStructFieldKey(keyType valueType, encNameAsciiAlphaNum bool, encName string) {
-	encStructFieldKey(encName, e.e, e.w, keyType, encNameAsciiAlphaNum, e.js)
+	encStructFieldKey(encName, e.e, e.w(), keyType, encNameAsciiAlphaNum, e.js)
 }
 
 func (e *Encoder) kStruct(f *codecFnInfo, rv reflect.Value) {
-	fti := f.ti
-	elemsep := e.esep
-	tisfi := fti.sfiSrc
 	var newlen int
-	toMap := !(fti.toArray || e.h.StructToArray)
+	toMap := !(f.ti.toArray || e.h.StructToArray)
 	var mf map[string]interface{}
 	if f.ti.mf {
 		mf = rv2i(rv).(MissingFielder).CodecMissingFields()
@@ -748,12 +729,7 @@ func (e *Encoder) kStruct(f *codecFnInfo, rv reflect.Value) {
 		toMap = true
 		newlen += len(mf)
 	}
-	// if toMap, use the sorted array. If toArray, use unsorted array (to match sequence in struct)
-	if toMap {
-		tisfi = fti.sfiSort
-	}
-	newlen += len(tisfi)
-	ee := e.e
+	newlen += len(f.ti.sfiSrc)
 
 	// Use sync.Pool to reduce allocating slices unnecessarily.
 	// The cost of sync.Pool is less than the cost of new allocation.
@@ -771,86 +747,75 @@ func (e *Encoder) kStruct(f *codecFnInfo, rv reflect.Value) {
 	var spool sfiRvPooler
 	var fkvs = spool.get(newlen)
 
-	var kv sfiRv
 	recur := e.h.RecursiveEmptyCheck
 	sfn := structFieldNode{v: rv, update: false}
-	newlen = 0
-	for _, si := range tisfi {
-		// kv.r = si.field(rv, false)
-		kv.r = sfn.field(si)
-		if toMap {
+
+	var kv sfiRv
+	var j int
+	if toMap {
+		newlen = 0
+		for _, si := range f.ti.sfiSort { // use sorted array
+			// kv.r = si.field(rv, false)
+			kv.r = sfn.field(si)
 			if si.omitEmpty() && isEmptyValue(kv.r, e.h.TypeInfos, recur, recur) {
 				continue
 			}
 			kv.v = si // si.encName
-		} else {
-			// use the zero value.
-			// if a reference or struct, set to nil (so you do not output too much)
-			if si.omitEmpty() && isEmptyValue(kv.r, e.h.TypeInfos, recur, recur) {
-				switch kv.r.Kind() {
-				case reflect.Struct, reflect.Interface, reflect.Ptr,
-					reflect.Array, reflect.Map, reflect.Slice:
-					kv.r = reflect.Value{} //encode as nil
-				}
-			}
-		}
-		fkvs[newlen] = kv
-		newlen++
-	}
-	fkvs = fkvs[:newlen]
-
-	var mflen int
-	for k, v := range mf {
-		if k == "" {
-			delete(mf, k)
-			continue
-		}
-		if fti.infoFieldOmitempty && isEmptyValue(reflect.ValueOf(v), e.h.TypeInfos, recur, recur) {
-			delete(mf, k)
-			continue
+			fkvs[newlen] = kv
+			newlen++
 		}
-		mflen++
-	}
-
-	var j int
-	if toMap {
-		ee.WriteMapStart(newlen + mflen)
-		if elemsep {
-			for j = 0; j < len(fkvs); j++ {
-				kv = fkvs[j]
-				ee.WriteMapElemKey()
-				e.kStructFieldKey(fti.keyType, kv.v.encNameAsciiAlphaNum, kv.v.encName)
-				ee.WriteMapElemValue()
-				e.encodeValue(kv.r, nil, true)
+		var mflen int
+		for k, v := range mf {
+			if k == "" {
+				delete(mf, k)
+				continue
 			}
-		} else {
-			for j = 0; j < len(fkvs); j++ {
-				kv = fkvs[j]
-				e.kStructFieldKey(fti.keyType, kv.v.encNameAsciiAlphaNum, kv.v.encName)
-				e.encodeValue(kv.r, nil, true)
+			if f.ti.infoFieldOmitempty && isEmptyValue(reflect.ValueOf(v), e.h.TypeInfos, recur, recur) {
+				delete(mf, k)
+				continue
 			}
+			mflen++
+		}
+		// encode it all
+		e.mapStart(newlen + mflen)
+		for j = 0; j < newlen; j++ {
+			kv = fkvs[j]
+			e.mapElemKey()
+			e.kStructFieldKey(f.ti.keyType, kv.v.encNameAsciiAlphaNum, kv.v.encName)
+			e.mapElemValue()
+			e.encodeValue(kv.r, nil, true)
 		}
 		// now, add the others
 		for k, v := range mf {
-			ee.WriteMapElemKey()
-			e.kStructFieldKey(fti.keyType, false, k)
-			ee.WriteMapElemValue()
+			e.mapElemKey()
+			e.kStructFieldKey(f.ti.keyType, false, k)
+			e.mapElemValue()
 			e.encode(v)
 		}
-		ee.WriteMapEnd()
+		e.mapEnd()
 	} else {
-		ee.WriteArrayStart(newlen)
-		if elemsep {
-			for j = 0; j < len(fkvs); j++ {
-				ee.WriteArrayElem()
-				e.encodeValue(fkvs[j].r, nil, true)
-			}
-		} else {
-			for j = 0; j < len(fkvs); j++ {
-				e.encodeValue(fkvs[j].r, nil, true)
+		newlen = len(f.ti.sfiSrc)
+		// kv.v = nil
+		for i, si := range f.ti.sfiSrc { // use unsorted array (to match sequence in struct)
+			// kv.r = si.field(rv, false)
+			kv.r = sfn.field(si)
+			// use the zero value.
+			// if a reference or struct, set to nil (so you do not output too much)
+			if si.omitEmpty() && isEmptyValue(kv.r, e.h.TypeInfos, recur, recur) {
+				switch kv.r.Kind() {
+				case reflect.Struct, reflect.Interface, reflect.Ptr, reflect.Array, reflect.Map, reflect.Slice:
+					kv.r = reflect.Value{} //encode as nil
+				}
 			}
+			fkvs[i] = kv
+		}
+		// encode it all
+		e.arrayStart(newlen)
+		for j = 0; j < newlen; j++ {
+			e.arrayElem()
+			e.encodeValue(fkvs[j].r, nil, true)
 		}
-		ee.WriteArrayEnd()
+		e.arrayEnd()
 	}
 
 	// do not use defer. Instead, use explicit pool return at end of function.
@@ -860,16 +825,15 @@ func (e *Encoder) kStruct(f *codecFnInfo, rv reflect.Value) {
 }
 
 func (e *Encoder) kMap(f *codecFnInfo, rv reflect.Value) {
-	ee := e.e
 	if rv.IsNil() {
-		ee.EncodeNil()
+		e.e.EncodeNil()
 		return
 	}
 
 	l := rv.Len()
-	ee.WriteMapStart(l)
+	e.mapStart(l)
 	if l == 0 {
-		ee.WriteMapEnd()
+		e.mapEnd()
 		return
 	}
 	// var asSymbols bool
@@ -881,12 +845,8 @@ func (e *Encoder) kMap(f *codecFnInfo, rv reflect.Value) {
 	// encoding type, because preEncodeValue may break it down to
 	// a concrete type and kInterface will bomb.
 	var keyFn, valFn *codecFn
-	ti := f.ti
-	rtkey0 := ti.key
-	rtkey := rtkey0
-	rtval0 := ti.elem
-	rtval := rtval0
-	// rtkeyid := rt2id(rtkey0)
+	rtval := f.ti.elem
+	// rtkeyid := rt2id(f.ti.key)
 	for rtval.Kind() == reflect.Ptr {
 		rtval = rtval.Elem()
 	}
@@ -896,12 +856,13 @@ func (e *Encoder) kMap(f *codecFnInfo, rv reflect.Value) {
 	mks := rv.MapKeys()
 
 	if e.h.Canonical {
-		e.kMapCanonical(rtkey, rv, mks, valFn)
-		ee.WriteMapEnd()
+		e.kMapCanonical(f.ti.key, rv, mks, valFn)
+		e.mapEnd()
 		return
 	}
 
-	var keyTypeIsString = stringTypId == rt2id(rtkey0) // rtkeyid
+	rtkey := f.ti.key
+	var keyTypeIsString = stringTypId == rt2id(rtkey) // rtkeyid
 	if !keyTypeIsString {
 		for rtkey.Kind() == reflect.Ptr {
 			rtkey = rtkey.Elem()
@@ -914,30 +875,23 @@ func (e *Encoder) kMap(f *codecFnInfo, rv reflect.Value) {
 
 	// for j, lmks := 0, len(mks); j < lmks; j++ {
 	for j := range mks {
-		if e.esep {
-			ee.WriteMapElemKey()
-		}
+		e.mapElemKey()
 		if keyTypeIsString {
 			if e.h.StringToRaw {
-				ee.EncodeStringBytesRaw(bytesView(mks[j].String()))
+				e.e.EncodeStringBytesRaw(bytesView(mks[j].String()))
 			} else {
-				ee.EncodeStringEnc(cUTF8, mks[j].String())
+				e.e.EncodeStringEnc(cUTF8, mks[j].String())
 			}
 		} else {
 			e.encodeValue(mks[j], keyFn, true)
 		}
-		if e.esep {
-			ee.WriteMapElemValue()
-		}
+		e.mapElemValue()
 		e.encodeValue(rv.MapIndex(mks[j]), valFn, true)
-
 	}
-	ee.WriteMapEnd()
+	e.mapEnd()
 }
 
 func (e *Encoder) kMapCanonical(rtkey reflect.Type, rv reflect.Value, mks []reflect.Value, valFn *codecFn) {
-	ee := e.e
-	elemsep := e.esep
 	// we previously did out-of-band if an extension was registered.
 	// This is not necessary, as the natural kind is sufficient for ordering.
 
@@ -951,13 +905,9 @@ func (e *Encoder) kMapCanonical(rtkey reflect.Type, rv reflect.Value, mks []refl
 		}
 		sort.Sort(boolRvSlice(mksv))
 		for i := range mksv {
-			if elemsep {
-				ee.WriteMapElemKey()
-			}
-			ee.EncodeBool(mksv[i].v)
-			if elemsep {
-				ee.WriteMapElemValue()
-			}
+			e.mapElemKey()
+			e.e.EncodeBool(mksv[i].v)
+			e.mapElemValue()
 			e.encodeValue(rv.MapIndex(mksv[i].r), valFn, true)
 		}
 	case reflect.String:
@@ -969,17 +919,13 @@ func (e *Encoder) kMapCanonical(rtkey reflect.Type, rv reflect.Value, mks []refl
 		}
 		sort.Sort(stringRvSlice(mksv))
 		for i := range mksv {
-			if elemsep {
-				ee.WriteMapElemKey()
-			}
+			e.mapElemKey()
 			if e.h.StringToRaw {
-				ee.EncodeStringBytesRaw(bytesView(mksv[i].v))
+				e.e.EncodeStringBytesRaw(bytesView(mksv[i].v))
 			} else {
-				ee.EncodeStringEnc(cUTF8, mksv[i].v)
-			}
-			if elemsep {
-				ee.WriteMapElemValue()
+				e.e.EncodeStringEnc(cUTF8, mksv[i].v)
 			}
+			e.mapElemValue()
 			e.encodeValue(rv.MapIndex(mksv[i].r), valFn, true)
 		}
 	case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint, reflect.Uintptr:
@@ -991,13 +937,9 @@ func (e *Encoder) kMapCanonical(rtkey reflect.Type, rv reflect.Value, mks []refl
 		}
 		sort.Sort(uint64RvSlice(mksv))
 		for i := range mksv {
-			if elemsep {
-				ee.WriteMapElemKey()
-			}
-			ee.EncodeUint(mksv[i].v)
-			if elemsep {
-				ee.WriteMapElemValue()
-			}
+			e.mapElemKey()
+			e.e.EncodeUint(mksv[i].v)
+			e.mapElemValue()
 			e.encodeValue(rv.MapIndex(mksv[i].r), valFn, true)
 		}
 	case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
@@ -1009,13 +951,9 @@ func (e *Encoder) kMapCanonical(rtkey reflect.Type, rv reflect.Value, mks []refl
 		}
 		sort.Sort(int64RvSlice(mksv))
 		for i := range mksv {
-			if elemsep {
-				ee.WriteMapElemKey()
-			}
-			ee.EncodeInt(mksv[i].v)
-			if elemsep {
-				ee.WriteMapElemValue()
-			}
+			e.mapElemKey()
+			e.e.EncodeInt(mksv[i].v)
+			e.mapElemValue()
 			e.encodeValue(rv.MapIndex(mksv[i].r), valFn, true)
 		}
 	case reflect.Float32:
@@ -1027,13 +965,9 @@ func (e *Encoder) kMapCanonical(rtkey reflect.Type, rv reflect.Value, mks []refl
 		}
 		sort.Sort(float64RvSlice(mksv))
 		for i := range mksv {
-			if elemsep {
-				ee.WriteMapElemKey()
-			}
-			ee.EncodeFloat32(float32(mksv[i].v))
-			if elemsep {
-				ee.WriteMapElemValue()
-			}
+			e.mapElemKey()
+			e.e.EncodeFloat32(float32(mksv[i].v))
+			e.mapElemValue()
 			e.encodeValue(rv.MapIndex(mksv[i].r), valFn, true)
 		}
 	case reflect.Float64:
@@ -1045,13 +979,9 @@ func (e *Encoder) kMapCanonical(rtkey reflect.Type, rv reflect.Value, mks []refl
 		}
 		sort.Sort(float64RvSlice(mksv))
 		for i := range mksv {
-			if elemsep {
-				ee.WriteMapElemKey()
-			}
-			ee.EncodeFloat64(mksv[i].v)
-			if elemsep {
-				ee.WriteMapElemValue()
-			}
+			e.mapElemKey()
+			e.e.EncodeFloat64(mksv[i].v)
+			e.mapElemValue()
 			e.encodeValue(rv.MapIndex(mksv[i].r), valFn, true)
 		}
 	case reflect.Struct:
@@ -1064,13 +994,9 @@ func (e *Encoder) kMapCanonical(rtkey reflect.Type, rv reflect.Value, mks []refl
 			}
 			sort.Sort(timeRvSlice(mksv))
 			for i := range mksv {
-				if elemsep {
-					ee.WriteMapElemKey()
-				}
-				ee.EncodeTime(mksv[i].v)
-				if elemsep {
-					ee.WriteMapElemValue()
-				}
+				e.mapElemKey()
+				e.e.EncodeTime(mksv[i].v)
+				e.mapElemValue()
 				e.encodeValue(rv.MapIndex(mksv[i].r), valFn, true)
 			}
 			break
@@ -1091,13 +1017,9 @@ func (e *Encoder) kMapCanonical(rtkey reflect.Type, rv reflect.Value, mks []refl
 		}
 		sort.Sort(bytesRvSlice(mksbv))
 		for j := range mksbv {
-			if elemsep {
-				ee.WriteMapElemKey()
-			}
+			e.mapElemKey()
 			e.asis(mksbv[j].v)
-			if elemsep {
-				ee.WriteMapElemValue()
-			}
+			e.mapElemValue()
 			e.encodeValue(rv.MapIndex(mksbv[j].r), valFn, true)
 		}
 	}
@@ -1111,6 +1033,8 @@ type encWriterSwitch struct {
 	isas  bool // whether e.as != nil
 	js    bool // is json encoder?
 	be    bool // is binary encoder?
+
+	c containerState
 	// _    [3]byte // padding
 	// _    [2]uint64 // padding
 	// _    uint64    // padding
@@ -1276,13 +1200,14 @@ type Encoder struct {
 
 	// NOTE: Encoder shouldn't call it's write methods,
 	// as the handler MAY need to do some coordination.
-	w *encWriterSwitch
+	// w *encWriterSwitch
 
 	// bw *bufio.Writer
 	as encDriverAsis
 
-	h  *BasicHandle
-	hh Handle
+	jenc *jsonEncDriver
+	h    *BasicHandle
+	hh   Handle
 
 	// ---- cpu cache line boundary
 	encWriterSwitch
@@ -1291,9 +1216,10 @@ type Encoder struct {
 
 	// ---- cpu cache line boundary
 	// ---- writable fields during execution --- *try* to keep in sep cache line
-	ci set
+	ci    set        // holds set of addresses found during an encoding (if CheckCircularRef=true)
+	cidef [1]uintptr // default ci
 
-	b [(5 * 8)]byte // for encoding chan byte, (non-addressable) [N]byte, etc
+	b [(4 * 8)]byte // for encoding chan byte, (non-addressable) [N]byte, etc
 
 	// ---- cpu cache line boundary?
 	// b [scratchByteArrayLen]byte
@@ -1329,13 +1255,17 @@ func newEncoder(h Handle) *Encoder {
 		runtime.SetFinalizer(e, (*Encoder).finalize)
 		// xdebugf(">>>> new(Encoder) with finalizer")
 	}
-	e.w = &e.encWriterSwitch
+	// e.w = &e.encWriterSwitch
 	e.hh = h
 	e.esep = h.hasElemSeparators()
 
 	return e
 }
 
+func (e *Encoder) w() *encWriterSwitch {
+	return &e.encWriterSwitch
+}
+
 func (e *Encoder) resetCommon() {
 	// e.w = &e.encWriterSwitch
 	if e.e == nil || e.hh.recreateEncDriver(e.e) {
@@ -1343,9 +1273,21 @@ func (e *Encoder) resetCommon() {
 		e.as, e.isas = e.e.(encDriverAsis)
 		// e.cr, _ = e.e.(containerStateRecv)
 	}
+
+	if e.ci == nil {
+		e.ci = (set)(e.cidef[:0])
+	} else {
+		e.ci = e.ci[:0]
+	}
+
 	e.be = e.hh.isBinary()
+	e.jenc = nil
 	_, e.js = e.hh.(*JsonHandle)
+	if e.js {
+		e.jenc = e.e.(interface{ getJsonEncDriver() *jsonEncDriver }).getJsonEncDriver()
+	}
 	e.e.reset()
+	e.c = 0
 	e.err = nil
 }
 
@@ -1500,7 +1442,7 @@ func (e *Encoder) Encode(v interface{}) (err error) {
 		defer func() {
 			// if error occurred during encoding, return that error;
 			// else if error occurred on end'ing (i.e. during flush), return that error.
-			err = e.w.endErr()
+			err = e.w().endErr()
 			x := recover()
 			if x == nil {
 				if e.err != err {
@@ -1533,7 +1475,7 @@ func (e *Encoder) mustEncode(v interface{}) {
 	if e.wf == nil {
 		e.encode(v)
 		e.e.atEndOfEncode()
-		e.w.end()
+		e.w().end()
 		return
 	}
 
@@ -1548,7 +1490,7 @@ func (e *Encoder) mustEncode(v interface{}) {
 
 	if e.wf.calls == 0 {
 		e.e.atEndOfEncode()
-		e.w.end()
+		e.w().end()
 		if !e.h.ExplicitRelease {
 			e.wf.release()
 		}
@@ -1556,7 +1498,7 @@ func (e *Encoder) mustEncode(v interface{}) {
 }
 
 // func (e *Encoder) deferred(err1 *error) {
-// 	e.w.end()
+// 	e.w().end()
 // 	if recoverPanicToErr {
 // 		if x := recover(); x != nil {
 // 			panicValToErr(e, x, err1)
@@ -1805,7 +1747,7 @@ func (e *Encoder) asis(v []byte) {
 	if e.isas {
 		e.as.EncodeAsis(v)
 	} else {
-		e.w.writeb(v)
+		e.w().writeb(v)
 	}
 }
 
@@ -1821,6 +1763,53 @@ func (e *Encoder) wrapErr(v interface{}, err *error) {
 	*err = encodeError{codecError{name: e.hh.Name(), err: v}}
 }
 
+// ---- container tracker methods
+// Note: We update the .c after calling the callback.
+// This way, the callback can know what the last status was.
+
+func (e *Encoder) mapStart(length int) {
+	e.e.WriteMapStart(length)
+	e.c = containerMapStart
+}
+
+func (e *Encoder) mapElemKey() {
+	if e.js {
+		e.jenc.WriteMapElemKey()
+	}
+	e.c = containerMapKey
+}
+
+func (e *Encoder) mapElemValue() {
+	if e.js {
+		e.jenc.WriteMapElemValue()
+	}
+	e.c = containerMapValue
+}
+
+func (e *Encoder) mapEnd() {
+	e.e.WriteMapEnd()
+	e.c = containerMapEnd
+	e.c = 0
+}
+
+func (e *Encoder) arrayStart(length int) {
+	e.e.WriteArrayStart(length)
+	e.c = containerArrayStart
+}
+
+func (e *Encoder) arrayElem() {
+	if e.js {
+		e.jenc.WriteArrayElem()
+	}
+	e.c = containerArrayElem
+}
+
+func (e *Encoder) arrayEnd() {
+	e.e.WriteArrayEnd()
+	e.c = 0
+	e.c = containerArrayEnd
+}
+
 func encStructFieldKey(encName string, ee encDriver, w *encWriterSwitch,
 	keyType valueType, encNameAsciiAlphaNum bool, js bool) {
 	var m must

+ 322 - 335
codec/fast-path.generated.go

File diff suppressed because it is too large


+ 60 - 55
codec/fast-path.go.tmpl

@@ -10,7 +10,7 @@ package codec
 // Fast path functions try to create a fast path encode or decode implementation
 // for common maps and slices.
 //
-// We define the functions and register then in this single file
+// We define the functions and register them in this single file
 // so as not to pollute the encode.go and decode.go, and create a dependency in there.
 // This file can be omitted without causing a build failure.
 //
@@ -18,9 +18,10 @@ package codec
 //	  - Many calls bypass reflection altogether
 // 
 // Currently support
-//	  - slice of all builtin types,
-//	  - map of all builtin types to string or interface value
-//	  - symmetrical maps of all builtin types (e.g. str-str, uint8-uint8)
+//	  - slice of all builtin types (numeric, bool, string, []byte)
+//    - maps of builtin types to builtin or interface{} type, EXCEPT FOR
+//      keys of type uintptr, int8/16/32, uint16/32, float32/64, bool, interface{}
+//      AND values of type int8/16/32, uint16/32
 // This should provide adequate "typical" implementations.
 // 
 // Note that fast track decode functions must handle values for which an address cannot be obtained.
@@ -188,31 +189,34 @@ func (e *Encoder) {{ .MethodNamePfx "fastpathEnc" false }}R(f *codecFnInfo, rv r
 	}
 }
 func (fastpathT) {{ .MethodNamePfx "Enc" false }}V(v []{{ .Elem }}, e *Encoder) {
-	if v == nil { e.e.EncodeNil(); return }
-	e.e.WriteArrayStart(len(v))
-	for _, v2 := range v {
-		if e.esep { e.e.WriteArrayElem() }
-		{{ encmd .Elem "v2"}}
+	if v == nil {
+		e.e.EncodeNil()
+		return
+	}
+	e.arrayStart(len(v))
+	for j := range v {
+		e.arrayElem()
+		{{ encmd .Elem "v[j]"}}
 	} 
-	e.e.WriteArrayEnd()
+	e.arrayEnd()
 }
 func (fastpathT) {{ .MethodNamePfx "EncAsMap" false }}V(v []{{ .Elem }}, e *Encoder) {
-	if len(v)%2 == 1 {
+	if v == nil {
+		e.e.EncodeNil()
+    } else if len(v)%2 == 1 {
 		e.errorf(fastpathMapBySliceErrMsg, len(v))
-		return
-	}
-	e.e.WriteMapStart(len(v) / 2)
-	for j, v2 := range v {
-		if e.esep {
+	} else {
+		e.mapStart(len(v) / 2)
+		for j := range v {
 			if j%2 == 0 {
-				e.e.WriteMapElemKey()
+				e.mapElemKey()
 			} else {
-				e.e.WriteMapElemValue()
+				e.mapElemValue()
 			}
-		}
-		{{ encmd .Elem "v2"}}
+			{{ encmd .Elem "v[j]"}}
+ 		}
+		e.mapEnd()
 	}
-	e.e.WriteMapEnd()
 }
 {{end}}{{end}}{{end}}
 
@@ -222,10 +226,10 @@ func (e *Encoder) {{ .MethodNamePfx "fastpathEnc" false }}R(f *codecFnInfo, rv r
 }
 func (fastpathT) {{ .MethodNamePfx "Enc" false }}V(v map[{{ .MapKey }}]{{ .Elem }}, e *Encoder) {
 	if v == nil { e.e.EncodeNil(); return }
-	e.e.WriteMapStart(len(v))
+	e.mapStart(len(v))
 	if e.h.Canonical { {{/* need to figure out .NoCanonical */}}
-		{{if eq .MapKey "interface{}"}}{{/* out of band 
-		*/}}var mksv []byte = make([]byte, 0, len(v)*16) // temporary byte slice for the encoding
+		{{if eq .MapKey "interface{}"}}{{/* out of band */ -}}
+		var mksv []byte = make([]byte, 0, len(v)*16) // temporary byte slice for the encoding
 		e2 := NewEncoderBytes(&mksv, e.hh)
 		v2 := make([]bytesIntf, len(v))
 		var i, l uint
@@ -240,9 +244,9 @@ func (fastpathT) {{ .MethodNamePfx "Enc" false }}V(v map[{{ .MapKey }}]{{ .Elem
 		}
 		sort.Sort(bytesIntfSlice(v2))
 		for j := range v2 {
-			if e.esep { e.e.WriteMapElemKey() }
+			e.mapElemKey()
 			e.asis(v2[j].v)
-			if e.esep { e.e.WriteMapElemValue() }
+			e.mapElemValue()
 			e.encode(v[v2[j].i])
 		} {{else}}{{ $x := sorttype .MapKey true}}v2 := make([]{{ $x }}, len(v))
 		var i uint
@@ -252,20 +256,20 @@ func (fastpathT) {{ .MethodNamePfx "Enc" false }}V(v map[{{ .MapKey }}]{{ .Elem
 		}
 		sort.Sort({{ sorttype .MapKey false}}(v2))
 		for _, k2 := range v2 {
-			if e.esep { e.e.WriteMapElemKey() }
+			e.mapElemKey()
 			{{if eq .MapKey "string"}} if e.h.StringToRaw {e.e.EncodeStringBytesRaw(bytesView(k2))} else {e.e.EncodeStringEnc(cUTF8, k2)} {{else}}{{ $y := printf "%s(k2)" .MapKey }}{{if eq $x .MapKey }}{{ $y = "k2" }}{{end}}{{ encmd .MapKey $y }}{{end}}
-			if e.esep { e.e.WriteMapElemValue() }
+			e.mapElemValue()
 			{{ $y := printf "v[%s(k2)]" .MapKey }}{{if eq $x .MapKey }}{{ $y = "v[k2]" }}{{end}}{{ encmd .Elem $y }}
 		} {{end}}
 	} else { 
 		for k2, v2 := range v {
-			if e.esep { e.e.WriteMapElemKey() }
+			e.mapElemKey()
 			{{if eq .MapKey "string"}} if e.h.StringToRaw {e.e.EncodeStringBytesRaw(bytesView(k2))} else {e.e.EncodeStringEnc(cUTF8, k2)} {{else}}{{ encmd .MapKey "k2"}}{{end}}
-			if e.esep { e.e.WriteMapElemValue() }
+			e.mapElemValue()
 			{{ encmd .Elem "v2"}}
 		}
 	}
-	e.e.WriteMapEnd()
+	e.mapEnd()
 }
 {{end}}{{end}}{{end}}
 
@@ -336,8 +340,7 @@ Slices can change if they
 func (d *Decoder) {{ .MethodNamePfx "fastpathDec" false }}R(f *codecFnInfo, rv reflect.Value) {
 	if array := f.seq == seqTypeArray; !array && rv.Kind() == reflect.Ptr {
 		vp := rv2i(rv).(*[]{{ .Elem }})
-		v, changed := fastpathTV.{{ .MethodNamePfx "Dec" false }}V(*vp, !array, d)
-		if changed { *vp = v }
+		if v, changed := fastpathTV.{{ .MethodNamePfx "Dec" false }}V(*vp, !array, d); changed { *vp = v }
 	} else {
 		v := rv2i(rv).([]{{ .Elem }})
 		v2, changed := fastpathTV.{{ .MethodNamePfx "Dec" false }}V(v, !array, d)
@@ -347,8 +350,7 @@ func (d *Decoder) {{ .MethodNamePfx "fastpathDec" false }}R(f *codecFnInfo, rv r
 	}
 }
 func (f fastpathT) {{ .MethodNamePfx "Dec" false }}X(vp *[]{{ .Elem }}, d *Decoder) {
-	v, changed := f.{{ .MethodNamePfx "Dec" false }}V(*vp, true, d)
-	if changed { *vp = v }
+	if v, changed := f.{{ .MethodNamePfx "Dec" false }}V(*vp, true, d); changed { *vp = v }
 }
 func (fastpathT) {{ .MethodNamePfx "Dec" false }}V(v []{{ .Elem }}, canChange bool, d *Decoder) (_ []{{ .Elem }}, changed bool) {
 	{{/* dd := d.d
@@ -363,7 +365,6 @@ func (fastpathT) {{ .MethodNamePfx "Dec" false }}V(v []{{ .Elem }}, canChange bo
 		slh.End()
 		return v, changed
 	}
-	d.depthIncr()
 	hasLen := containerLenS > 0
 	var xlen int 
 	if hasLen && canChange {
@@ -391,7 +392,7 @@ func (fastpathT) {{ .MethodNamePfx "Dec" false }}V(v []{{ .Elem }}, canChange bo
 			v = make([]{{ .Elem }}, uint(xlen))
 			changed = true 
 		}
-		// if indefinite, etc, then expand the slice if necessary
+		{{/* // if indefinite, etc, then expand the slice if necessary */ -}}
 		var decodeIntoBlank bool
 		if j >= len(v) {
 			if canChange {
@@ -421,7 +422,6 @@ func (fastpathT) {{ .MethodNamePfx "Dec" false }}V(v []{{ .Elem }}, canChange bo
 		}
 	}
 	slh.End()
-	d.depthDecr()
 	return v, changed 
 }
 {{end}}{{end}}{{end}}
@@ -435,54 +435,59 @@ Maps can change if they are
 func (d *Decoder) {{ .MethodNamePfx "fastpathDec" false }}R(f *codecFnInfo, rv reflect.Value) {
 	if rv.Kind() == reflect.Ptr {
 		vp := rv2i(rv).(*map[{{ .MapKey }}]{{ .Elem }})
-		v, changed := fastpathTV.{{ .MethodNamePfx "Dec" false }}V(*vp, true, d);
-		if changed { *vp = v }
+		if v, changed := fastpathTV.{{ .MethodNamePfx "Dec" false }}V(*vp, true, d); changed { *vp = v }
 	} else {
 		fastpathTV.{{ .MethodNamePfx "Dec" false }}V(rv2i(rv).(map[{{ .MapKey }}]{{ .Elem }}), false, d)
 	}
 }
 func (f fastpathT) {{ .MethodNamePfx "Dec" false }}X(vp *map[{{ .MapKey }}]{{ .Elem }}, d *Decoder) {
-	v, changed := f.{{ .MethodNamePfx "Dec" false }}V(*vp, true, d)
-	if changed { *vp = v }
+	if v, changed := f.{{ .MethodNamePfx "Dec" false }}V(*vp, true, d); changed { *vp = v }
 }
 func (fastpathT) {{ .MethodNamePfx "Dec" false }}V(v map[{{ .MapKey }}]{{ .Elem }}, canChange bool, 
 	d *Decoder) (_ map[{{ .MapKey }}]{{ .Elem }}, changed bool) {
     {{/*
 		// if d.d.isContainerType(valueTypeNil) {d.d.TryDecodeAsNil()
 	*/ -}}
-	containerLen := d.d.ReadMapStart()
+	containerLen := d.mapStart()
 	if canChange && v == nil {
-		xlen := decInferLen(containerLen, d.h.MaxInitLen, {{ .Size }})
-		v = make(map[{{ .MapKey }}]{{ .Elem }}, xlen)
+		v = make(map[{{ .MapKey }}]{{ .Elem }}, decInferLen(containerLen, d.h.MaxInitLen, {{ .Size }}))
 		changed = true
 	}
 	if containerLen == 0 {
-		d.d.ReadMapEnd()
+		d.mapEnd()
 		return v, changed
 	}
-	d.depthIncr()
-	{{ if eq .Elem "interface{}" }}mapGet := v != nil && !d.h.MapValueReset && !d.h.InterfaceReset
-	{{end}}var mk {{ .MapKey }}
+	{{if eq .Elem "interface{}" }}mapGet := v != nil && !d.h.MapValueReset && !d.h.InterfaceReset
+    {{else if eq .Elem "bytes" "[]byte" }}mapGet := v != nil && !d.h.MapValueReset
+    {{end -}}
+    var mk {{ .MapKey }}
 	var mv {{ .Elem }}
 	hasLen := containerLen > 0
 	for j := 0; (hasLen && j < containerLen) || !(hasLen || d.d.CheckBreak()); j++ {
-		if d.esep { d.d.ReadMapElemKey() }
+		d.mapElemKey()
 		{{ if eq .MapKey "interface{}" }}mk = nil 
 		d.decode(&mk)
 		if bv, bok := mk.([]byte); bok {
 			mk = d.string(bv) {{/* // maps cannot have []byte as key. switch to string. */}}
 		}{{ else }}mk = {{ decmd .MapKey }}{{ end }}
-		if d.esep { d.d.ReadMapElemValue() }
+		d.mapElemValue()
 		if d.d.TryDecodeAsNil() {
 			if v == nil {} else if d.h.DeleteOnNilMapValue { delete(v, mk) } else { v[mk] = {{ zerocmd .Elem }} }
 			continue 
 		}
-		{{ if eq .Elem "interface{}" }}if mapGet { mv = v[mk] } else { mv = nil }
-		d.decode(&mv){{ else }}mv = {{ decmd .Elem }}{{ end }}
+		{{ if eq .Elem "interface{}" "[]byte" "bytes" -}}
+		if mapGet { mv = v[mk] } else { mv = nil }
+		{{ end -}}
+		{{ if eq .Elem "interface{}" -}}
+		d.decode(&mv)
+		{{ else if eq .Elem "[]byte" "bytes" -}}
+		mv = d.d.DecodeBytes(mv, false)
+		{{ else -}}
+		mv = {{ decmd .Elem }}
+		{{ end -}}
 		if v != nil { v[mk] = mv }
 	}
-	d.d.ReadMapEnd()
-	d.depthDecr()
+	d.mapEnd()
 	return v, changed
 }
 {{end}}{{end}}{{end}}
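
Together with the encode-side changes above, this is what brings `map[X][]byte` (and `[][]byte`) into the fast path, with the existing `[]byte` value reused as the decode destination unless `MapValueReset` is set. A usage-level sketch (any supported handle works; json is shown here):

```go
package main

import (
	"fmt"

	"github.com/ugorji/go/codec"
)

// map[string][]byte now round-trips through the generated fast path
// rather than the reflection-based fallback.
func main() {
	in := map[string][]byte{"k1": []byte("hello"), "k2": {0x01, 0x02}}

	var h codec.JsonHandle
	var buf []byte
	if err := codec.NewEncoderBytes(&buf, &h).Encode(in); err != nil {
		panic(err)
	}

	var out map[string][]byte
	if err := codec.NewDecoderBytes(buf, &h).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Printf("%s\n", out["k1"]) // hello
}
```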

+ 17 - 12
codec/gen-dec-array.go.tmpl

@@ -12,7 +12,8 @@ if {{var "l"}} == 0 {
 	} {{else if isChan }}if {{var "v"}} == nil {
 		{{var "v"}} = make({{ .CTyp }}, 0)
 		{{var "c"}} = true
-	} {{end}}
+	}
+    {{end -}}
 } else {
 	{{var "hl"}} := {{var "l"}} > 0
 	var {{var "rl"}} int
@@ -30,9 +31,10 @@ if {{var "l"}} == 0 {
 		{{var "v"}} = {{var "v"}}[:{{var "l"}}]
 		{{var "c"}} = true
 	}
-	} {{end}}
+	}
+    {{end -}}
 	var {{var "j"}} int 
-    // var {{var "dn"}} bool 
+    {{/* // var {{var "dn"}} bool */ -}}
 	for {{var "j"}} = 0; ({{var "hl"}} && {{var "j"}} < {{var "l"}}) || !({{var "hl"}} || r.CheckBreak()); {{var "j"}}++ { // bounds-check-elimination
 		{{if not isArray}} if {{var "j"}} == 0 && {{var "v"}} == nil {
 			if {{var "hl"}} {
@@ -42,27 +44,28 @@ if {{var "l"}} == 0 {
 			}
 			{{var "v"}} = make({{if isSlice}}[]{{ .Typ }}{{else if isChan}}{{.CTyp}}{{end}}, {{var "rl"}})
 			{{var "c"}} = true 
-		}{{end}}
+		}
+        {{end -}}
 		{{var "h"}}.ElemContainerState({{var "j"}})
-        {{/* {{var "dn"}} = r.TryDecodeAsNil() */}}{{/* commented out, as decLineVar handles this already each time */}}
+        {{/* {{var "dn"}} = r.TryDecodeAsNil() */}}{{/* commented out, as decLineVar handles this already each time */ -}}
         {{if isChan}}{{ $x := printf "%[1]vvcx%[2]v" .TempVar .Rand }}var {{$x}} {{ .Typ }}
-		{{ decLineVar $x }}
+		{{ decLineVar $x -}}
 		{{var "v"}} <- {{ $x }}
         // println(">>>> sending ", {{ $x }}, " into ", {{var "v"}}) // TODO: remove this
-        {{else}}{{/* // if indefinite, etc, then expand the slice if necessary */}}
+        {{else}}{{/* // if indefinite, etc, then expand the slice if necessary */ -}}
 		var {{var "db"}} bool
 		if {{var "j"}} >= len({{var "v"}}) {
 			{{if isSlice }} {{var "v"}} = append({{var "v"}}, {{ zero }})
 			{{var "c"}} = true
 			{{else}} z.DecArrayCannotExpand(len(v), {{var "j"}}+1); {{var "db"}} = true
-			{{end}}
+			{{end -}}
 		}
 		if {{var "db"}} {
 			z.DecSwallow()
 		} else {
-			{{ $x := printf "%[1]vv%[2]v[%[1]vj%[2]v]" .TempVar .Rand }}{{ decLineVar $x }}
+			{{ $x := printf "%[1]vv%[2]v[%[1]vj%[2]v]" .TempVar .Rand }}{{ decLineVar $x -}}
 		}
-        {{end}}
+        {{end -}}
 	}
 	{{if isSlice}} if {{var "j"}} < len({{var "v"}}) {
 		{{var "v"}} = {{var "v"}}[:{{var "j"}}]
@@ -70,9 +73,11 @@ if {{var "l"}} == 0 {
 	} else if {{var "j"}} == 0 && {{var "v"}} == nil {
 		{{var "v"}} = make([]{{ .Typ }}, 0)
 		{{var "c"}} = true
-	} {{end}}
+	}
+    {{end -}}
 }
 {{var "h"}}.End()
 {{if not isArray }}if {{var "c"}} { 
 	*{{ .Varname }} = {{var "v"}}
-}{{end}}
+}
+{{end -}}
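
Most of the churn in this template (and in gen.generated.go below, which embeds the same text) is converting `{{end}}` to `{{end -}}` and turning stray comments into `{{/* ... */ -}}` actions: the `-` is text/template's trim marker, so the generated Go code picks up far less blank-line noise. A standalone demonstration of the marker itself:

```go
package main

import (
	"os"
	"text/template"
)

// " -}}" trims the whitespace (including the newline) that follows an action.
func main() {
	const src = "a\n{{if .X }}\nb\n{{end }}\nc\n---\na\n{{if .X -}}\nb\n{{end -}}\nc\n"
	t := template.Must(template.New("trim").Parse(src))
	t.Execute(os.Stdout, map[string]bool{"X": true})
	// Without trim markers: a, blank, b, blank, c.
	// With trim markers:    a, b, c.
}
```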

+ 19 - 11
codec/gen-dec-map.go.tmpl

@@ -1,5 +1,5 @@
 {{var "v"}} := *{{ .Varname }}
-{{var "l"}} := r.ReadMapStart()
+{{var "l"}} := z.DecReadMapStart()
 {{var "bh"}} := z.DecBasicHandle()
 if {{var "v"}} == nil {
 	{{var "rl"}} := z.DecInferLen({{var "l"}}, {{var "bh"}}.MaxInitLen, {{ .Size }})
@@ -17,21 +17,29 @@ if {{var "bh"}}.MapValueReset {
 if {{var "l"}} != 0 {
 {{var "hl"}} := {{var "l"}} > 0 
 	for {{var "j"}} := 0; ({{var "hl"}} && {{var "j"}} < {{var "l"}}) || !({{var "hl"}} || r.CheckBreak()); {{var "j"}}++ {
-	r.ReadMapElemKey() {{/* z.DecSendContainerState(codecSelfer_containerMapKey{{ .Sfx }}) */}}
-	{{ $x := printf "%vmk%v" .TempVar .Rand }}{{ decLineVarK $x }}
-{{ if eq .KTyp "interface{}" }}{{/* // special case if a byte array. */}}if {{var "bv"}}, {{var "bok"}} := {{var "mk"}}.([]byte); {{var "bok"}} {
+	z.DecReadMapElemKey() {{/* z.DecSendContainerState(codecSelfer_containerMapKey{{ .Sfx }}) */}}
+	{{ $x := printf "%vmk%v" .TempVar .Rand }}{{ decLineVarK $x -}}
+	{{ if eq .KTyp "interface{}" }}{{/* // special case if a byte array. */ -}}
+    if {{var "bv"}}, {{var "bok"}} := {{var "mk"}}.([]byte); {{var "bok"}} {
 		{{var "mk"}} = string({{var "bv"}})
-	}{{ end }}{{if decElemKindPtr}}
-	{{var "ms"}} = true{{end}}
+	}
+    {{ end -}}
+    {{if decElemKindPtr -}}
+	{{var "ms"}} = true
+    {{end -}}
 	if {{var "mg"}} {
-		{{if decElemKindPtr}}{{var "mv"}}, {{var "mok"}} = {{var "v"}}[{{var "mk"}}] 
+		{{if decElemKindPtr -}}
+        {{var "mv"}}, {{var "mok"}} = {{var "v"}}[{{var "mk"}}] 
 		if {{var "mok"}} {
 			{{var "ms"}} = false
-		} {{else}}{{var "mv"}} = {{var "v"}}[{{var "mk"}}] {{end}}
+		}
+        {{else -}}
+        {{var "mv"}} = {{var "v"}}[{{var "mk"}}]
+        {{end -}}
 	} {{if not decElemKindImmutable}}else { {{var "mv"}} = {{decElemZero}} }{{end}}
-	r.ReadMapElemValue() {{/* z.DecSendContainerState(codecSelfer_containerMapValue{{ .Sfx }}) */}}
+	z.DecReadMapElemValue() {{/* z.DecSendContainerState(codecSelfer_containerMapValue{{ .Sfx }}) */}}
 	{{var "mdn"}} = false
-	{{ $x := printf "%vmv%v" .TempVar .Rand }}{{ $y := printf "%vmdn%v" .TempVar .Rand }}{{ decLineVar $x $y }}
+	{{ $x := printf "%vmv%v" .TempVar .Rand }}{{ $y := printf "%vmdn%v" .TempVar .Rand }}{{ decLineVar $x $y -}}
 	if {{var "mdn"}} {
 		if {{ var "bh" }}.DeleteOnNilMapValue { delete({{var "v"}}, {{var "mk"}}) } else { {{var "v"}}[{{var "mk"}}] = {{decElemZero}} }
 	} else if {{if decElemKindPtr}} {{var "ms"}} && {{end}} {{var "v"}} != nil {
@@ -39,4 +47,4 @@ if {{var "l"}} != 0 {
 	}
 }
 } // else len==0: TODO: Should we clear map entries?
-r.ReadMapEnd() {{/* z.DecSendContainerState(codecSelfer_containerMapEnd{{ .Sfx }}) */}}
+z.DecReadMapEnd() {{/* z.DecSendContainerState(codecSelfer_containerMapEnd{{ .Sfx }}) */}}

+ 42 - 100
codec/gen-helper.generated.go

@@ -13,7 +13,7 @@ import (
 )
 
 // GenVersion is the current version of codecgen.
-const GenVersion = 11
+const GenVersion = 12
 
 // This file is used to generate helper code for codecgen.
 // The values here i.e. genHelper(En|De)coder are not to be used directly by
@@ -47,42 +47,8 @@ type genHelperEncDriver struct {
 	encDriver
 }
 
-func (x genHelperEncDriver) EncodeBuiltin(rt uintptr, v interface{}) {}
-func (x genHelperEncDriver) EncStructFieldKey(keyType valueType, s string) {
-	encStructFieldKey(s, x.encDriver, nil, keyType, false, false)
-}
-func (x genHelperEncDriver) EncodeSymbol(s string) {
-	x.encDriver.EncodeStringEnc(cUTF8, s)
-}
-
 type genHelperDecDriver struct {
 	decDriver
-	C checkOverflow
-}
-
-func (x genHelperDecDriver) DecodeBuiltin(rt uintptr, v interface{}) {}
-func (x genHelperDecDriver) DecStructFieldKey(keyType valueType, buf *[decScratchByteArrayLen]byte) []byte {
-	return decStructFieldKey(x.decDriver, keyType, buf)
-}
-func (x genHelperDecDriver) DecodeInt(bitsize uint8) (i int64) {
-	return x.C.IntV(x.decDriver.DecodeInt64(), bitsize)
-}
-func (x genHelperDecDriver) DecodeUint(bitsize uint8) (ui uint64) {
-	return x.C.UintV(x.decDriver.DecodeUint64(), bitsize)
-}
-func (x genHelperDecDriver) DecodeFloat(chkOverflow32 bool) (f float64) {
-	f = x.DecodeFloat64()
-	if chkOverflow32 && chkOvf.Float32(f) {
-		panicv.errorf("float32 overflow: %v", f)
-	}
-	return
-}
-func (x genHelperDecDriver) DecodeFloat32As64() (f float64) {
-	f = x.DecodeFloat64()
-	if chkOvf.Float32(f) {
-		panicv.errorf("float32 overflow: %v", f)
-	}
-	return
 }
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
@@ -142,18 +108,6 @@ func (f genHelperEncoder) EncBinaryMarshal(iv encoding.BinaryMarshaler) {
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperEncoder) EncRaw(iv Raw) { f.e.rawBytes(iv) }
 
-// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: builtin no longer supported - so we make this method a no-op,
-// but leave in-place so that old generated files continue to work without regeneration.
-func (f genHelperEncoder) TimeRtidIfBinc() (v uintptr) { return }
-
-// func (f genHelperEncoder) TimeRtidIfBinc() uintptr {
-// 	if _, ok := f.e.hh.(*BincHandle); ok {
-// 		return timeTypId
-// 	}
-// }
-
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperEncoder) I2Rtid(v interface{}) uintptr {
 	return i2rtid(v)
@@ -171,31 +125,32 @@ func (f genHelperEncoder) EncExtension(v interface{}, xfFn *extTypeTagFn) {
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperEncoder) WriteStr(s string) {
-	f.e.w.writestr(s)
+	f.e.w().writestr(s)
 }
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperEncoder) BytesView(v string) []byte { return bytesView(v) }
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: No longer used,
-// but leave in-place so that old generated files continue to work without regeneration.
-func (f genHelperEncoder) HasExtensions() bool {
-	return len(f.e.h.extHandle) != 0
-}
+func (f genHelperEncoder) EncWriteMapStart(length int) { f.e.mapStart(length) }
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: No longer used,
-// but leave in-place so that old generated files continue to work without regeneration.
-func (f genHelperEncoder) EncExt(v interface{}) (r bool) {
-	if xfFn := f.e.h.getExt(i2rtid(v)); xfFn != nil {
-		f.e.e.EncodeExt(v, xfFn.tag, xfFn.ext, f.e)
-		return true
-	}
-	return false
-}
+func (f genHelperEncoder) EncWriteMapEnd() { f.e.mapEnd() }
+
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperEncoder) EncWriteArrayStart(length int) { f.e.arrayStart(length) }
+
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperEncoder) EncWriteArrayEnd() { f.e.arrayEnd() }
+
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperEncoder) EncWriteArrayElem() { f.e.arrayElem() }
+
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperEncoder) EncWriteMapElemKey() { f.e.mapElemKey() }
+
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperEncoder) EncWriteMapElemValue() { f.e.mapElemValue() }
 
 // ---------------- DECODER FOLLOWS -----------------
 
@@ -277,20 +232,6 @@ func (f genHelperDecoder) DecBinaryUnmarshal(bm encoding.BinaryUnmarshaler) {
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperDecoder) DecRaw() []byte { return f.d.rawBytes() }
 
-// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: builtin no longer supported - so we make this method a no-op,
-// but leave in-place so that old generated files continue to work without regeneration.
-func (f genHelperDecoder) TimeRtidIfBinc() (v uintptr) { return }
-
-// func (f genHelperDecoder) TimeRtidIfBinc() uintptr {
-// 	// Note: builtin is no longer supported - so make this a no-op
-// 	if _, ok := f.d.hh.(*BincHandle); ok {
-// 		return timeTypId
-// 	}
-// 	return 0
-// }
-
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperDecoder) IsJSONHandle() bool {
 	return f.d.js
@@ -312,32 +253,33 @@ func (f genHelperDecoder) DecExtension(v interface{}, xfFn *extTypeTagFn) {
 }
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: No longer used,
-// but leave in-place so that old generated files continue to work without regeneration.
-func (f genHelperDecoder) HasExtensions() bool {
-	return len(f.d.h.extHandle) != 0
+func (f genHelperDecoder) DecInferLen(clen, maxlen, unit int) (rvlen int) {
+	return decInferLen(clen, maxlen, unit)
 }
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: No longer used,
-// but leave in-place so that old generated files continue to work without regeneration.
-func (f genHelperDecoder) DecExt(v interface{}) (r bool) {
-	if xfFn := f.d.h.getExt(i2rtid(v)); xfFn != nil {
-		f.d.d.DecodeExt(v, xfFn.tag, xfFn.ext)
-		return true
-	}
-	return false
-}
+func (f genHelperDecoder) StringView(v []byte) string { return stringView(v) }
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-func (f genHelperDecoder) DecInferLen(clen, maxlen, unit int) (rvlen int) {
-	return decInferLen(clen, maxlen, unit)
-}
+func (f genHelperDecoder) DecReadMapStart() int { return f.d.mapStart() }
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: no longer used,
-// but leave in-place so that old generated files continue to work without regeneration.
-func (f genHelperDecoder) StringView(v []byte) string { return stringView(v) }
+func (f genHelperDecoder) DecReadMapEnd() { f.d.mapEnd() }
+
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecReadArrayStart() int { return f.d.arrayStart() }
+
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecReadArrayEnd() { f.d.arrayEnd() }
+
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecReadArrayElem() { f.d.arrayElem() }
+
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecReadMapElemKey() { f.d.mapElemKey() }
+
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecReadMapElemValue() { f.d.mapElemValue() }
+
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecDecodeFloat32() float64 { return f.d.decodeFloat32() }
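
These container methods are how codecgen-generated files now drive the Encoder/Decoder, hence the GenVersion bump to 12: files generated against earlier versions need to be regenerated. Roughly, the encode side of a generated Selfer for a two-field struct now takes the following shape, where `z` is the genHelperEncoder and `r` the genHelperEncDriver (illustrative shape only, not copied from generated output):

```go
z.EncWriteMapStart(2)

z.EncWriteMapElemKey()
r.EncodeStringEnc(cUTF8, `Name`)
z.EncWriteMapElemValue()
r.EncodeStringEnc(cUTF8, x.Name)

z.EncWriteMapElemKey()
r.EncodeStringEnc(cUTF8, `Age`)
z.EncWriteMapElemValue()
r.EncodeInt(int64(x.Age))

z.EncWriteMapEnd()
```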

+ 40 - 90
codec/gen-helper.go.tmpl

@@ -47,43 +47,21 @@ type genHelperEncDriver struct {
 	encDriver
 }
 
-func (x genHelperEncDriver) EncodeBuiltin(rt uintptr, v interface{}) {}
-func (x genHelperEncDriver) EncStructFieldKey(keyType valueType, s string) {
-	encStructFieldKey(s, x.encDriver, nil, keyType, false, false)
-}
-func (x genHelperEncDriver) EncodeSymbol(s string) {
-	x.encDriver.EncodeStringEnc(cUTF8, s)
-}
-
 type genHelperDecDriver struct {
 	decDriver
-	C checkOverflow
 }
 
-func (x genHelperDecDriver) DecodeBuiltin(rt uintptr, v interface{}) {}
+{{/*
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (x genHelperEncDriver) EncStructFieldKey(keyType valueType, s string) {
+	encStructFieldKey(s, x.encDriver, nil, keyType, false, false)
+}
+
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (x genHelperDecDriver) DecStructFieldKey(keyType valueType, buf *[decScratchByteArrayLen]byte) []byte {
 	return decStructFieldKey(x.decDriver, keyType, buf)
 }
-func (x genHelperDecDriver) DecodeInt(bitsize uint8) (i int64) {
-	return x.C.IntV(x.decDriver.DecodeInt64(), bitsize)
-}
-func (x genHelperDecDriver) DecodeUint(bitsize uint8) (ui uint64) {
-	return x.C.UintV(x.decDriver.DecodeUint64(), bitsize)
-}
-func (x genHelperDecDriver) DecodeFloat(chkOverflow32 bool) (f float64) {
-	f = x.DecodeFloat64()
-	if chkOverflow32 && chkOvf.Float32(f) {
-		panicv.errorf("float32 overflow: %v", f)
-	}
-	return
-}
-func (x genHelperDecDriver) DecodeFloat32As64() (f float64) {
-	f = x.DecodeFloat64()
-	if chkOvf.Float32(f) {
-		panicv.errorf("float32 overflow: %v", f)
-	}
-	return
-}
+*/ -}}
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 type genHelperEncoder struct {
@@ -134,16 +112,6 @@ func (f genHelperEncoder) EncBinaryMarshal(iv encoding.BinaryMarshaler) {
 }
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperEncoder) EncRaw(iv Raw) { f.e.rawBytes(iv) }
-// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: builtin no longer supported - so we make this method a no-op, 
-// but leave in-place so that old generated files continue to work without regeneration.
-func (f genHelperEncoder) TimeRtidIfBinc() (v uintptr) { return }
-// func (f genHelperEncoder) TimeRtidIfBinc() uintptr {
-// 	if _, ok := f.e.hh.(*BincHandle); ok {
-// 		return timeTypId
-// 	}
-// }
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperEncoder) I2Rtid(v interface{}) uintptr {
@@ -159,28 +127,25 @@ func (f genHelperEncoder) EncExtension(v interface{}, xfFn *extTypeTagFn) {
 }
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperEncoder) WriteStr(s string) {
-	f.e.w.writestr(s)
+	f.e.w().writestr(s)
 }
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperEncoder) BytesView(v string) []byte { return bytesView(v) }
+
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: No longer used,
-// but leave in-place so that old generated files continue to work without regeneration.
-func (f genHelperEncoder) HasExtensions() bool {
-	return len(f.e.h.extHandle) != 0
-}
+func (f genHelperEncoder) EncWriteMapStart(length int) { f.e.mapStart(length) }
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: No longer used,
-// but leave in-place so that old generated files continue to work without regeneration.
-func (f genHelperEncoder) EncExt(v interface{}) (r bool) {
-	if xfFn := f.e.h.getExt(i2rtid(v)); xfFn != nil {
-		f.e.e.EncodeExt(v, xfFn.tag, xfFn.ext, f.e)
-		return true
-	}
-	return false 
-}
+func (f genHelperEncoder) EncWriteMapEnd() { f.e.mapEnd() }
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperEncoder) EncWriteArrayStart(length int) { f.e.arrayStart(length) }
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperEncoder) EncWriteArrayEnd() { f.e.arrayEnd() }
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperEncoder) EncWriteArrayElem() { f.e.arrayElem() }
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperEncoder) EncWriteMapElemKey() { f.e.mapElemKey() }
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperEncoder) EncWriteMapElemValue() { f.e.mapElemValue() }
 
 // ---------------- DECODER FOLLOWS -----------------
 
@@ -249,18 +214,6 @@ func (f genHelperDecoder) DecBinaryUnmarshal(bm encoding.BinaryUnmarshaler) {
 }
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperDecoder) DecRaw() []byte {	return f.d.rawBytes() }
-// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: builtin no longer supported - so we make this method a no-op, 
-// but leave in-place so that old generated files continue to work without regeneration.
-func (f genHelperDecoder) TimeRtidIfBinc() (v uintptr) { return }
-// func (f genHelperDecoder) TimeRtidIfBinc() uintptr {
-// 	// Note: builtin is no longer supported - so make this a no-op
-// 	if _, ok := f.d.hh.(*BincHandle); ok {
-// 		return timeTypId
-// 	}
-// 	return 0
-// }
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperDecoder) IsJSONHandle() bool {
@@ -279,30 +232,27 @@ func (f genHelperDecoder) DecExtension(v interface{}, xfFn *extTypeTagFn) {
 	f.d.d.DecodeExt(v, xfFn.tag, xfFn.ext)
 }
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: No longer used,
-// but leave in-place so that old generated files continue to work without regeneration.
-func (f genHelperDecoder) HasExtensions() bool {
-	return len(f.d.h.extHandle) != 0
-}
-// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: No longer used,
-// but leave in-place so that old generated files continue to work without regeneration.
-func (f genHelperDecoder) DecExt(v interface{}) (r bool) {
-	if xfFn := f.d.h.getExt(i2rtid(v)); xfFn != nil {
-		f.d.d.DecodeExt(v, xfFn.tag, xfFn.ext)
-		return true
-	}
-	return false 
-}
-// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperDecoder) DecInferLen(clen, maxlen, unit int) (rvlen int) {
 	return decInferLen(clen, maxlen, unit)
 }
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
-//
-// Deprecated: no longer used, 
-// but leave in-place so that old generated files continue to work without regeneration.
 func (f genHelperDecoder) StringView(v []byte) string { return stringView(v) }
 
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecReadMapStart() int { return f.d.mapStart() }
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecReadMapEnd() { f.d.mapEnd() }
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecReadArrayStart() int { return f.d.arrayStart() }
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecReadArrayEnd() { f.d.arrayEnd() }
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecReadArrayElem() { f.d.arrayElem() }
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecReadMapElemKey() { f.d.mapElemKey() }
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecReadMapElemValue() { f.d.mapElemValue() }
+
+// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
+func (f genHelperDecoder) DecDecodeFloat32() float64 { return f.d.decodeFloat32() }
+

+ 36 - 23
codec/gen.generated.go

@@ -9,7 +9,7 @@ package codec
 
 const genDecMapTmpl = `
 {{var "v"}} := *{{ .Varname }}
-{{var "l"}} := r.ReadMapStart()
+{{var "l"}} := z.DecReadMapStart()
 {{var "bh"}} := z.DecBasicHandle()
 if {{var "v"}} == nil {
 	{{var "rl"}} := z.DecInferLen({{var "l"}}, {{var "bh"}}.MaxInitLen, {{ .Size }})
@@ -27,21 +27,29 @@ if {{var "bh"}}.MapValueReset {
 if {{var "l"}} != 0 {
 {{var "hl"}} := {{var "l"}} > 0 
 	for {{var "j"}} := 0; ({{var "hl"}} && {{var "j"}} < {{var "l"}}) || !({{var "hl"}} || r.CheckBreak()); {{var "j"}}++ {
-	r.ReadMapElemKey() {{/* z.DecSendContainerState(codecSelfer_containerMapKey{{ .Sfx }}) */}}
-	{{ $x := printf "%vmk%v" .TempVar .Rand }}{{ decLineVarK $x }}
-{{ if eq .KTyp "interface{}" }}{{/* // special case if a byte array. */}}if {{var "bv"}}, {{var "bok"}} := {{var "mk"}}.([]byte); {{var "bok"}} {
+	z.DecReadMapElemKey() {{/* z.DecSendContainerState(codecSelfer_containerMapKey{{ .Sfx }}) */}}
+	{{ $x := printf "%vmk%v" .TempVar .Rand }}{{ decLineVarK $x -}}
+	{{ if eq .KTyp "interface{}" }}{{/* // special case if a byte array. */ -}}
+    if {{var "bv"}}, {{var "bok"}} := {{var "mk"}}.([]byte); {{var "bok"}} {
 		{{var "mk"}} = string({{var "bv"}})
-	}{{ end }}{{if decElemKindPtr}}
-	{{var "ms"}} = true{{end}}
+	}
+    {{ end -}}
+    {{if decElemKindPtr -}}
+	{{var "ms"}} = true
+    {{end -}}
 	if {{var "mg"}} {
-		{{if decElemKindPtr}}{{var "mv"}}, {{var "mok"}} = {{var "v"}}[{{var "mk"}}] 
+		{{if decElemKindPtr -}}
+        {{var "mv"}}, {{var "mok"}} = {{var "v"}}[{{var "mk"}}] 
 		if {{var "mok"}} {
 			{{var "ms"}} = false
-		} {{else}}{{var "mv"}} = {{var "v"}}[{{var "mk"}}] {{end}}
+		}
+        {{else -}}
+        {{var "mv"}} = {{var "v"}}[{{var "mk"}}]
+        {{end -}}
 	} {{if not decElemKindImmutable}}else { {{var "mv"}} = {{decElemZero}} }{{end}}
-	r.ReadMapElemValue() {{/* z.DecSendContainerState(codecSelfer_containerMapValue{{ .Sfx }}) */}}
+	z.DecReadMapElemValue() {{/* z.DecSendContainerState(codecSelfer_containerMapValue{{ .Sfx }}) */}}
 	{{var "mdn"}} = false
-	{{ $x := printf "%vmv%v" .TempVar .Rand }}{{ $y := printf "%vmdn%v" .TempVar .Rand }}{{ decLineVar $x $y }}
+	{{ $x := printf "%vmv%v" .TempVar .Rand }}{{ $y := printf "%vmdn%v" .TempVar .Rand }}{{ decLineVar $x $y -}}
 	if {{var "mdn"}} {
 		if {{ var "bh" }}.DeleteOnNilMapValue { delete({{var "v"}}, {{var "mk"}}) } else { {{var "v"}}[{{var "mk"}}] = {{decElemZero}} }
 	} else if {{if decElemKindPtr}} {{var "ms"}} && {{end}} {{var "v"}} != nil {
@@ -49,7 +57,7 @@ if {{var "l"}} != 0 {
 	}
 }
 } // else len==0: TODO: Should we clear map entries?
-r.ReadMapEnd() {{/* z.DecSendContainerState(codecSelfer_containerMapEnd{{ .Sfx }}) */}}
+z.DecReadMapEnd() {{/* z.DecSendContainerState(codecSelfer_containerMapEnd{{ .Sfx }}) */}}
 `
 
 const genDecListTmpl = `
@@ -67,7 +75,8 @@ if {{var "l"}} == 0 {
 	} {{else if isChan }}if {{var "v"}} == nil {
 		{{var "v"}} = make({{ .CTyp }}, 0)
 		{{var "c"}} = true
-	} {{end}}
+	}
+    {{end -}}
 } else {
 	{{var "hl"}} := {{var "l"}} > 0
 	var {{var "rl"}} int
@@ -85,9 +94,10 @@ if {{var "l"}} == 0 {
 		{{var "v"}} = {{var "v"}}[:{{var "l"}}]
 		{{var "c"}} = true
 	}
-	} {{end}}
+	}
+    {{end -}}
 	var {{var "j"}} int 
-    // var {{var "dn"}} bool 
+    {{/* // var {{var "dn"}} bool */ -}}
 	for {{var "j"}} = 0; ({{var "hl"}} && {{var "j"}} < {{var "l"}}) || !({{var "hl"}} || r.CheckBreak()); {{var "j"}}++ { // bounds-check-elimination
 		{{if not isArray}} if {{var "j"}} == 0 && {{var "v"}} == nil {
 			if {{var "hl"}} {
@@ -97,27 +107,28 @@ if {{var "l"}} == 0 {
 			}
 			{{var "v"}} = make({{if isSlice}}[]{{ .Typ }}{{else if isChan}}{{.CTyp}}{{end}}, {{var "rl"}})
 			{{var "c"}} = true 
-		}{{end}}
+		}
+        {{end -}}
 		{{var "h"}}.ElemContainerState({{var "j"}})
-        {{/* {{var "dn"}} = r.TryDecodeAsNil() */}}{{/* commented out, as decLineVar handles this already each time */}}
+        {{/* {{var "dn"}} = r.TryDecodeAsNil() */}}{{/* commented out, as decLineVar handles this already each time */ -}}
         {{if isChan}}{{ $x := printf "%[1]vvcx%[2]v" .TempVar .Rand }}var {{$x}} {{ .Typ }}
-		{{ decLineVar $x }}
+		{{ decLineVar $x -}}
 		{{var "v"}} <- {{ $x }}
         // println(">>>> sending ", {{ $x }}, " into ", {{var "v"}}) // TODO: remove this
-        {{else}}{{/* // if indefinite, etc, then expand the slice if necessary */}}
+        {{else}}{{/* // if indefinite, etc, then expand the slice if necessary */ -}}
 		var {{var "db"}} bool
 		if {{var "j"}} >= len({{var "v"}}) {
 			{{if isSlice }} {{var "v"}} = append({{var "v"}}, {{ zero }})
 			{{var "c"}} = true
 			{{else}} z.DecArrayCannotExpand(len(v), {{var "j"}}+1); {{var "db"}} = true
-			{{end}}
+			{{end -}}
 		}
 		if {{var "db"}} {
 			z.DecSwallow()
 		} else {
-			{{ $x := printf "%[1]vv%[2]v[%[1]vj%[2]v]" .TempVar .Rand }}{{ decLineVar $x }}
+			{{ $x := printf "%[1]vv%[2]v[%[1]vj%[2]v]" .TempVar .Rand }}{{ decLineVar $x -}}
 		}
-        {{end}}
+        {{end -}}
 	}
 	{{if isSlice}} if {{var "j"}} < len({{var "v"}}) {
 		{{var "v"}} = {{var "v"}}[:{{var "j"}}]
@@ -125,12 +136,14 @@ if {{var "l"}} == 0 {
 	} else if {{var "j"}} == 0 && {{var "v"}} == nil {
 		{{var "v"}} = make([]{{ .Typ }}, 0)
 		{{var "c"}} = true
-	} {{end}}
+	}
+    {{end -}}
 }
 {{var "h"}}.End()
 {{if not isArray }}if {{var "c"}} { 
 	*{{ .Varname }} = {{var "v"}}
-}{{end}}
+}
+{{end -}}
 `
 
 const genEncChanTmpl = `

+ 17 - 7
codec/helper.go

@@ -265,11 +265,11 @@ type containerState uint8
 const (
 	_ containerState = iota
 
-	containerMapStart // slot left open, since Driver method already covers it
+	containerMapStart
 	containerMapKey
 	containerMapValue
 	containerMapEnd
-	containerArrayStart // slot left open, since Driver methods already cover it
+	containerArrayStart
 	containerArrayElem
 	containerArrayEnd
 )
@@ -1004,11 +1004,6 @@ func (x addExtWrapper) UpdateExt(dest interface{}, v interface{}) {
 	x.ReadExt(dest, v.([]byte))
 }
 
-type extWrapper struct {
-	BytesExt
-	InterfaceExt
-}
-
 type bytesExtFailer struct{}
 
 func (bytesExtFailer) WriteExt(v interface{}) []byte {
@@ -1029,6 +1024,21 @@ func (interfaceExtFailer) UpdateExt(dest interface{}, v interface{}) {
 	panicv.errorstr("InterfaceExt.UpdateExt is not supported")
 }
 
+// type extWrapper struct {
+// 	BytesExt
+// 	InterfaceExt
+// }
+
+type bytesExtWrapper struct {
+	interfaceExtFailer
+	BytesExt
+}
+
+type interfaceExtWrapper struct {
+	bytesExtFailer
+	InterfaceExt
+}
+
 type binaryEncodingType struct{}
 
 func (binaryEncodingType) isBinary() bool { return true }
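
With `extWrapper` gone, an InterfaceExt registered on a text handle is now wrapped in `interfaceExtWrapper` (its embedded `bytesExtFailer` turns any BytesExt call into a clear error), and a BytesExt on a binary handle presumably gets the mirror-image `bytesExtWrapper`. Nothing changes from the user's side; a small runnable sketch of registering an InterfaceExt (the type and tag here are made up for the example):

```go
package main

import (
	"fmt"
	"reflect"

	"github.com/ugorji/go/codec"
)

// Temp is encoded as a bare number via an InterfaceExt.
type Temp struct{ C float64 }

type tempExt struct{}

func (tempExt) ConvertExt(v interface{}) interface{} {
	switch t := v.(type) {
	case Temp:
		return t.C
	case *Temp:
		return t.C
	}
	panic(fmt.Sprintf("unsupported type %T", v))
}

func (tempExt) UpdateExt(dst interface{}, v interface{}) {
	dst.(*Temp).C = v.(float64)
}

func main() {
	var h codec.JsonHandle
	if err := h.SetInterfaceExt(reflect.TypeOf(Temp{}), 78, tempExt{}); err != nil {
		panic(err)
	}

	var buf []byte
	if err := codec.NewEncoderBytes(&buf, &h).Encode(Temp{C: 21.5}); err != nil {
		panic(err)
	}
	fmt.Println(string(buf)) // 21.5

	var out Temp
	if err := codec.NewDecoderBytes(buf, &h).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(out.C) // 21.5
}
```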

+ 1 - 5
codec/helper_not_unsafe.go

@@ -182,11 +182,7 @@ func (d *Decoder) kTime(f *codecFnInfo, rv reflect.Value) {
 }
 
 func (d *Decoder) kFloat32(f *codecFnInfo, rv reflect.Value) {
-	fv := d.d.DecodeFloat64()
-	if chkOvf.Float32(fv) {
-		d.errorf("float32 overflow: %v", fv)
-	}
-	rv.SetFloat(fv)
+	rv.SetFloat(d.decodeFloat32())
 }
 
 func (d *Decoder) kFloat64(f *codecFnInfo, rv reflect.Value) {

+ 1 - 5
codec/helper_unsafe.go

@@ -326,12 +326,8 @@ func (d *Decoder) kTime(f *codecFnInfo, rv reflect.Value) {
 }
 
 func (d *Decoder) kFloat32(f *codecFnInfo, rv reflect.Value) {
-	fv := d.d.DecodeFloat64()
-	if chkOvf.Float32(fv) {
-		d.errorf("float32 overflow: %v", fv)
-	}
 	urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
-	*(*float32)(urv.ptr) = float32(fv)
+	*(*float32)(urv.ptr) = float32(d.decodeFloat32())
 }
 
 func (d *Decoder) kFloat64(f *codecFnInfo, rv reflect.Value) {
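
Both the safe and unsafe kFloat32 paths now call a single `Decoder.decodeFloat32` helper, whose body is not in the hunks shown here. Per the commit description it returns a float64 that is already exact for float32: for json it asks the driver's new DecodeFloat32 (which parses with bitSize 32), and for other formats it falls back to DecodeFloat64 plus the old overflow check. A minimal sketch under those assumptions (`d.js` and `d.jsondriver()` are stand-in names):

```go
// Sketch only; the real helper lives in decode.go and is not shown in this diff.
func (d *Decoder) decodeFloat32() float64 {
	if d.js { // json: parse with strconv bitSize 32 for an exact float32 value
		return d.jsondriver().DecodeFloat32()
	}
	fv := d.d.DecodeFloat64()
	if chkOvf.Float32(fv) {
		d.errorf("float32 overflow: %v", fv)
	}
	return fv
}
```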

+ 184 - 160
codec/json.go

@@ -122,48 +122,38 @@ func init() {
 
 type jsonEncDriverTypical struct {
 	jsonEncDriver
-	_ uint64 // padding
 }
 
-func (e *jsonEncDriverTypical) typical() {}
-
 func (e *jsonEncDriverTypical) WriteArrayStart(length int) {
 	e.w.writen1('[')
-	e.c = containerArrayStart
 }
 
 func (e *jsonEncDriverTypical) WriteArrayElem() {
-	if e.c != containerArrayStart {
+	if e.e.c != containerArrayStart {
 		e.w.writen1(',')
 	}
-	e.c = containerArrayElem
 }
 
 func (e *jsonEncDriverTypical) WriteArrayEnd() {
 	e.w.writen1(']')
-	e.c = containerArrayEnd
 }
 
 func (e *jsonEncDriverTypical) WriteMapStart(length int) {
 	e.w.writen1('{')
-	e.c = containerMapStart
 }
 
 func (e *jsonEncDriverTypical) WriteMapElemKey() {
-	if e.c != containerMapStart {
+	if e.e.c != containerMapStart {
 		e.w.writen1(',')
 	}
-	e.c = containerMapKey
 }
 
 func (e *jsonEncDriverTypical) WriteMapElemValue() {
 	e.w.writen1(':')
-	e.c = containerMapValue
 }
 
 func (e *jsonEncDriverTypical) WriteMapEnd() {
 	e.w.writen1('}')
-	e.c = containerMapEnd
 }
 
 func (e *jsonEncDriverTypical) EncodeBool(b bool) {
@@ -175,8 +165,7 @@ func (e *jsonEncDriverTypical) EncodeBool(b bool) {
 }
 
 func (e *jsonEncDriverTypical) EncodeFloat64(f float64) {
-	fmt, prec := jsonFloatStrconvFmtPrec(f)
-	e.w.writeb(strconv.AppendFloat(e.b[:0], f, fmt, prec, 64))
+	e.encodeFloat(f, 64)
 }
 
 func (e *jsonEncDriverTypical) EncodeInt(v int64) {
@@ -188,7 +177,12 @@ func (e *jsonEncDriverTypical) EncodeUint(v uint64) {
 }
 
 func (e *jsonEncDriverTypical) EncodeFloat32(f float32) {
-	e.EncodeFloat64(float64(f))
+	e.encodeFloat(float64(f), 32)
+}
+
+func (e *jsonEncDriverTypical) encodeFloat(f float64, bitsize uint8) {
+	fmt, prec := jsonFloatStrconvFmtPrec(f)
+	e.w.writeb(strconv.AppendFloat(e.b[:0], f, fmt, prec, int(bitsize)))
 }
 
 // func (e *jsonEncDriverTypical) atEndOfEncode() {
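
Threading the bit size through to strconv is what lets float32 values keep their short textual form instead of picking up float64 noise; ParseFloat with bitSize 32 (added to the json decoder further down) is the matching half on the way in. A standalone illustration of why it matters:

```go
package main

import (
	"fmt"
	"strconv"
)

// Formatting a float32 with bitSize 64 prints the float64 image of the value,
// not the shortest float32 form.
func main() {
	f := float32(0.1)
	fmt.Println(strconv.FormatFloat(float64(f), 'f', -1, 64)) // 0.10000000149011612
	fmt.Println(strconv.FormatFloat(float64(f), 'f', -1, 32)) // 0.1

	// Parsing with bitSize 32 guarantees the result converts back to float32
	// without further rounding error.
	g, _ := strconv.ParseFloat("0.1", 32)
	fmt.Println(float32(g) == f) // true
}
```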
@@ -201,15 +195,6 @@ func (e *jsonEncDriverTypical) EncodeFloat32(f float32) {
 
 type jsonEncDriverGeneric struct {
 	jsonEncDriver
-	// ds string // indent string
-	di int8   // indent per
-	d  bool   // indenting?
-	dt bool   // indent using tabs
-	dl uint16 // indent level
-	ks bool   // map key as string
-	is byte   // integer as string
-	// _  byte      // padding
-	// _  [2]uint64 // padding
 }
 
 // indent is done as below:
@@ -219,15 +204,19 @@ type jsonEncDriverGeneric struct {
 
 func (e *jsonEncDriverGeneric) reset() {
 	e.jsonEncDriver.reset()
-	e.d, e.dt, e.dl, e.di = false, false, 0, 0
-	if e.h.Indent > 0 {
+	e.d, e.dl, e.di = false, 0, 0
+	if e.h.Indent != 0 {
 		e.d = true
 		e.di = int8(e.h.Indent)
-	} else if e.h.Indent < 0 {
-		e.d = true
-		e.dt = true
-		e.di = int8(-e.h.Indent)
 	}
+	// if e.h.Indent > 0 {
+	// 	e.d = true
+	// 	e.di = int8(e.h.Indent)
+	// } else if e.h.Indent < 0 {
+	// 	e.d = true
+	// 	// e.dt = true
+	// 	e.di = int8(-e.h.Indent)
+	// }
 	e.ks = e.h.MapKeyAsString
 	e.is = e.h.IntegerAsString
 }
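
Folding the tabs flag into the sign of `di` does not change the public knob: a positive `JsonHandle.Indent` still means that many spaces per nesting level, and a negative one means tabs (writeIndent, moved into jsonEncDriver further down, checks `di < 0`). For example:

```go
package main

import (
	"fmt"

	"github.com/ugorji/go/codec"
)

func main() {
	var h codec.JsonHandle
	h.Indent = 2 // 2 spaces per level; a negative value indents with tabs
	var buf []byte
	if err := codec.NewEncoderBytes(&buf, &h).Encode(map[string]int{"a": 1}); err != nil {
		panic(err)
	}
	fmt.Println(string(buf))
	// {
	//   "a": 1
	// }
}
```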
@@ -237,28 +226,14 @@ func (e *jsonEncDriverGeneric) WriteArrayStart(length int) {
 		e.dl++
 	}
 	e.w.writen1('[')
-	e.c = containerArrayStart
-}
-
-func (e *jsonEncDriverGeneric) WriteArrayElem() {
-	if e.c != containerArrayStart {
-		e.w.writen1(',')
-	}
-	if e.d {
-		e.writeIndent()
-	}
-	e.c = containerArrayElem
 }
 
 func (e *jsonEncDriverGeneric) WriteArrayEnd() {
 	if e.d {
 		e.dl--
-		if e.c != containerArrayStart {
-			e.writeIndent()
-		}
+		e.writeIndent()
 	}
 	e.w.writen1(']')
-	e.c = containerArrayEnd
 }
 
 func (e *jsonEncDriverGeneric) WriteMapStart(length int) {
@@ -266,59 +241,20 @@ func (e *jsonEncDriverGeneric) WriteMapStart(length int) {
 		e.dl++
 	}
 	e.w.writen1('{')
-	e.c = containerMapStart
-}
-
-func (e *jsonEncDriverGeneric) WriteMapElemKey() {
-	if e.c != containerMapStart {
-		e.w.writen1(',')
-	}
-	if e.d {
-		e.writeIndent()
-	}
-	e.c = containerMapKey
-}
-
-func (e *jsonEncDriverGeneric) WriteMapElemValue() {
-	if e.d {
-		e.w.writen2(':', ' ')
-	} else {
-		e.w.writen1(':')
-	}
-	e.c = containerMapValue
 }
 
 func (e *jsonEncDriverGeneric) WriteMapEnd() {
 	if e.d {
 		e.dl--
-		if e.c != containerMapStart {
+		if e.e.c != containerMapStart {
 			e.writeIndent()
 		}
 	}
 	e.w.writen1('}')
-	e.c = containerMapEnd
-}
-
-func (e *jsonEncDriverGeneric) writeIndent() {
-	e.w.writen1('\n')
-	x := int(e.di) * int(e.dl)
-	if e.dt {
-		for x > jsonSpacesOrTabsLen {
-			e.w.writeb(jsonTabs[:])
-			x -= jsonSpacesOrTabsLen
-		}
-		e.w.writeb(jsonTabs[:x])
-	} else {
-		for x > jsonSpacesOrTabsLen {
-			e.w.writeb(jsonSpaces[:])
-			x -= jsonSpacesOrTabsLen
-		}
-		e.w.writeb(jsonSpaces[:x])
-	}
 }
 
 func (e *jsonEncDriverGeneric) EncodeBool(b bool) {
-	if e.ks && e.c == containerMapKey {
+	if e.ks && e.e.c == containerMapKey {
 		if b {
 			e.w.writeb(jsonLiterals[jsonLitTrueQ : jsonLitTrueQ+6])
 		} else {
@@ -338,7 +274,7 @@ func (e *jsonEncDriverGeneric) EncodeFloat64(f float64) {
 	fmt, prec := jsonFloatStrconvFmtPrec(f)
 
 	var blen int
-	if e.ks && e.c == containerMapKey {
+	if e.ks && e.e.c == containerMapKey {
 		blen = 2 + len(strconv.AppendFloat(e.b[1:1], f, fmt, prec, 64))
 		e.b[0] = '"'
 		e.b[blen-1] = '"'
@@ -350,7 +286,7 @@ func (e *jsonEncDriverGeneric) EncodeFloat64(f float64) {
 
 func (e *jsonEncDriverGeneric) EncodeInt(v int64) {
 	x := e.is
-	if x == 'A' || x == 'L' && (v > 1<<53 || v < -(1<<53)) || (e.ks && e.c == containerMapKey) {
+	if x == 'A' || x == 'L' && (v > 1<<53 || v < -(1<<53)) || (e.ks && e.e.c == containerMapKey) {
 		blen := 2 + len(strconv.AppendInt(e.b[1:1], v, 10))
 		e.b[0] = '"'
 		e.b[blen-1] = '"'
@@ -362,7 +298,7 @@ func (e *jsonEncDriverGeneric) EncodeInt(v int64) {
 
 func (e *jsonEncDriverGeneric) EncodeUint(v uint64) {
 	x := e.is
-	if x == 'A' || x == 'L' && v > 1<<53 || (e.ks && e.c == containerMapKey) {
+	if x == 'A' || x == 'L' && v > 1<<53 || (e.ks && e.e.c == containerMapKey) {
 		blen := 2 + len(strconv.AppendUint(e.b[1:1], v, 10))
 		e.b[0] = '"'
 		e.b[blen-1] = '"'
@@ -394,16 +330,76 @@ func (e *jsonEncDriverGeneric) EncodeFloat32(f float32) {
 
 type jsonEncDriver struct {
 	noBuiltInTypes
+	w *encWriterSwitch
+	e *Encoder
+	h *JsonHandle
+
 	bs []byte // for encoding strings
-	// scratch: encode time, numbers, etc. Note: leave 1 byte for containerState
-	b [jsonScratchArrayLen - 24 - 1]byte // leave space for bs(len,cap), containerState
-	c containerState
-	// _ [2]uint64                 // padding
+	se interfaceExtWrapper
+
 	// ---- cpu cache line boundary?
-	e  *Encoder
-	h  *JsonHandle
-	w  *encWriterSwitch
-	se extWrapper
+	// ds string // indent string
+	di int8 // indent per: if negative, use tabs
+	d  bool // indenting?
+	// dt bool   // indent using tabs
+	dl uint16 // indent level
+	ks bool   // map key as string
+	is byte   // integer as string
+
+	s *bitset256 // safe set for characters (taking h.HTMLCharsAsIs into consideration)
+	// scratch: encode time, numbers, etc.
+	b [jsonScratchArrayLen - 16]byte // leave space for bs(len,cap)
+}
+
+// Keep writeIndent, WriteArrayElem, WriteMapElemKey, WriteMapElemValue
+// in jsonEncDriver, so that *Encoder can directly call them
+
+func (e *jsonEncDriver) getJsonEncDriver() *jsonEncDriver { return e }
+
+func (e *jsonEncDriver) writeIndent() {
+	e.w.writen1('\n')
+	x := int(e.di) * int(e.dl)
+	if e.di < 0 {
+		x = -x
+		for x > jsonSpacesOrTabsLen {
+			e.w.writeb(jsonTabs[:])
+			x -= jsonSpacesOrTabsLen
+		}
+		e.w.writeb(jsonTabs[:x])
+	} else {
+		for x > jsonSpacesOrTabsLen {
+			e.w.writeb(jsonSpaces[:])
+			x -= jsonSpacesOrTabsLen
+		}
+		e.w.writeb(jsonSpaces[:x])
+	}
+}
+
+func (e *jsonEncDriver) WriteArrayElem() {
+	// xdebugf("WriteArrayElem: e.e.c: %d", e.e.c)
+	if e.e.c != containerArrayStart {
+		e.w.writen1(',')
+	}
+	if e.d {
+		e.writeIndent()
+	}
+}
+
+func (e *jsonEncDriver) WriteMapElemKey() {
+	if e.e.c != containerMapStart {
+		e.w.writen1(',')
+	}
+	if e.d {
+		e.writeIndent()
+	}
+}
+
+func (e *jsonEncDriver) WriteMapElemValue() {
+	if e.d {
+		e.w.writen2(':', ' ')
+	} else {
+		e.w.writen1(':')
+	}
 }
 
 func (e *jsonEncDriver) EncodeNil() {
@@ -412,7 +408,7 @@ func (e *jsonEncDriver) EncodeNil() {
 	// ie if initial token is n.
 	e.w.writeb(jsonLiterals[jsonLitNull : jsonLitNull+4])
 
-	// if e.h.MapKeyAsString && e.c == containerMapKey {
+	// if e.h.MapKeyAsString && e.e.c == containerMapKey {
 	// 	e.w.writeb(jsonLiterals[jsonLitNullQ : jsonLitNullQ+6])
 	// } else {
 	// 	e.w.writeb(jsonLiterals[jsonLitNull : jsonLitNull+4])
@@ -485,19 +481,22 @@ func (e *jsonEncDriver) quoteStr(s string) {
 	// adapted from std pkg encoding/json
 	const hex = "0123456789abcdef"
 	w := e.w
-	htmlasis := e.h.HTMLCharsAsIs
 	w.writen1('"')
 	var start int
-	for i, slen := 0, len(s); i < slen; {
+	for i := 0; i < len(s); {
 		// encode all bytes < 0x20 (except \r, \n).
 		// also encode < > & to prevent security holes when served to some browsers.
+
+		// We optimize for ASCII, assuming that most characters are in the BMP
+		// and natively consumed by json without much computation.
+
+		// if 0x20 <= b && b != '\\' && b != '"' && b != '<' && b != '>' && b != '&' {
+		// if (htmlasis && jsonCharSafeSet.isset(b)) || jsonCharHtmlSafeSet.isset(b) {
+		if e.s.isset(s[i]) {
+			i++
+			continue
+		}
 		if b := s[i]; b < utf8.RuneSelf {
-			// if 0x20 <= b && b != '\\' && b != '"' && b != '<' && b != '>' && b != '&' {
-			// if (htmlasis && jsonCharSafeSet.isset(b)) || jsonCharHtmlSafeSet.isset(b) {
-			if jsonCharHtmlSafeSet.isset(b) || (htmlasis && jsonCharSafeSet.isset(b)) {
-				i++
-				continue
-			}
 			if start < i {
 				w.writestr(s[start:i])
 			}
@@ -523,13 +522,15 @@ func (e *jsonEncDriver) quoteStr(s string) {
 			continue
 		}
 		c, size := utf8.DecodeRuneInString(s[i:])
-		if c == utf8.RuneError && size == 1 {
-			if start < i {
-				w.writestr(s[start:i])
+		if c == utf8.RuneError {
+			if size == 1 {
+				if start < i {
+					w.writestr(s[start:i])
+				}
+				w.writestr(`\ufffd`)
+				i++
+				start = i
 			}
-			w.writestr(`\ufffd`)
-			i += size
-			start = i
 			continue
 		}
 		// U+2028 is LINE SEPARATOR. U+2029 is PARAGRAPH SEPARATOR.
@@ -553,20 +554,19 @@ func (e *jsonEncDriver) quoteStr(s string) {
 }
 
 func (e *jsonEncDriver) atEndOfEncode() {
-	// if e.c == 0 { // scalar written, output space
+	// if e.e.c == 0 { // scalar written, output space
 	// 	e.w.writen1(' ')
 	// } else if e.h.TermWhitespace { // container written, output new-line
 	// 	e.w.writen1('\n')
 	// }
 	if e.h.TermWhitespace {
-		if e.c == 0 { // scalar written, output space
+		if e.e.c == 0 { // scalar written, output space
 			e.w.writen1(' ')
 		} else { // container written, output new-line
 			e.w.writen1('\n')
 		}
 	}
 
-	// e.c = 0
 }
 
 type jsonDecDriver struct {
@@ -574,18 +574,18 @@ type jsonDecDriver struct {
 	d  *Decoder
 	h  *JsonHandle
 	r  *decReaderSwitch
-	se extWrapper
+	se interfaceExtWrapper
 
+	bs []byte // scratch, initialized from b. For parsing strings or numbers.
 	// ---- writable fields during execution --- *try* to keep in sep cache line
 
 	// ---- cpu cache line boundary?
-	bs []byte                         // scratch, initialized from b. For parsing strings or numbers.
-	b  [jsonScratchArrayLen - 16]byte // scratch 1, used for parsing strings or numbers or time.Time
+	b [jsonScratchArrayLen]byte // scratch 1, used for parsing strings or numbers or time.Time
 	// ---- cpu cache line boundary?
-	c     containerState
+	// c     containerState
 	tok   uint8                         // used to store the token read right after skipWhiteSpace
 	fnull bool                          // found null from appendStringAsBytes
-	_     byte                          // padding
+	_     [2]byte                       // padding
 	bstr  [4]byte                       // scratch used for string \UXXX parsing
 	b2    [jsonScratchArrayLen - 8]byte // scratch 2, used only for readUntil, decNumBytes
 
@@ -614,7 +614,6 @@ func (d *jsonDecDriver) ReadMapStart() int {
 		d.d.errorf("read map - expect char '%c' but got char '%c'", xc, d.tok)
 	}
 	d.tok = 0
-	d.c = containerMapStart
 	return -1
 }
 
@@ -627,7 +626,6 @@ func (d *jsonDecDriver) ReadArrayStart() int {
 		d.d.errorf("read array - expect char '%c' but got char '%c'", xc, d.tok)
 	}
 	d.tok = 0
-	d.c = containerArrayStart
 	return -1
 }
 
@@ -641,7 +639,7 @@ func (d *jsonDecDriver) CheckBreak() bool {
 // For the ReadXXX methods below, we could just delegate to helper functions
 // readContainerState(c containerState, xc uint8, check bool)
 // - ReadArrayElem would become:
-//   readContainerState(containerArrayElem, ',', d.c != containerArrayStart)
+//   readContainerState(containerArrayElem, ',', d.d.c != containerArrayStart)
 //
 // However, until mid-stack inlining comes in go1.11 which supports inlining of
 // one-liners, we explicitly write them all 5 out to elide the extra func call.
@@ -653,13 +651,13 @@ func (d *jsonDecDriver) ReadArrayElem() {
 	if d.tok == 0 {
 		d.tok = d.r.skip(&jsonCharWhitespaceSet)
 	}
-	if d.c != containerArrayStart {
+	// xdebugf("ReadArrayElem: d.d.c: %d, token: %c", d.d.c, d.tok)
+	if d.d.c != containerArrayStart {
 		if d.tok != xc {
 			d.d.errorf("read array element - expect char '%c' but got char '%c'", xc, d.tok)
 		}
 		d.tok = 0
 	}
-	d.c = containerArrayElem
 }
 
 func (d *jsonDecDriver) ReadArrayEnd() {
@@ -671,7 +669,6 @@ func (d *jsonDecDriver) ReadArrayEnd() {
 		d.d.errorf("read array end - expect char '%c' but got char '%c'", xc, d.tok)
 	}
 	d.tok = 0
-	d.c = containerArrayEnd
 }
 
 func (d *jsonDecDriver) ReadMapElemKey() {
@@ -679,13 +676,12 @@ func (d *jsonDecDriver) ReadMapElemKey() {
 	if d.tok == 0 {
 		d.tok = d.r.skip(&jsonCharWhitespaceSet)
 	}
-	if d.c != containerMapStart {
+	if d.d.c != containerMapStart {
 		if d.tok != xc {
 			d.d.errorf("read map key - expect char '%c' but got char '%c'", xc, d.tok)
 		}
 		d.tok = 0
 	}
-	d.c = containerMapKey
 }
 
 func (d *jsonDecDriver) ReadMapElemValue() {
@@ -697,7 +693,6 @@ func (d *jsonDecDriver) ReadMapElemValue() {
 		d.d.errorf("read map value - expect char '%c' but got char '%c'", xc, d.tok)
 	}
 	d.tok = 0
-	d.c = containerMapValue
 }
 
 func (d *jsonDecDriver) ReadMapEnd() {
@@ -709,7 +704,6 @@ func (d *jsonDecDriver) ReadMapEnd() {
 		d.d.errorf("read map end - expect char '%c' but got char '%c'", xc, d.tok)
 	}
 	d.tok = 0
-	d.c = containerMapEnd
 }
 
 // func (d *jsonDecDriver) readLit(length, fromIdx uint8) {
@@ -762,7 +756,7 @@ func (d *jsonDecDriver) DecodeBool() (v bool) {
 	if d.tok == 0 {
 		d.tok = d.r.skip(&jsonCharWhitespaceSet)
 	}
-	fquot := d.c == containerMapKey && d.tok == '"'
+	fquot := d.d.c == containerMapKey && d.tok == '"'
 	if fquot {
 		d.tok = d.r.readn1()
 	}
@@ -902,18 +896,26 @@ func (d *jsonDecDriver) decUint64ViaFloat(s string) (u uint64) {
 	return uint64(fi)
 }
 
-func (d *jsonDecDriver) DecodeFloat64() (f float64) {
+func (d *jsonDecDriver) decodeFloat(bitsize uint8) (f float64) {
 	bs := d.decNumBytes()
 	if len(bs) == 0 {
 		return
 	}
-	f, err := strconv.ParseFloat(stringView(bs), 64)
+	f, err := strconv.ParseFloat(stringView(bs), int(bitsize))
 	if err != nil {
 		d.d.errorv(err)
 	}
 	return
 }
 
+func (d *jsonDecDriver) DecodeFloat64() (f float64) {
+	return d.decodeFloat(64)
+}
+
+func (d *jsonDecDriver) DecodeFloat32() (f float64) {
+	return d.decodeFloat(32)
+}
+
 func (d *jsonDecDriver) DecodeExt(rv interface{}, xtag uint64, ext Ext) (realxtag uint64) {
 	if ext == nil {
 		re := rv.(*RawExt)
@@ -1177,7 +1179,7 @@ F:
 
 func (d *jsonDecDriver) bsToString() string {
 	// if x := d.s.sc; x != nil && x.so && x.st == '}' { // map key
-	if jsonAlwaysReturnInternString || d.c == containerMapKey {
+	if jsonAlwaysReturnInternString || d.d.c == containerMapKey {
 		return d.d.string(d.bs)
 	}
 	return string(d.bs)
@@ -1209,7 +1211,7 @@ func (d *jsonDecDriver) DecodeNaked() {
 	case '"':
 		// if a string, and MapKeyAsString, then try to decode it as a nil, bool or number first
 		d.appendStringAsBytes()
-		if len(d.bs) > 0 && d.c == containerMapKey && d.h.MapKeyAsString {
+		if len(d.bs) > 0 && d.d.c == containerMapKey && d.h.MapKeyAsString {
 			switch stringView(d.bs) {
 			case "null":
 				z.v = valueTypeNil
@@ -1334,18 +1336,14 @@ func (h *JsonHandle) typical() bool {
 	return h.Indent == 0 && !h.MapKeyAsString && h.IntegerAsString != 'A' && h.IntegerAsString != 'L'
 }
 
-type jsonTypical interface {
-	typical()
-}
-
 func (h *JsonHandle) recreateEncDriver(ed encDriver) (v bool) {
-	_, v = ed.(jsonTypical)
+	_, v = ed.(*jsonEncDriverTypical)
 	return v != h.typical()
 }
 
 // SetInterfaceExt sets an extension
 func (h *JsonHandle) SetInterfaceExt(rt reflect.Type, tag uint64, ext InterfaceExt) (err error) {
-	return h.SetExt(rt, tag, &extWrapper{bytesExtFailer{}, ext})
+	return h.SetExt(rt, tag, &interfaceExtWrapper{InterfaceExt: ext})
 }
 
 func (h *JsonHandle) newEncDriver(e *Encoder) (ee encDriver) {
@@ -1360,7 +1358,6 @@ func (h *JsonHandle) newEncDriver(e *Encoder) (ee encDriver) {
 		hd = &v.jsonEncDriver
 	}
 	hd.e, hd.h, hd.bs = e, h, hd.b[:0]
-	hd.se.BytesExt = bytesExtFailer{}
 	ee.reset()
 	return
 }
@@ -1368,51 +1365,72 @@ func (h *JsonHandle) newEncDriver(e *Encoder) (ee encDriver) {
 func (h *JsonHandle) newDecDriver(d *Decoder) decDriver {
 	// d := jsonDecDriver{r: r.(*bytesDecReader), h: h}
 	hd := jsonDecDriver{d: d, h: h}
-	hd.se.BytesExt = bytesExtFailer{}
 	hd.bs = hd.b[:0]
 	hd.reset()
 	return &hd
 }
 
 func (e *jsonEncDriver) reset() {
-	e.w = e.e.w
+	e.w = e.e.w()
+	// (htmlasis && jsonCharSafeSet.isset(b)) || jsonCharHtmlSafeSet.isset(b)
+	if e.h.HTMLCharsAsIs {
+		e.s = &jsonCharSafeSet
+	} else {
+		e.s = &jsonCharHtmlSafeSet
+	}
 	e.se.InterfaceExt = e.h.RawBytesExt
 	if e.bs == nil {
 		e.bs = e.b[:0]
 	} else {
 		e.bs = e.bs[:0]
 	}
-	e.c = 0
 }
 
 func (d *jsonDecDriver) reset() {
-	d.r = d.d.r
+	d.r = d.d.r()
 	d.se.InterfaceExt = d.h.RawBytesExt
 	if d.bs != nil {
 		d.bs = d.bs[:0]
 	}
-	d.c, d.tok = 0, 0
+	d.tok = 0
 	// d.n.reset()
 }
 
 func jsonFloatStrconvFmtPrec(f float64) (fmt byte, prec int) {
-	prec = -1
+	// set prec to 1 iff mod is 0.
+	//     better than using jsonIsFloatBytesB2 to check if a . or E is in the float bytes.
+	// this ensures that every float has an e or .0 in it.
+
 	var abs = math.Abs(f)
-	if abs != 0 && (abs < 1e-6 || abs >= 1e21) {
+	if abs == 0 || abs == 1 {
+		fmt = 'f'
+		prec = 1
+	} else if abs < 1e-6 || abs >= 1e21 {
 		fmt = 'e'
+		prec = -1
+	} else if abs < 0 {
+		fmt = 'f'
+		prec = -1
+	} else if _, mod := math.Modf(abs); mod == 0 {
+		fmt = 'f'
+		prec = 1
 	} else {
 		fmt = 'f'
-		// set prec to 1 iff mod is 0.
-		//     better than using jsonIsFloatBytesB2 to check if a . or E in the float bytes.
-		// this ensures that every float has an e or .0 in it.
-		if abs <= 1 {
-			if abs == 0 || abs == 1 {
-				prec = 1
-			}
-		} else if _, mod := math.Modf(abs); mod == 0 {
-			prec = 1
-		}
+		prec = -1
 	}
+
+	// prec = -1
+	// if abs != 0 && (abs < 1e-6 || abs >= 1e21) {
+	// 	fmt = 'e'
+	// } else {
+	// 	fmt = 'f'
+	// 	if abs == 0 || abs == 1 {
+	// 		prec = 1
+	// 	} else if abs < 0 {
+	// 	} else if _, mod := math.Modf(abs); mod == 0 {
+	// 		prec = 1
+	// 	}
+	// }
 	return
 }
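
The restructured selection above is meant to be behavior-preserving: whole-valued floats keep a trailing ".0", magnitudes below 1e-6 or at/above 1e21 switch to exponent form, and everything else uses the shortest 'f' form. The concrete outcomes, checked directly against strconv:

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	fmt.Println(strconv.FormatFloat(0, 'f', 1, 64))     // 0.0
	fmt.Println(strconv.FormatFloat(5, 'f', 1, 64))     // 5.0   (Modf remainder 0 -> prec 1)
	fmt.Println(strconv.FormatFloat(0.5, 'f', -1, 64))  // 0.5
	fmt.Println(strconv.FormatFloat(1e21, 'e', -1, 64)) // 1e+21 (abs >= 1e21 -> 'e')
	fmt.Println(strconv.FormatFloat(1e-7, 'e', -1, 64)) // 1e-07 (abs < 1e-6 -> 'e')
}
```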
 
@@ -1457,7 +1475,13 @@ func jsonParseInteger(s []byte) (n uint64, neg, badSyntax, overflow bool) {
 	return
 }
 
+var _ decDriverContainerTracker = (*jsonDecDriver)(nil)
+var _ encDriverContainerTracker = (*jsonEncDriver)(nil)
 var _ decDriver = (*jsonDecDriver)(nil)
 var _ encDriver = (*jsonEncDriverGeneric)(nil)
 var _ encDriver = (*jsonEncDriverTypical)(nil)
-var _ jsonTypical = (*jsonEncDriverTypical)(nil)
+var _ (interface{ getJsonEncDriver() *jsonEncDriver }) = (*jsonEncDriverTypical)(nil)
+var _ (interface{ getJsonEncDriver() *jsonEncDriver }) = (*jsonEncDriverGeneric)(nil)
+var _ (interface{ getJsonEncDriver() *jsonEncDriver }) = (*jsonEncDriver)(nil)
+
+// var _ encDriver = (*jsonEncDriver)(nil)
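
One more observable-but-unchanged behavior worth calling out: quoteStr no longer consults HTMLCharsAsIs per byte; reset() picks the bitset once (jsonCharSafeSet when HTML characters may pass through, jsonCharHtmlSafeSet otherwise) and the hot loop only tests `e.s.isset(b)`. The output stays the same as before:

```go
package main

import (
	"fmt"

	"github.com/ugorji/go/codec"
)

// Default: <, > and & are escaped (html-safe set). HTMLCharsAsIs switches to
// the plain safe set, so they pass through untouched.
func main() {
	var h codec.JsonHandle
	var a []byte
	_ = codec.NewEncoderBytes(&a, &h).Encode("a<b>&c")
	fmt.Println(string(a)) // "a\u003cb\u003e\u0026c"

	h.HTMLCharsAsIs = true
	var b []byte
	_ = codec.NewEncoderBytes(&b, &h).Encode("a<b>&c")
	fmt.Println(string(b)) // "a<b>&c"
}
```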

The diff is not shown because the file is too large.
+ 2214 - 6447
codec/mammoth2_codecgen_generated_test.go


+ 148 - 154
codec/mammoth2_generated_test.go

@@ -32,6 +32,8 @@ type TestMammoth2 struct {
 	FptrIntf    *interface{}
 	FString     string
 	FptrString  *string
+	FBytes      []byte
+	FptrBytes   *[]byte
 	FFloat32    float32
 	FptrFloat32 *float32
 	FFloat64    float64
@@ -65,6 +67,8 @@ type TestMammoth2 struct {
 	FptrSliceIntf    *[]interface{}
 	FSliceString     []string
 	FptrSliceString  *[]string
+	FSliceBytes      [][]byte
+	FptrSliceBytes   *[][]byte
 	FSliceFloat32    []float32
 	FptrSliceFloat32 *[]float32
 	FSliceFloat64    []float64
@@ -94,160 +98,150 @@ type TestMammoth2 struct {
 	FSliceBool       []bool
 	FptrSliceBool    *[]bool
 
-	FMapStringIntf        map[string]interface{}
-	FptrMapStringIntf     *map[string]interface{}
-	FMapStringString      map[string]string
-	FptrMapStringString   *map[string]string
-	FMapStringUint        map[string]uint
-	FptrMapStringUint     *map[string]uint
-	FMapStringUint8       map[string]uint8
-	FptrMapStringUint8    *map[string]uint8
-	FMapStringUint64      map[string]uint64
-	FptrMapStringUint64   *map[string]uint64
-	FMapStringUintptr     map[string]uintptr
-	FptrMapStringUintptr  *map[string]uintptr
-	FMapStringInt         map[string]int
-	FptrMapStringInt      *map[string]int
-	FMapStringInt64       map[string]int64
-	FptrMapStringInt64    *map[string]int64
-	FMapStringFloat32     map[string]float32
-	FptrMapStringFloat32  *map[string]float32
-	FMapStringFloat64     map[string]float64
-	FptrMapStringFloat64  *map[string]float64
-	FMapStringBool        map[string]bool
-	FptrMapStringBool     *map[string]bool
-	FMapUintIntf          map[uint]interface{}
-	FptrMapUintIntf       *map[uint]interface{}
-	FMapUintString        map[uint]string
-	FptrMapUintString     *map[uint]string
-	FMapUintUint          map[uint]uint
-	FptrMapUintUint       *map[uint]uint
-	FMapUintUint8         map[uint]uint8
-	FptrMapUintUint8      *map[uint]uint8
-	FMapUintUint64        map[uint]uint64
-	FptrMapUintUint64     *map[uint]uint64
-	FMapUintUintptr       map[uint]uintptr
-	FptrMapUintUintptr    *map[uint]uintptr
-	FMapUintInt           map[uint]int
-	FptrMapUintInt        *map[uint]int
-	FMapUintInt64         map[uint]int64
-	FptrMapUintInt64      *map[uint]int64
-	FMapUintFloat32       map[uint]float32
-	FptrMapUintFloat32    *map[uint]float32
-	FMapUintFloat64       map[uint]float64
-	FptrMapUintFloat64    *map[uint]float64
-	FMapUintBool          map[uint]bool
-	FptrMapUintBool       *map[uint]bool
-	FMapUint8Intf         map[uint8]interface{}
-	FptrMapUint8Intf      *map[uint8]interface{}
-	FMapUint8String       map[uint8]string
-	FptrMapUint8String    *map[uint8]string
-	FMapUint8Uint         map[uint8]uint
-	FptrMapUint8Uint      *map[uint8]uint
-	FMapUint8Uint8        map[uint8]uint8
-	FptrMapUint8Uint8     *map[uint8]uint8
-	FMapUint8Uint64       map[uint8]uint64
-	FptrMapUint8Uint64    *map[uint8]uint64
-	FMapUint8Uintptr      map[uint8]uintptr
-	FptrMapUint8Uintptr   *map[uint8]uintptr
-	FMapUint8Int          map[uint8]int
-	FptrMapUint8Int       *map[uint8]int
-	FMapUint8Int64        map[uint8]int64
-	FptrMapUint8Int64     *map[uint8]int64
-	FMapUint8Float32      map[uint8]float32
-	FptrMapUint8Float32   *map[uint8]float32
-	FMapUint8Float64      map[uint8]float64
-	FptrMapUint8Float64   *map[uint8]float64
-	FMapUint8Bool         map[uint8]bool
-	FptrMapUint8Bool      *map[uint8]bool
-	FMapUint64Intf        map[uint64]interface{}
-	FptrMapUint64Intf     *map[uint64]interface{}
-	FMapUint64String      map[uint64]string
-	FptrMapUint64String   *map[uint64]string
-	FMapUint64Uint        map[uint64]uint
-	FptrMapUint64Uint     *map[uint64]uint
-	FMapUint64Uint8       map[uint64]uint8
-	FptrMapUint64Uint8    *map[uint64]uint8
-	FMapUint64Uint64      map[uint64]uint64
-	FptrMapUint64Uint64   *map[uint64]uint64
-	FMapUint64Uintptr     map[uint64]uintptr
-	FptrMapUint64Uintptr  *map[uint64]uintptr
-	FMapUint64Int         map[uint64]int
-	FptrMapUint64Int      *map[uint64]int
-	FMapUint64Int64       map[uint64]int64
-	FptrMapUint64Int64    *map[uint64]int64
-	FMapUint64Float32     map[uint64]float32
-	FptrMapUint64Float32  *map[uint64]float32
-	FMapUint64Float64     map[uint64]float64
-	FptrMapUint64Float64  *map[uint64]float64
-	FMapUint64Bool        map[uint64]bool
-	FptrMapUint64Bool     *map[uint64]bool
-	FMapUintptrIntf       map[uintptr]interface{}
-	FptrMapUintptrIntf    *map[uintptr]interface{}
-	FMapUintptrString     map[uintptr]string
-	FptrMapUintptrString  *map[uintptr]string
-	FMapUintptrUint       map[uintptr]uint
-	FptrMapUintptrUint    *map[uintptr]uint
-	FMapUintptrUint8      map[uintptr]uint8
-	FptrMapUintptrUint8   *map[uintptr]uint8
-	FMapUintptrUint64     map[uintptr]uint64
-	FptrMapUintptrUint64  *map[uintptr]uint64
-	FMapUintptrUintptr    map[uintptr]uintptr
-	FptrMapUintptrUintptr *map[uintptr]uintptr
-	FMapUintptrInt        map[uintptr]int
-	FptrMapUintptrInt     *map[uintptr]int
-	FMapUintptrInt64      map[uintptr]int64
-	FptrMapUintptrInt64   *map[uintptr]int64
-	FMapUintptrFloat32    map[uintptr]float32
-	FptrMapUintptrFloat32 *map[uintptr]float32
-	FMapUintptrFloat64    map[uintptr]float64
-	FptrMapUintptrFloat64 *map[uintptr]float64
-	FMapUintptrBool       map[uintptr]bool
-	FptrMapUintptrBool    *map[uintptr]bool
-	FMapIntIntf           map[int]interface{}
-	FptrMapIntIntf        *map[int]interface{}
-	FMapIntString         map[int]string
-	FptrMapIntString      *map[int]string
-	FMapIntUint           map[int]uint
-	FptrMapIntUint        *map[int]uint
-	FMapIntUint8          map[int]uint8
-	FptrMapIntUint8       *map[int]uint8
-	FMapIntUint64         map[int]uint64
-	FptrMapIntUint64      *map[int]uint64
-	FMapIntUintptr        map[int]uintptr
-	FptrMapIntUintptr     *map[int]uintptr
-	FMapIntInt            map[int]int
-	FptrMapIntInt         *map[int]int
-	FMapIntInt64          map[int]int64
-	FptrMapIntInt64       *map[int]int64
-	FMapIntFloat32        map[int]float32
-	FptrMapIntFloat32     *map[int]float32
-	FMapIntFloat64        map[int]float64
-	FptrMapIntFloat64     *map[int]float64
-	FMapIntBool           map[int]bool
-	FptrMapIntBool        *map[int]bool
-	FMapInt64Intf         map[int64]interface{}
-	FptrMapInt64Intf      *map[int64]interface{}
-	FMapInt64String       map[int64]string
-	FptrMapInt64String    *map[int64]string
-	FMapInt64Uint         map[int64]uint
-	FptrMapInt64Uint      *map[int64]uint
-	FMapInt64Uint8        map[int64]uint8
-	FptrMapInt64Uint8     *map[int64]uint8
-	FMapInt64Uint64       map[int64]uint64
-	FptrMapInt64Uint64    *map[int64]uint64
-	FMapInt64Uintptr      map[int64]uintptr
-	FptrMapInt64Uintptr   *map[int64]uintptr
-	FMapInt64Int          map[int64]int
-	FptrMapInt64Int       *map[int64]int
-	FMapInt64Int64        map[int64]int64
-	FptrMapInt64Int64     *map[int64]int64
-	FMapInt64Float32      map[int64]float32
-	FptrMapInt64Float32   *map[int64]float32
-	FMapInt64Float64      map[int64]float64
-	FptrMapInt64Float64   *map[int64]float64
-	FMapInt64Bool         map[int64]bool
-	FptrMapInt64Bool      *map[int64]bool
+	FMapStringIntf       map[string]interface{}
+	FptrMapStringIntf    *map[string]interface{}
+	FMapStringString     map[string]string
+	FptrMapStringString  *map[string]string
+	FMapStringBytes      map[string][]byte
+	FptrMapStringBytes   *map[string][]byte
+	FMapStringUint       map[string]uint
+	FptrMapStringUint    *map[string]uint
+	FMapStringUint8      map[string]uint8
+	FptrMapStringUint8   *map[string]uint8
+	FMapStringUint64     map[string]uint64
+	FptrMapStringUint64  *map[string]uint64
+	FMapStringUintptr    map[string]uintptr
+	FptrMapStringUintptr *map[string]uintptr
+	FMapStringInt        map[string]int
+	FptrMapStringInt     *map[string]int
+	FMapStringInt64      map[string]int64
+	FptrMapStringInt64   *map[string]int64
+	FMapStringFloat32    map[string]float32
+	FptrMapStringFloat32 *map[string]float32
+	FMapStringFloat64    map[string]float64
+	FptrMapStringFloat64 *map[string]float64
+	FMapStringBool       map[string]bool
+	FptrMapStringBool    *map[string]bool
+	FMapUintIntf         map[uint]interface{}
+	FptrMapUintIntf      *map[uint]interface{}
+	FMapUintString       map[uint]string
+	FptrMapUintString    *map[uint]string
+	FMapUintBytes        map[uint][]byte
+	FptrMapUintBytes     *map[uint][]byte
+	FMapUintUint         map[uint]uint
+	FptrMapUintUint      *map[uint]uint
+	FMapUintUint8        map[uint]uint8
+	FptrMapUintUint8     *map[uint]uint8
+	FMapUintUint64       map[uint]uint64
+	FptrMapUintUint64    *map[uint]uint64
+	FMapUintUintptr      map[uint]uintptr
+	FptrMapUintUintptr   *map[uint]uintptr
+	FMapUintInt          map[uint]int
+	FptrMapUintInt       *map[uint]int
+	FMapUintInt64        map[uint]int64
+	FptrMapUintInt64     *map[uint]int64
+	FMapUintFloat32      map[uint]float32
+	FptrMapUintFloat32   *map[uint]float32
+	FMapUintFloat64      map[uint]float64
+	FptrMapUintFloat64   *map[uint]float64
+	FMapUintBool         map[uint]bool
+	FptrMapUintBool      *map[uint]bool
+	FMapUint8Intf        map[uint8]interface{}
+	FptrMapUint8Intf     *map[uint8]interface{}
+	FMapUint8String      map[uint8]string
+	FptrMapUint8String   *map[uint8]string
+	FMapUint8Bytes       map[uint8][]byte
+	FptrMapUint8Bytes    *map[uint8][]byte
+	FMapUint8Uint        map[uint8]uint
+	FptrMapUint8Uint     *map[uint8]uint
+	FMapUint8Uint8       map[uint8]uint8
+	FptrMapUint8Uint8    *map[uint8]uint8
+	FMapUint8Uint64      map[uint8]uint64
+	FptrMapUint8Uint64   *map[uint8]uint64
+	FMapUint8Uintptr     map[uint8]uintptr
+	FptrMapUint8Uintptr  *map[uint8]uintptr
+	FMapUint8Int         map[uint8]int
+	FptrMapUint8Int      *map[uint8]int
+	FMapUint8Int64       map[uint8]int64
+	FptrMapUint8Int64    *map[uint8]int64
+	FMapUint8Float32     map[uint8]float32
+	FptrMapUint8Float32  *map[uint8]float32
+	FMapUint8Float64     map[uint8]float64
+	FptrMapUint8Float64  *map[uint8]float64
+	FMapUint8Bool        map[uint8]bool
+	FptrMapUint8Bool     *map[uint8]bool
+	FMapUint64Intf       map[uint64]interface{}
+	FptrMapUint64Intf    *map[uint64]interface{}
+	FMapUint64String     map[uint64]string
+	FptrMapUint64String  *map[uint64]string
+	FMapUint64Bytes      map[uint64][]byte
+	FptrMapUint64Bytes   *map[uint64][]byte
+	FMapUint64Uint       map[uint64]uint
+	FptrMapUint64Uint    *map[uint64]uint
+	FMapUint64Uint8      map[uint64]uint8
+	FptrMapUint64Uint8   *map[uint64]uint8
+	FMapUint64Uint64     map[uint64]uint64
+	FptrMapUint64Uint64  *map[uint64]uint64
+	FMapUint64Uintptr    map[uint64]uintptr
+	FptrMapUint64Uintptr *map[uint64]uintptr
+	FMapUint64Int        map[uint64]int
+	FptrMapUint64Int     *map[uint64]int
+	FMapUint64Int64      map[uint64]int64
+	FptrMapUint64Int64   *map[uint64]int64
+	FMapUint64Float32    map[uint64]float32
+	FptrMapUint64Float32 *map[uint64]float32
+	FMapUint64Float64    map[uint64]float64
+	FptrMapUint64Float64 *map[uint64]float64
+	FMapUint64Bool       map[uint64]bool
+	FptrMapUint64Bool    *map[uint64]bool
+	FMapIntIntf          map[int]interface{}
+	FptrMapIntIntf       *map[int]interface{}
+	FMapIntString        map[int]string
+	FptrMapIntString     *map[int]string
+	FMapIntBytes         map[int][]byte
+	FptrMapIntBytes      *map[int][]byte
+	FMapIntUint          map[int]uint
+	FptrMapIntUint       *map[int]uint
+	FMapIntUint8         map[int]uint8
+	FptrMapIntUint8      *map[int]uint8
+	FMapIntUint64        map[int]uint64
+	FptrMapIntUint64     *map[int]uint64
+	FMapIntUintptr       map[int]uintptr
+	FptrMapIntUintptr    *map[int]uintptr
+	FMapIntInt           map[int]int
+	FptrMapIntInt        *map[int]int
+	FMapIntInt64         map[int]int64
+	FptrMapIntInt64      *map[int]int64
+	FMapIntFloat32       map[int]float32
+	FptrMapIntFloat32    *map[int]float32
+	FMapIntFloat64       map[int]float64
+	FptrMapIntFloat64    *map[int]float64
+	FMapIntBool          map[int]bool
+	FptrMapIntBool       *map[int]bool
+	FMapInt64Intf        map[int64]interface{}
+	FptrMapInt64Intf     *map[int64]interface{}
+	FMapInt64String      map[int64]string
+	FptrMapInt64String   *map[int64]string
+	FMapInt64Bytes       map[int64][]byte
+	FptrMapInt64Bytes    *map[int64][]byte
+	FMapInt64Uint        map[int64]uint
+	FptrMapInt64Uint     *map[int64]uint
+	FMapInt64Uint8       map[int64]uint8
+	FptrMapInt64Uint8    *map[int64]uint8
+	FMapInt64Uint64      map[int64]uint64
+	FptrMapInt64Uint64   *map[int64]uint64
+	FMapInt64Uintptr     map[int64]uintptr
+	FptrMapInt64Uintptr  *map[int64]uintptr
+	FMapInt64Int         map[int64]int
+	FptrMapInt64Int      *map[int64]int
+	FMapInt64Int64       map[int64]int64
+	FptrMapInt64Int64    *map[int64]int64
+	FMapInt64Float32     map[int64]float32
+	FptrMapInt64Float32  *map[int64]float32
+	FMapInt64Float64     map[int64]float64
+	FptrMapInt64Float64  *map[int64]float64
+	FMapInt64Bool        map[int64]bool
+	FptrMapInt64Bool     *map[int64]bool
 }
 
 // -----------
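
The new `FBytes`, `FSliceBytes` and `FMap*Bytes` fields above exercise the fast paths added for `[]byte` values inside slices and maps. Nothing changes at the call site; the fast path is selected transparently, and the same applies to `[][]byte`. A rough round-trip sketch (the handle choice and sample data are illustrative only):

```go
package main

import (
	"fmt"
	"reflect"

	"github.com/ugorji/go/codec"
)

func main() {
	var mh codec.MsgpackHandle

	in := map[string][]byte{"one": []byte("1"), "two": []byte("2")}

	// Encode: map[string][]byte is now covered by generated fast-path code
	// rather than the slower reflection-based fallback.
	var buf []byte
	if err := codec.NewEncoderBytes(&buf, &mh).Encode(in); err != nil {
		panic(err)
	}

	// Decode back and confirm the bytes survived the round trip.
	var out map[string][]byte
	if err := codec.NewDecoderBytes(buf, &mh).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(reflect.DeepEqual(in, out)) // expected: true
}
```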

The diff is not shown because the file is too large
+ 267 - 336
codec/mammoth_generated_test.go


+ 5 - 5
codec/msgpack.go

@@ -969,23 +969,23 @@ func (h *MsgpackHandle) Name() string { return "msgpack" }
 
 // SetBytesExt sets an extension
 func (h *MsgpackHandle) SetBytesExt(rt reflect.Type, tag uint64, ext BytesExt) (err error) {
-	return h.SetExt(rt, tag, &extWrapper{ext, interfaceExtFailer{}})
+	return h.SetExt(rt, tag, &bytesExtWrapper{BytesExt: ext})
 }
 
 func (h *MsgpackHandle) newEncDriver(e *Encoder) encDriver {
-	return &msgpackEncDriver{e: e, w: e.w, h: h}
+	return &msgpackEncDriver{e: e, w: e.w(), h: h}
 }
 
 func (h *MsgpackHandle) newDecDriver(d *Decoder) decDriver {
-	return &msgpackDecDriver{d: d, h: h, r: d.r, br: d.bytes}
+	return &msgpackDecDriver{d: d, h: h, r: d.r(), br: d.bytes}
 }
 
 func (e *msgpackEncDriver) reset() {
-	e.w = e.e.w
+	e.w = e.e.w()
 }
 
 func (d *msgpackDecDriver) reset() {
-	d.r, d.br = d.d.r, d.d.bytes
+	d.r, d.br = d.d.r(), d.d.bytes
 	d.bd, d.bdRead = 0, false
 }
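
Two things happen in this hunk: the writer/reader switches are now fetched through the `e.w()` / `d.r()` trampolines, and `SetBytesExt` wraps the extension in the slimmer `bytesExtWrapper`. The public entry point keeps its shape; below is a hedged sketch of registering a `BytesExt` (the `wrappedUint` type, the tag value, and the value-or-pointer handling are assumptions for illustration):

```go
package main

import (
	"encoding/binary"
	"fmt"
	"reflect"

	"github.com/ugorji/go/codec"
)

// wrappedUint is an invented user type; the ext stores it as 8 big-endian bytes.
type wrappedUint uint64

type wrappedUintExt struct{}

// WriteExt converts the value to its byte form. Depending on the call path,
// the codec may hand us the value or a pointer to it, so accept both.
func (wrappedUintExt) WriteExt(v interface{}) []byte {
	var u wrappedUint
	switch x := v.(type) {
	case wrappedUint:
		u = x
	case *wrappedUint:
		u = *x
	}
	b := make([]byte, 8)
	binary.BigEndian.PutUint64(b, uint64(u))
	return b
}

// ReadExt rebuilds the value in place from the stored bytes.
func (wrappedUintExt) ReadExt(dst interface{}, src []byte) {
	*(dst.(*wrappedUint)) = wrappedUint(binary.BigEndian.Uint64(src))
}

func main() {
	var mh codec.MsgpackHandle
	// Tag 78 is arbitrary for this sketch; pick one that fits your schema.
	if err := mh.SetBytesExt(reflect.TypeOf(wrappedUint(0)), 78, wrappedUintExt{}); err != nil {
		panic(err)
	}

	in := wrappedUint(42)
	var buf []byte
	if err := codec.NewEncoderBytes(&buf, &mh).Encode(in); err != nil {
		panic(err)
	}

	var out wrappedUint
	if err := codec.NewDecoderBytes(buf, &mh).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(in == out) // expected: true
}
```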
 

+ 2 - 0
codec/shared_test.go

@@ -140,6 +140,8 @@ func init() {
 	testHandles = append(testHandles,
 		// testNoopH,
 		testMsgpackH, testBincH, testSimpleH, testCborH, testJsonH)
+	// JSON should do HTMLCharsAsIs by default
+	testJsonH.HTMLCharsAsIs = true
 	// set ExplicitRelease on each handle
 	testMsgpackH.ExplicitRelease = true
 	testBincH.ExplicitRelease = true
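
`HTMLCharsAsIs` controls whether the json encoder writes `<`, `>`, `&` literally or escapes them as `\u003c`, `\u003e`, `\u0026`; the test handles now exercise the as-is path by default. A quick sketch of the knob (the outputs in the comments are what I would expect, not captured from a run):

```go
package main

import (
	"fmt"

	"github.com/ugorji/go/codec"
)

// encodeJSON encodes v with the given handle and returns the JSON text.
func encodeJSON(h *codec.JsonHandle, v interface{}) string {
	var b []byte
	if err := codec.NewEncoderBytes(&b, h).Encode(v); err != nil {
		panic(err)
	}
	return string(b)
}

func main() {
	var jh codec.JsonHandle

	// Default: HTML-significant characters are escaped.
	fmt.Println(encodeJSON(&jh, "a<b&c>d")) // e.g. "a\u003cb\u0026c\u003ed"

	// With HTMLCharsAsIs, they are written through unchanged.
	jh.HTMLCharsAsIs = true
	fmt.Println(encodeJSON(&jh, "a<b&c>d")) // "a<b&c>d"
}
```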

+ 13 - 39
codec/simple.go

@@ -33,14 +33,14 @@ const (
 
 type simpleEncDriver struct {
 	noBuiltInTypes
+	encDriverNoopContainerWriter
 	// encNoSeparator
 	e *Encoder
 	h *SimpleHandle
 	w *encWriterSwitch
 	b [8]byte
 	// c containerState
-	encDriverTrackContainerWriter
-	// encDriverNoopContainerWriter
+	// encDriverTrackContainerWriter
 	_ [2]uint64 // padding (cache-aligned)
 }
 
@@ -49,7 +49,7 @@ func (e *simpleEncDriver) EncodeNil() {
 }
 
 func (e *simpleEncDriver) EncodeBool(b bool) {
-	if e.h.EncZeroValuesAsNil && e.c != containerMapKey && !b {
+	if e.h.EncZeroValuesAsNil && e.e.c != containerMapKey && !b {
 		e.EncodeNil()
 		return
 	}
@@ -61,7 +61,7 @@ func (e *simpleEncDriver) EncodeBool(b bool) {
 }
 
 func (e *simpleEncDriver) EncodeFloat32(f float32) {
-	if e.h.EncZeroValuesAsNil && e.c != containerMapKey && f == 0.0 {
+	if e.h.EncZeroValuesAsNil && e.e.c != containerMapKey && f == 0.0 {
 		e.EncodeNil()
 		return
 	}
@@ -70,7 +70,7 @@ func (e *simpleEncDriver) EncodeFloat32(f float32) {
 }
 
 func (e *simpleEncDriver) EncodeFloat64(f float64) {
-	if e.h.EncZeroValuesAsNil && e.c != containerMapKey && f == 0.0 {
+	if e.h.EncZeroValuesAsNil && e.e.c != containerMapKey && f == 0.0 {
 		e.EncodeNil()
 		return
 	}
@@ -91,7 +91,7 @@ func (e *simpleEncDriver) EncodeUint(v uint64) {
 }
 
 func (e *simpleEncDriver) encUint(v uint64, bd uint8) {
-	if e.h.EncZeroValuesAsNil && e.c != containerMapKey && v == 0 {
+	if e.h.EncZeroValuesAsNil && e.e.c != containerMapKey && v == 0 {
 		e.EncodeNil()
 		return
 	}
@@ -148,12 +148,10 @@ func (e *simpleEncDriver) encodeExtPreamble(xtag byte, length int) {
 }
 
 func (e *simpleEncDriver) WriteArrayStart(length int) {
-	e.c = containerArrayStart
 	e.encLen(simpleVdArray, length)
 }
 
 func (e *simpleEncDriver) WriteMapStart(length int) {
-	e.c = containerMapStart
 	e.encLen(simpleVdMap, length)
 }
 
@@ -162,7 +160,7 @@ func (e *simpleEncDriver) WriteMapStart(length int) {
 // }
 
 func (e *simpleEncDriver) EncodeStringEnc(c charEncoding, v string) {
-	if false && e.h.EncZeroValuesAsNil && e.c != containerMapKey && v == "" {
+	if false && e.h.EncZeroValuesAsNil && e.e.c != containerMapKey && v == "" {
 		e.EncodeNil()
 		return
 	}
@@ -205,7 +203,7 @@ type simpleDecDriver struct {
 	bdRead bool
 	bd     byte
 	br     bool // a bytes reader?
-	c      containerState
+	// c      containerState
 	// b      [scratchByteArrayLen]byte
 	noBuiltInTypes
 	// noStreamingCodec
@@ -362,7 +360,6 @@ func (d *simpleDecDriver) ReadMapStart() (length int) {
 		d.readNextBd()
 	}
 	d.bdRead = false
-	d.c = containerMapStart
 	return d.decLen()
 }
 
@@ -371,30 +368,9 @@ func (d *simpleDecDriver) ReadArrayStart() (length int) {
 		d.readNextBd()
 	}
 	d.bdRead = false
-	d.c = containerArrayStart
 	return d.decLen()
 }
 
-func (d *simpleDecDriver) ReadArrayElem() {
-	d.c = containerArrayElem
-}
-
-func (d *simpleDecDriver) ReadArrayEnd() {
-	d.c = containerArrayEnd
-}
-
-func (d *simpleDecDriver) ReadMapElemKey() {
-	d.c = containerMapKey
-}
-
-func (d *simpleDecDriver) ReadMapElemValue() {
-	d.c = containerMapValue
-}
-
-func (d *simpleDecDriver) ReadMapEnd() {
-	d.c = containerMapEnd
-}
-
 func (d *simpleDecDriver) decLen() int {
 	switch d.bd % 8 {
 	case 0:
@@ -630,27 +606,25 @@ func (h *SimpleHandle) Name() string { return "simple" }
 
 // SetBytesExt sets an extension
 func (h *SimpleHandle) SetBytesExt(rt reflect.Type, tag uint64, ext BytesExt) (err error) {
-	return h.SetExt(rt, tag, &extWrapper{ext, interfaceExtFailer{}})
+	return h.SetExt(rt, tag, &bytesExtWrapper{BytesExt: ext})
 }
 
 // func (h *SimpleHandle) hasElemSeparators() bool { return true } // as it implements Write(Map|Array)XXX
 
 func (h *SimpleHandle) newEncDriver(e *Encoder) encDriver {
-	return &simpleEncDriver{e: e, w: e.w, h: h}
+	return &simpleEncDriver{e: e, w: e.w(), h: h}
 }
 
 func (h *SimpleHandle) newDecDriver(d *Decoder) decDriver {
-	return &simpleDecDriver{d: d, h: h, r: d.r, br: d.bytes}
+	return &simpleDecDriver{d: d, h: h, r: d.r(), br: d.bytes}
 }
 
 func (e *simpleEncDriver) reset() {
-	e.c = 0
-	e.w = e.e.w
+	e.w = e.e.w()
 }
 
 func (d *simpleDecDriver) reset() {
-	d.c = 0
-	d.r, d.br = d.d.r, d.d.bytes
+	d.r, d.br = d.d.r(), d.d.bytes
 	d.bd, d.bdRead = 0, false
 }
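
With container tracking centralized, `simpleEncDriver`/`simpleDecDriver` drop their own `containerState`: checks such as `EncZeroValuesAsNil` now read the Encoder's state (`e.e.c`), and the `Read*Elem` hooks disappear because only json needs element separators. A very rough illustration of that division of labor (all types and fields below are invented, not the library's internals):

```go
package main

import "fmt"

// containerState mirrors the idea of containerMapKey / containerMapValue tracking.
type containerState int

const (
	containerNone containerState = iota
	containerMapKey
	containerMapValue
)

// encoder plays the central coordinator: it alone updates c.
type encoder struct {
	c   containerState
	out []string
}

// driver is the "dumb" format driver: it only reads the coordinator's state.
type driver struct{ e *encoder }

func (d *driver) encodeBool(b bool, zeroAsNil bool) {
	// Mirrors simpleEncDriver.EncodeBool: a zero value may collapse to nil,
	// but never while writing a map key.
	if zeroAsNil && d.e.c != containerMapKey && !b {
		d.e.out = append(d.e.out, "nil")
		return
	}
	d.e.out = append(d.e.out, fmt.Sprint(b))
}

func main() {
	e := &encoder{}
	d := &driver{e: e}

	e.c = containerMapKey
	d.encodeBool(false, true) // key position: must not collapse to nil
	e.c = containerMapValue
	d.encodeBool(false, true) // value position: collapsed to nil

	fmt.Println(e.out) // expected: [false nil]
}
```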
 

The diff is not shown because the file is too large
+ 165 - 453
codec/values_codecgen_generated_test.go


+ 14 - 2
codec/values_test.go

@@ -124,6 +124,8 @@ type TestStrucCommon struct {
 	Bslice    []bool
 	Byslice   []byte
 
+	BytesSlice [][]byte
+
 	Iptrslice []*int64
 
 	WrapSliceInt64  wrapSliceUint64
@@ -131,6 +133,8 @@ type TestStrucCommon struct {
 
 	Msi64 map[string]int64
 
+	Msbytes map[string][]byte
+
 	Simplef testSimpleFields
 
 	SstrUi64T []stringUint64T
@@ -277,13 +281,21 @@ func populateTestStrucCommon(ts *TestStrucCommon, n int, bench, useInterface, us
 		Ui8slice:  []uint8{210, 211, 212},
 		Bslice:    []bool{true, false, true, false},
 		Byslice:   []byte{13, 14, 15},
-
+		BytesSlice: [][]byte{
+			[]byte(strRpt(n, "one")),
+			[]byte(strRpt(n, "two")),
+			[]byte(strRpt(n, "\"three\"")),
+		},
 		Msi64: map[string]int64{
 			strRpt(n, "one"):       1,
 			strRpt(n, "two"):       2,
 			strRpt(n, "\"three\""): 3,
 		},
-
+		Msbytes: map[string][]byte{
+			strRpt(n, "one"):       []byte(strRpt(n, "one")),
+			strRpt(n, "two"):       []byte(strRpt(n, "two")),
+			strRpt(n, "\"three\""): []byte(strRpt(n, "\"three\"")),
+		},
 		WrapSliceInt64:  []uint64{4, 16, 64, 256},
 		WrapSliceString: []string{strRpt(n, "4"), strRpt(n, "16"), strRpt(n, "64"), strRpt(n, "256")},
 

+ 1 - 1
codec/xml.go

@@ -501,7 +501,7 @@ func (h *XMLHandle) newDecDriver(d *Decoder) decDriver {
 }
 
 func (h *XMLHandle) SetInterfaceExt(rt reflect.Type, tag uint64, ext InterfaceExt) (err error) {
-	return h.SetExt(rt, tag, &extWrapper{bytesExtFailer{}, ext})
+	return h.SetExt(rt, tag, &interfaceExtWrapper{InterfaceExt: ext})
 }
 
 var _ decDriver = (*xmlDecDriver)(nil)
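
`interfaceExtWrapper` is the `InterfaceExt` counterpart of `bytesExtWrapper`: text formats convert a custom type to another Go value rather than to raw bytes. A hedged sketch of an `InterfaceExt` (the `rgb` type, the tag, and registering on `JsonHandle` instead of the still-incomplete xml handle are assumptions; the method set is as I recall it):

```go
package main

import (
	"fmt"
	"reflect"

	"github.com/ugorji/go/codec"
)

// rgb is an invented type; the ext maps it to a plain "r,g,b" string,
// which a text format can encode without any tagging.
type rgb struct{ R, G, B uint8 }

type rgbExt struct{}

// ConvertExt turns the value into an intermediate form the codec already
// understands. Accept value or pointer, since either may be passed.
func (rgbExt) ConvertExt(v interface{}) interface{} {
	var c rgb
	switch x := v.(type) {
	case rgb:
		c = x
	case *rgb:
		c = *x
	}
	return fmt.Sprintf("%d,%d,%d", c.R, c.G, c.B)
}

// UpdateExt rebuilds the value from the decoded intermediate form.
func (rgbExt) UpdateExt(dest interface{}, v interface{}) {
	c := dest.(*rgb)
	fmt.Sscanf(v.(string), "%d,%d,%d", &c.R, &c.G, &c.B)
}

func main() {
	var jh codec.JsonHandle
	if err := jh.SetInterfaceExt(reflect.TypeOf(rgb{}), 120, rgbExt{}); err != nil {
		panic(err)
	}

	var buf []byte
	if err := codec.NewEncoderBytes(&buf, &jh).Encode(rgb{R: 1, G: 2, B: 3}); err != nil {
		panic(err)
	}
	fmt.Println(string(buf)) // expected: "1,2,3"

	var out rgb
	if err := codec.NewDecoderBytes(buf, &jh).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(out) // expected: {1 2 3}
}
```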

+ 0 - 1
go.mod

@@ -1,2 +1 @@
 module github.com/ugorji/go
-

Not all modified files are shown because too many files changed in this diff