
codec: Add canonical encoding support, handle EOF (as non-error) if Read successful, and export CborStreamBreak.

Canonical representation means that encoding a value will always result in the same sequence of bytes.
This mostly applies to maps: in that case, codec does extra work to encode the map keys out of band,
sort them, and only then write the map out to the stream.

The Canonical flag is only honored in the standard runtime-introspection (reflection-based) mode of
encoding; it is ignored by codecgen (code generation). Also, when the Canonical flag is true,
fast-path encoding of maps is skipped.

There is a slight performance hit when the Canonical flag is on, as the keys have to be encoded
out-of-band and sorted before the whole map is encoded.

Canonical mode is turned on via the Canonical flag in EncodeOptions (embedded in the handle).
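
A minimal usage sketch (assuming the usual "github.com/ugorji/go/codec" import; CborHandle is
used here, but any handle that embeds EncodeOptions behaves the same way):

    var h codec.CborHandle
    h.Canonical = true // map keys are encoded out-of-band, sorted, then written

    in := map[string]int{"b": 2, "a": 1}
    var out []byte
    if err := codec.NewEncoderBytes(&out, &h).Encode(in); err != nil {
        // handle error
    }
    // out is now the same byte sequence for this map on every run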

Also, export CborStreamBreak as a convenience and for symmetry, since the other CBOR stream constants are already exported.
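
With the constant exported, callers working with raw CBOR can refer to the break stop-code by name.
A small sketch (hypothetical byte values, same import assumption as above), decoding an
indefinite-length array [1, 2] terminated by the break byte:

    raw := []byte{codec.CborStreamArray, 0x01, 0x02, codec.CborStreamBreak}
    var v []interface{}
    if err := codec.NewDecoderBytes(raw, &codec.CborHandle{}).Decode(&v); err != nil {
        // handle error
    }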

Also, handle EOF when a Read was successful.
If a Read returns data but reports io.EOF at the same time, we do not treat that as an error;
the EOF is postponed until the next read.
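
This follows the io.Reader contract: a Read may return n > 0 together with io.EOF, and that data
is still valid. A hypothetical standalone helper showing the underlying rule (the decoder itself
only postpones EOF when the read filled the requested buffer, or for a successful single-byte read):

    func readPostponingEOF(r io.Reader, p []byte) (n int, err error) {
        n, err = r.Read(p)
        if n > 0 && err == io.EOF {
            err = nil // the read succeeded; surface EOF on the next call instead
        }
        return
    }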

Fixes #56
Fixes #58
Fixes #59
Ugorji Nwoke, 10 years ago · commit 11c92bbf14

codec/0doc.go (+2 -0)

@@ -58,6 +58,8 @@ Rich Feature Set includes:
   - Support binary (e.g. messagepack, cbor) and text (e.g. json) formats
   - Support indefinite-length formats to enable true streaming 
     (for formats which support it e.g. json, cbor)
+  - Support canonical encoding, where a value is ALWAYS encoded as same sequence of bytes.
+    This mostly applies to maps, where iteration order is non-deterministic.
   - NIL in data stream decoded as zero value
   - Never silently skip data when decoding.
     User decides whether to return an error or silently skip data when keys or indexes

codec/README.md (+2 -0)

@@ -59,6 +59,8 @@ Rich Feature Set includes:
   - Support binary (e.g. messagepack, cbor) and text (e.g. json) formats
   - Support indefinite-length formats to enable true streaming 
     (for formats which support it e.g. json, cbor)
+  - Support canonical encoding, where a value is ALWAYS encoded as same sequence of bytes.
+    This mostly applies to maps, where iteration order is non-deterministic.
   - NIL in data stream decoded as zero value
   - Never silently skip data when decoding.
     User decides whether to return an error or silently skip data when keys or indexes

codec/cbor.go (+1 -1)

@@ -40,7 +40,7 @@ const (
 	CborStreamString      = 0x7f
 	CborStreamArray       = 0x9f
 	CborStreamMap         = 0xbf
-	cborStreamBreak       = 0xff
+	CborStreamBreak       = 0xff
 )
 
 const (

codec/codec_test.go (+17 -2)

@@ -63,6 +63,7 @@ var (
 	testInitDebug      bool
 	testUseIoEncDec    bool
 	testStructToArray  bool
+	testCanonical      bool
 	testWriteNoSymbols bool
 	testSkipIntf       bool
 
@@ -95,6 +96,7 @@ func testInitFlags() {
 	flag.BoolVar(&testUseIoEncDec, "ti", false, "Use IO Reader/Writer for Marshal/Unmarshal")
 	flag.BoolVar(&testStructToArray, "ts", false, "Set StructToArray option")
 	flag.BoolVar(&testWriteNoSymbols, "tn", false, "Set NoSymbols option")
+	flag.BoolVar(&testCanonical, "tc", false, "Set Canonical option")
 	flag.BoolVar(&testSkipIntf, "tf", false, "Skip Interfaces")
 }
 
@@ -267,17 +269,26 @@ func testInit() {
 		fmt.Printf("====> depth: %v, ts: %#v\n", 2, ts0)
 	}
 
+	testJsonH.Canonical = testCanonical
+	testCborH.Canonical = testCanonical
+	testSimpleH.Canonical = testCanonical
+	testBincH.Canonical = testCanonical
+	testMsgpackH.Canonical = testCanonical
+
 	testJsonH.StructToArray = testStructToArray
 	testCborH.StructToArray = testStructToArray
 	testSimpleH.StructToArray = testStructToArray
 	testBincH.StructToArray = testStructToArray
+	testMsgpackH.StructToArray = testStructToArray
+
+	testMsgpackH.RawToString = true
+
 	if testWriteNoSymbols {
 		testBincH.AsSymbols = AsSymbolNone
 	} else {
 		testBincH.AsSymbols = AsSymbolAll
 	}
-	testMsgpackH.StructToArray = testStructToArray
-	testMsgpackH.RawToString = true
+
 	// testMsgpackH.AddExt(byteSliceTyp, 0, testMsgpackH.BinaryEncodeExt, testMsgpackH.BinaryDecodeExt)
 	// testMsgpackH.AddExt(timeTyp, 1, testMsgpackH.TimeEncodeExt, testMsgpackH.TimeDecodeExt)
 	timeEncExt := func(rv reflect.Value) (bs []byte, err error) {
@@ -1100,3 +1111,7 @@ func TestBincUnderlyingType(t *testing.T) {
 //   - interfaces: textMarshaler, binaryMarshaler, codecSelfer
 //   - struct tags:
 //     on anonymous fields, _struct (all fields), etc
+//   - codecgen of struct containing channels.
+//
+//   Cleanup tests:
+//   - They are brittle in their handling of validation and skipping

codec/decode.go (+17 -11)

@@ -141,6 +141,9 @@ func (z *ioDecByteScanner) Read(p []byte) (n int, err error) {
 	}
 	n, err = z.r.Read(p)
 	if n > 0 {
+		if err == io.EOF && n == len(p) {
+			err = nil // read was successful, so postpone EOF (till next time)
+		}
 		z.l = p[n-1]
 		z.ls = 2
 	}
@@ -154,6 +157,9 @@ func (z *ioDecByteScanner) ReadByte() (c byte, err error) {
 	n, err := z.Read(z.b[:])
 	if n == 1 {
 		c = z.b[0]
+		if err == io.EOF {
+			err = nil // read was successful, so postpone EOF (till next time)
+		}
 	}
 	return
 }
@@ -707,7 +713,7 @@ func (f decFnInfo) kSlice(rv reflect.Value) {
 	for rtelem.Kind() == reflect.Ptr {
 		rtelem = rtelem.Elem()
 	}
-	fn := d.getDecFn(rtelem, true)
+	fn := d.getDecFn(rtelem, true, true)
 
 	rv0 := rv
 	rvChanged := false
@@ -816,10 +822,10 @@ func (f decFnInfo) kMap(rv reflect.Value) {
 	var xtyp reflect.Type
 	for xtyp = ktype; xtyp.Kind() == reflect.Ptr; xtyp = xtyp.Elem() {
 	}
-	keyFn = d.getDecFn(xtyp, true)
+	keyFn = d.getDecFn(xtyp, true, true)
 	for xtyp = vtype; xtyp.Kind() == reflect.Ptr; xtyp = xtyp.Elem() {
 	}
-	valFn = d.getDecFn(xtyp, true)
+	valFn = d.getDecFn(xtyp, true, true)
 	// for j := 0; j < containerLen; j++ {
 	if containerLen > 0 {
 		for j := 0; j < containerLen; j++ {
@@ -1153,7 +1159,7 @@ func (d *Decoder) decode(iv interface{}) {
 
 
 	default:
 		if !fastpathDecodeTypeSwitch(iv, d) {
-			d.decodeI(iv, true, false, false)
+			d.decodeI(iv, true, false, false, false)
 		}
 	}
 }
@@ -1180,14 +1186,14 @@ func (d *Decoder) preDecodeValue(rv reflect.Value, tryNil bool) (rv2 reflect.Val
 	return rv, true
 }
 
-func (d *Decoder) decodeI(iv interface{}, checkPtr, tryNil, decFnCheckAll bool) {
+func (d *Decoder) decodeI(iv interface{}, checkPtr, tryNil, checkFastpath, checkCodecSelfer bool) {
 	rv := reflect.ValueOf(iv)
 	if checkPtr {
 		d.chkPtrValue(rv)
 	}
 	rv, proceed := d.preDecodeValue(rv, tryNil)
 	if proceed {
-		fn := d.getDecFn(rv.Type(), decFnCheckAll)
+		fn := d.getDecFn(rv.Type(), checkFastpath, checkCodecSelfer)
 		fn.f(fn.i, rv)
 	}
 }
@@ -1195,7 +1201,7 @@ func (d *Decoder) decodeI(iv interface{}, checkPtr, tryNil, decFnCheckAll bool)
 func (d *Decoder) decodeValue(rv reflect.Value, fn decFn) {
 	if rv, proceed := d.preDecodeValue(rv, true); proceed {
 		if fn.f == nil {
-			fn = d.getDecFn(rv.Type(), true)
+			fn = d.getDecFn(rv.Type(), true, true)
 		}
 		fn.f(fn.i, rv)
 	}
@@ -1204,13 +1210,13 @@ func (d *Decoder) decodeValue(rv reflect.Value, fn decFn) {
 func (d *Decoder) decodeValueNotNil(rv reflect.Value, fn decFn) {
 	if rv, proceed := d.preDecodeValue(rv, false); proceed {
 		if fn.f == nil {
-			fn = d.getDecFn(rv.Type(), true)
+			fn = d.getDecFn(rv.Type(), true, true)
 		}
 		fn.f(fn.i, rv)
 	}
 }
 
-func (d *Decoder) getDecFn(rt reflect.Type, checkAll bool) (fn decFn) {
+func (d *Decoder) getDecFn(rt reflect.Type, checkFastpath, checkCodecSelfer bool) (fn decFn) {
 	rtid := reflect.ValueOf(rt).Pointer()
 
 	// retrieve or register a focus'ed function for this type
@@ -1247,7 +1253,7 @@ func (d *Decoder) getDecFn(rt reflect.Type, checkAll bool) (fn decFn) {
 	//
 	// NOTE: if decoding into a nil interface{}, we return a non-nil
 	// value except even if the container registers a length of 0.
-	if checkAll && ti.cs {
+	if checkCodecSelfer && ti.cs {
 		fi.decFnInfoX = &decFnInfoX{d: d, ti: ti}
 		fn.f = (decFnInfo).selferUnmarshal
 	} else if rtid == rawExtTypId {
@@ -1269,7 +1275,7 @@ func (d *Decoder) getDecFn(rt reflect.Type, checkAll bool) (fn decFn) {
 		fn.f = (decFnInfo).textUnmarshal
 	} else {
 		rk := rt.Kind()
-		if fastpathEnabled && checkAll && (rk == reflect.Map || rk == reflect.Slice) {
+		if fastpathEnabled && checkFastpath && (rk == reflect.Map || rk == reflect.Slice) {
 			if rt.PkgPath() == "" {
 				if idx := fastpathAV.index(rtid); idx != -1 {
 					fi.decFnInfoX = &decFnInfoX{d: d, ti: ti}

codec/encode.go (+100 -78)

@@ -4,11 +4,13 @@
 package codec
 
 import (
+	"bytes"
 	"encoding"
 	"errors"
 	"fmt"
 	"io"
 	"reflect"
+	"sort"
 	"sync"
 )
 
@@ -83,6 +85,17 @@ func (_ encNoSeparator) EncodeArrayEntrySeparator() {}
 func (_ encNoSeparator) EncodeMapEntrySeparator()   {}
 func (_ encNoSeparator) EncodeMapKVSeparator()      {}
 
+type encStructFieldBytesV struct {
+	b []byte
+	v reflect.Value
+}
+
+type encStructFieldBytesVslice []encStructFieldBytesV
+
+func (p encStructFieldBytesVslice) Len() int           { return len(p) }
+func (p encStructFieldBytesVslice) Less(i, j int) bool { return bytes.Compare(p[i].b, p[j].b) == -1 }
+func (p encStructFieldBytesVslice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
+
 type ioEncWriterWriter interface {
 	WriteByte(c byte) error
 	WriteString(s string) (n int, err error)
@@ -94,9 +107,16 @@ type ioEncStringWriter interface {
 }
 
 type EncodeOptions struct {
-	// Encode a struct as an array, and not as a map.
+	// Encode a struct as an array, and not as a map
 	StructToArray bool
 
+	// Canonical representation means that encoding a value will always result in the same
+	// sequence of bytes.
+	//
+	// This mostly will apply to maps. In this case, codec will do more work to encode the
+	// map keys out of band, and then sort them, before writing out the map to the stream.
+	Canonical bool
+
 	// AsSymbols defines what should be encoded as symbols.
 	//
 	// Encoding as symbols can reduce the encoded size significantly.
@@ -454,7 +474,7 @@ func (f encFnInfo) kSlice(rv reflect.Value) {
 		var fn encFn
 		if rtelem.Kind() != reflect.Interface {
 			rtelemid := reflect.ValueOf(rtelem).Pointer()
-			fn = e.getEncFn(rtelemid, rtelem, true)
+			fn = e.getEncFn(rtelemid, rtelem, true, true)
 		}
 		// TODO: Consider perf implication of encoding odd index values as symbols if type is string
 		if sep {
@@ -509,8 +529,7 @@ func (f encFnInfo) kStruct(rv reflect.Value) {
 	// Use sync.Pool to reduce allocating slices unnecessarily.
 	// The cost of the occasional locking is less than the cost of locking.
 
-	var rvals []reflect.Value
-	var encnames []string
+	var fkvs []encStructFieldKV
 	var pool *sync.Pool
 	var poolv interface{}
 	idxpool := newlen / 8
@@ -521,62 +540,49 @@ func (f encFnInfo) kStruct(rv reflect.Value) {
 		pool = &encStructPool[idxpool]
 		poolv = pool.Get()
 		switch vv := poolv.(type) {
-		case *encStructPool8:
-			rvals = vv.r[:newlen]
-			if toMap {
-				encnames = vv.s[:newlen]
-			}
-		case *encStructPool16:
-			rvals = vv.r[:newlen]
-			if toMap {
-				encnames = vv.s[:newlen]
-			}
-		case *encStructPool32:
-			rvals = vv.r[:newlen]
-			if toMap {
-				encnames = vv.s[:newlen]
-			}
-		case *encStructPool64:
-			rvals = vv.r[:newlen]
-			if toMap {
-				encnames = vv.s[:newlen]
-			}
+		case *[8]encStructFieldKV:
+			fkvs = vv[:newlen]
+		case *[16]encStructFieldKV:
+			fkvs = vv[:newlen]
+		case *[32]encStructFieldKV:
+			fkvs = vv[:newlen]
+		case *[64]encStructFieldKV:
+			fkvs = vv[:newlen]
 		}
 	}
-	if rvals == nil {
-		rvals = make([]reflect.Value, newlen)
+	if fkvs == nil {
+		fkvs = make([]encStructFieldKV, newlen)
 	}
 	// if toMap, use the sorted array. If toArray, use unsorted array (to match sequence in struct)
 	if toMap {
 		tisfi = fti.sfi
-		if encnames == nil {
-			encnames = make([]string, newlen)
-		}
 	}
 	newlen = 0
+	var kv encStructFieldKV
 	for _, si := range tisfi {
-		rvals[newlen] = si.field(rv, false)
+		kv.v = si.field(rv, false)
 		// if si.i != -1 {
 		// 	rvals[newlen] = rv.Field(int(si.i))
 		// } else {
 		// 	rvals[newlen] = rv.FieldByIndex(si.is)
 		// }
 		if toMap {
-			if si.omitEmpty && isEmptyValue(rvals[newlen]) {
+			if si.omitEmpty && isEmptyValue(kv.v) {
 				continue
 			}
-			encnames[newlen] = si.encName
+			kv.k = si.encName
 		} else {
 			// use the zero value.
 			// if a reference or struct, set to nil (so you do not output too much)
-			if si.omitEmpty && isEmptyValue(rvals[newlen]) {
-				switch rvals[newlen].Kind() {
+			if si.omitEmpty && isEmptyValue(kv.v) {
+				switch kv.v.Kind() {
 				case reflect.Struct, reflect.Interface, reflect.Ptr, reflect.Array,
 					reflect.Map, reflect.Slice:
-					rvals[newlen] = reflect.Value{} //encode as nil
+					kv.v = reflect.Value{} //encode as nil
 				}
 			}
 		}
+		fkvs[newlen] = kv
 		newlen++
 	}
 
@@ -589,25 +595,27 @@ func (f encFnInfo) kStruct(rv reflect.Value) {
 			// asSymbols := e.h.AsSymbols&AsSymbolStructFieldNameFlag != 0
 			asSymbols := e.h.AsSymbols == AsSymbolDefault || e.h.AsSymbols&AsSymbolStructFieldNameFlag != 0
 			for j := 0; j < newlen; j++ {
+				kv = fkvs[j]
 				if j > 0 {
 					ee.EncodeMapEntrySeparator()
 				}
 				if asSymbols {
-					ee.EncodeSymbol(encnames[j])
+					ee.EncodeSymbol(kv.k)
 				} else {
-					ee.EncodeString(c_UTF8, encnames[j])
+					ee.EncodeString(c_UTF8, kv.k)
 				}
 				ee.EncodeMapKVSeparator()
-				e.encodeValue(rvals[j], encFn{})
+				e.encodeValue(kv.v, encFn{})
 			}
 			ee.EncodeMapEnd()
 		} else {
 			ee.EncodeArrayStart(newlen)
 			for j := 0; j < newlen; j++ {
+				kv = fkvs[j]
 				if j > 0 {
 					ee.EncodeArrayEntrySeparator()
 				}
-				e.encodeValue(rvals[j], encFn{})
+				e.encodeValue(kv.v, encFn{})
 			}
 			ee.EncodeArrayEnd()
 		}
@@ -617,17 +625,19 @@ func (f encFnInfo) kStruct(rv reflect.Value) {
 			// asSymbols := e.h.AsSymbols&AsSymbolStructFieldNameFlag != 0
 			asSymbols := e.h.AsSymbols == AsSymbolDefault || e.h.AsSymbols&AsSymbolStructFieldNameFlag != 0
 			for j := 0; j < newlen; j++ {
+				kv = fkvs[j]
 				if asSymbols {
-					ee.EncodeSymbol(encnames[j])
+					ee.EncodeSymbol(kv.k)
 				} else {
-					ee.EncodeString(c_UTF8, encnames[j])
+					ee.EncodeString(c_UTF8, kv.k)
 				}
-				e.encodeValue(rvals[j], encFn{})
+				e.encodeValue(kv.v, encFn{})
 			}
 		} else {
 			ee.EncodeArrayStart(newlen)
 			for j := 0; j < newlen; j++ {
-				e.encodeValue(rvals[j], encFn{})
+				kv = fkvs[j]
+				e.encodeValue(kv.v, encFn{})
 			}
 		}
 	}
@@ -696,7 +706,7 @@ func (f encFnInfo) kMap(rv reflect.Value) {
 		}
 		if rtkey.Kind() != reflect.Interface {
 			rtkeyid = reflect.ValueOf(rtkey).Pointer()
-			keyFn = e.getEncFn(rtkeyid, rtkey, true)
+			keyFn = e.getEncFn(rtkeyid, rtkey, true, true)
 		}
 	}
 	for rtval.Kind() == reflect.Ptr {
@@ -704,12 +714,34 @@ func (f encFnInfo) kMap(rv reflect.Value) {
 	}
 	if rtval.Kind() != reflect.Interface {
 		rtvalid := reflect.ValueOf(rtval).Pointer()
-		valFn = e.getEncFn(rtvalid, rtval, true)
+		valFn = e.getEncFn(rtvalid, rtval, true, true)
 	}
 	mks := rv.MapKeys()
 	// for j, lmks := 0, len(mks); j < lmks; j++ {
 	ee := f.ee //don't dereference everytime
-	if sep {
+	if e.h.Canonical {
+		// first encode each key to a []byte first, then sort them, then record
+		// println(">>>>>>>> CANONICAL <<<<<<<<")
+		var mksv []byte // temporary byte slice for the encoding
+		e2 := NewEncoderBytes(&mksv, e.hh)
+		mksbv := make([]encStructFieldBytesV, len(mks))
+		for i, k := range mks {
+			l := len(mksv)
+			e2.MustEncode(k)
+			mksbv[i].v = k
+			mksbv[i].b = mksv[l:]
+		}
+		sort.Sort(encStructFieldBytesVslice(mksbv))
+		for j := range mksbv {
+			if j > 0 {
+				ee.EncodeMapEntrySeparator()
+			}
+			e.w.writeb(mksbv[j].b)
+			ee.EncodeMapKVSeparator()
+			e.encodeValue(rv.MapIndex(mksbv[j].v), valFn)
+		}
+		ee.EncodeMapEnd()
+	} else if sep {
 		for j := range mks {
 			if j > 0 {
 				ee.EncodeMapEntrySeparator()
@@ -977,17 +1009,22 @@ func (e *Encoder) encode(iv interface{}) {
 		e.e.EncodeStringBytes(c_RAW, *v)
 
 	default:
-		if !fastpathEncodeTypeSwitch(iv, e) {
-			e.encodeI(iv, false)
+		// canonical mode is not supported for fastpath of maps (but is fine for slices)
+		if e.h.Canonical {
+			if !fastpathEncodeTypeSwitchSlice(iv, e) {
+				e.encodeI(iv, false, false)
+			}
+		} else if !fastpathEncodeTypeSwitch(iv, e) {
+			e.encodeI(iv, false, false)
 		}
 	}
 }
 
-func (e *Encoder) encodeI(iv interface{}, encFnCheckAll bool) {
+func (e *Encoder) encodeI(iv interface{}, checkFastpath, checkCodecSelfer bool) {
 	if rv, proceed := e.preEncodeValue(reflect.ValueOf(iv)); proceed {
 		rt := rv.Type()
 		rtid := reflect.ValueOf(rt).Pointer()
-		fn := e.getEncFn(rtid, rt, encFnCheckAll)
+		fn := e.getEncFn(rtid, rt, checkFastpath, checkCodecSelfer)
 		fn.f(fn.i, rv)
 	}
 }
@@ -1024,13 +1061,13 @@ func (e *Encoder) encodeValue(rv reflect.Value, fn encFn) {
 		if fn.f == nil {
 			rt := rv.Type()
 			rtid := reflect.ValueOf(rt).Pointer()
-			fn = e.getEncFn(rtid, rt, true)
+			fn = e.getEncFn(rtid, rt, true, true)
 		}
 		fn.f(fn.i, rv)
 	}
 }
 
-func (e *Encoder) getEncFn(rtid uintptr, rt reflect.Type, checkAll bool) (fn encFn) {
+func (e *Encoder) getEncFn(rtid uintptr, rt reflect.Type, checkFastpath, checkCodecSelfer bool) (fn encFn) {
 	// rtid := reflect.ValueOf(rt).Pointer()
 	var ok bool
 	if useMapForCodecCache {
@@ -1051,7 +1088,7 @@ func (e *Encoder) getEncFn(rtid uintptr, rt reflect.Type, checkAll bool) (fn enc
 	var fi encFnInfo
 	fi.ee = e.e
 
-	if checkAll && ti.cs {
+	if checkCodecSelfer && ti.cs {
 		fi.encFnInfoX = &encFnInfoX{e: e, ti: ti}
 		fn.f = (encFnInfo).selferMarshal
 	} else if rtid == rawExtTypId {
@@ -1073,7 +1110,7 @@ func (e *Encoder) getEncFn(rtid uintptr, rt reflect.Type, checkAll bool) (fn enc
 		fn.f = (encFnInfo).textMarshal
 	} else {
 		rk := rt.Kind()
-		if fastpathEnabled && checkAll && (rk == reflect.Map || rk == reflect.Slice) {
+		if fastpathEnabled && checkFastpath && (rk == reflect.Map || rk == reflect.Slice) {
 			if rt.PkgPath() == "" {
 				if idx := fastpathAV.index(rtid); idx != -1 {
 					fi.encFnInfoX = &encFnInfoX{e: e, ti: ti}
@@ -1163,6 +1200,11 @@ func (e *Encoder) errorf(format string, params ...interface{}) {
 
 
 // ----------------------------------------
 
+type encStructFieldKV struct {
+	k string
+	v reflect.Value
+}
+
 const encStructPoolLen = 4
 
 // encStructPool is an array of sync.Pool.
@@ -1176,31 +1218,11 @@ const encStructPoolLen = 4
 // enough to reduce thread contention.
 var encStructPool [encStructPoolLen]sync.Pool
 
-type encStructPool8 struct {
-	r [8]reflect.Value
-	s [8]string
-}
-
-type encStructPool16 struct {
-	r [16]reflect.Value
-	s [16]string
-}
-
-type encStructPool32 struct {
-	r [32]reflect.Value
-	s [32]string
-}
-
-type encStructPool64 struct {
-	r [64]reflect.Value
-	s [64]string
-}
-
 func init() {
-	encStructPool[0].New = func() interface{} { return new(encStructPool8) }
-	encStructPool[1].New = func() interface{} { return new(encStructPool16) }
-	encStructPool[2].New = func() interface{} { return new(encStructPool32) }
-	encStructPool[3].New = func() interface{} { return new(encStructPool64) }
+	encStructPool[0].New = func() interface{} { return new([8]encStructFieldKV) }
+	encStructPool[1].New = func() interface{} { return new([16]encStructFieldKV) }
+	encStructPool[2].New = func() interface{} { return new([32]encStructFieldKV) }
+	encStructPool[3].New = func() interface{} { return new([64]encStructFieldKV) }
 }
 
 // ----------------------------------------

codec/fast-path.generated.go (+1213 -0)

@@ -1544,6 +1544,1219 @@ func fastpathEncodeTypeSwitch(iv interface{}, e *Encoder) bool {
 	return true
 }
 
+func fastpathEncodeTypeSwitchSlice(iv interface{}, e *Encoder) bool {
+	switch v := iv.(type) {
+
+	case []interface{}:
+		fastpathTV.EncSliceIntfV(v, fastpathCheckNilTrue, e)
+	case *[]interface{}:
+		fastpathTV.EncSliceIntfV(*v, fastpathCheckNilTrue, e)
+
+	case []string:
+		fastpathTV.EncSliceStringV(v, fastpathCheckNilTrue, e)
+	case *[]string:
+		fastpathTV.EncSliceStringV(*v, fastpathCheckNilTrue, e)
+
+	case []float32:
+		fastpathTV.EncSliceFloat32V(v, fastpathCheckNilTrue, e)
+	case *[]float32:
+		fastpathTV.EncSliceFloat32V(*v, fastpathCheckNilTrue, e)
+
+	case []float64:
+		fastpathTV.EncSliceFloat64V(v, fastpathCheckNilTrue, e)
+	case *[]float64:
+		fastpathTV.EncSliceFloat64V(*v, fastpathCheckNilTrue, e)
+
+	case []uint:
+		fastpathTV.EncSliceUintV(v, fastpathCheckNilTrue, e)
+	case *[]uint:
+		fastpathTV.EncSliceUintV(*v, fastpathCheckNilTrue, e)
+
+	case []uint16:
+		fastpathTV.EncSliceUint16V(v, fastpathCheckNilTrue, e)
+	case *[]uint16:
+		fastpathTV.EncSliceUint16V(*v, fastpathCheckNilTrue, e)
+
+	case []uint32:
+		fastpathTV.EncSliceUint32V(v, fastpathCheckNilTrue, e)
+	case *[]uint32:
+		fastpathTV.EncSliceUint32V(*v, fastpathCheckNilTrue, e)
+
+	case []uint64:
+		fastpathTV.EncSliceUint64V(v, fastpathCheckNilTrue, e)
+	case *[]uint64:
+		fastpathTV.EncSliceUint64V(*v, fastpathCheckNilTrue, e)
+
+	case []int:
+		fastpathTV.EncSliceIntV(v, fastpathCheckNilTrue, e)
+	case *[]int:
+		fastpathTV.EncSliceIntV(*v, fastpathCheckNilTrue, e)
+
+	case []int8:
+		fastpathTV.EncSliceInt8V(v, fastpathCheckNilTrue, e)
+	case *[]int8:
+		fastpathTV.EncSliceInt8V(*v, fastpathCheckNilTrue, e)
+
+	case []int16:
+		fastpathTV.EncSliceInt16V(v, fastpathCheckNilTrue, e)
+	case *[]int16:
+		fastpathTV.EncSliceInt16V(*v, fastpathCheckNilTrue, e)
+
+	case []int32:
+		fastpathTV.EncSliceInt32V(v, fastpathCheckNilTrue, e)
+	case *[]int32:
+		fastpathTV.EncSliceInt32V(*v, fastpathCheckNilTrue, e)
+
+	case []int64:
+		fastpathTV.EncSliceInt64V(v, fastpathCheckNilTrue, e)
+	case *[]int64:
+		fastpathTV.EncSliceInt64V(*v, fastpathCheckNilTrue, e)
+
+	case []bool:
+		fastpathTV.EncSliceBoolV(v, fastpathCheckNilTrue, e)
+	case *[]bool:
+		fastpathTV.EncSliceBoolV(*v, fastpathCheckNilTrue, e)
+
+	default:
+		return false
+	}
+	return true
+}
+
+func fastpathEncodeTypeSwitchMap(iv interface{}, e *Encoder) bool {
+	switch v := iv.(type) {
+
+	case map[interface{}]interface{}:
+		fastpathTV.EncMapIntfIntfV(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]interface{}:
+		fastpathTV.EncMapIntfIntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]string:
+		fastpathTV.EncMapIntfStringV(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]string:
+		fastpathTV.EncMapIntfStringV(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]uint:
+		fastpathTV.EncMapIntfUintV(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]uint:
+		fastpathTV.EncMapIntfUintV(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]uint8:
+		fastpathTV.EncMapIntfUint8V(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]uint8:
+		fastpathTV.EncMapIntfUint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]uint16:
+		fastpathTV.EncMapIntfUint16V(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]uint16:
+		fastpathTV.EncMapIntfUint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]uint32:
+		fastpathTV.EncMapIntfUint32V(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]uint32:
+		fastpathTV.EncMapIntfUint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]uint64:
+		fastpathTV.EncMapIntfUint64V(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]uint64:
+		fastpathTV.EncMapIntfUint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]int:
+		fastpathTV.EncMapIntfIntV(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]int:
+		fastpathTV.EncMapIntfIntV(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]int8:
+		fastpathTV.EncMapIntfInt8V(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]int8:
+		fastpathTV.EncMapIntfInt8V(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]int16:
+		fastpathTV.EncMapIntfInt16V(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]int16:
+		fastpathTV.EncMapIntfInt16V(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]int32:
+		fastpathTV.EncMapIntfInt32V(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]int32:
+		fastpathTV.EncMapIntfInt32V(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]int64:
+		fastpathTV.EncMapIntfInt64V(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]int64:
+		fastpathTV.EncMapIntfInt64V(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]float32:
+		fastpathTV.EncMapIntfFloat32V(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]float32:
+		fastpathTV.EncMapIntfFloat32V(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]float64:
+		fastpathTV.EncMapIntfFloat64V(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]float64:
+		fastpathTV.EncMapIntfFloat64V(*v, fastpathCheckNilTrue, e)
+
+	case map[interface{}]bool:
+		fastpathTV.EncMapIntfBoolV(v, fastpathCheckNilTrue, e)
+	case *map[interface{}]bool:
+		fastpathTV.EncMapIntfBoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[string]interface{}:
+		fastpathTV.EncMapStringIntfV(v, fastpathCheckNilTrue, e)
+	case *map[string]interface{}:
+		fastpathTV.EncMapStringIntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[string]string:
+		fastpathTV.EncMapStringStringV(v, fastpathCheckNilTrue, e)
+	case *map[string]string:
+		fastpathTV.EncMapStringStringV(*v, fastpathCheckNilTrue, e)
+
+	case map[string]uint:
+		fastpathTV.EncMapStringUintV(v, fastpathCheckNilTrue, e)
+	case *map[string]uint:
+		fastpathTV.EncMapStringUintV(*v, fastpathCheckNilTrue, e)
+
+	case map[string]uint8:
+		fastpathTV.EncMapStringUint8V(v, fastpathCheckNilTrue, e)
+	case *map[string]uint8:
+		fastpathTV.EncMapStringUint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[string]uint16:
+		fastpathTV.EncMapStringUint16V(v, fastpathCheckNilTrue, e)
+	case *map[string]uint16:
+		fastpathTV.EncMapStringUint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[string]uint32:
+		fastpathTV.EncMapStringUint32V(v, fastpathCheckNilTrue, e)
+	case *map[string]uint32:
+		fastpathTV.EncMapStringUint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[string]uint64:
+		fastpathTV.EncMapStringUint64V(v, fastpathCheckNilTrue, e)
+	case *map[string]uint64:
+		fastpathTV.EncMapStringUint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[string]int:
+		fastpathTV.EncMapStringIntV(v, fastpathCheckNilTrue, e)
+	case *map[string]int:
+		fastpathTV.EncMapStringIntV(*v, fastpathCheckNilTrue, e)
+
+	case map[string]int8:
+		fastpathTV.EncMapStringInt8V(v, fastpathCheckNilTrue, e)
+	case *map[string]int8:
+		fastpathTV.EncMapStringInt8V(*v, fastpathCheckNilTrue, e)
+
+	case map[string]int16:
+		fastpathTV.EncMapStringInt16V(v, fastpathCheckNilTrue, e)
+	case *map[string]int16:
+		fastpathTV.EncMapStringInt16V(*v, fastpathCheckNilTrue, e)
+
+	case map[string]int32:
+		fastpathTV.EncMapStringInt32V(v, fastpathCheckNilTrue, e)
+	case *map[string]int32:
+		fastpathTV.EncMapStringInt32V(*v, fastpathCheckNilTrue, e)
+
+	case map[string]int64:
+		fastpathTV.EncMapStringInt64V(v, fastpathCheckNilTrue, e)
+	case *map[string]int64:
+		fastpathTV.EncMapStringInt64V(*v, fastpathCheckNilTrue, e)
+
+	case map[string]float32:
+		fastpathTV.EncMapStringFloat32V(v, fastpathCheckNilTrue, e)
+	case *map[string]float32:
+		fastpathTV.EncMapStringFloat32V(*v, fastpathCheckNilTrue, e)
+
+	case map[string]float64:
+		fastpathTV.EncMapStringFloat64V(v, fastpathCheckNilTrue, e)
+	case *map[string]float64:
+		fastpathTV.EncMapStringFloat64V(*v, fastpathCheckNilTrue, e)
+
+	case map[string]bool:
+		fastpathTV.EncMapStringBoolV(v, fastpathCheckNilTrue, e)
+	case *map[string]bool:
+		fastpathTV.EncMapStringBoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]interface{}:
+		fastpathTV.EncMapFloat32IntfV(v, fastpathCheckNilTrue, e)
+	case *map[float32]interface{}:
+		fastpathTV.EncMapFloat32IntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]string:
+		fastpathTV.EncMapFloat32StringV(v, fastpathCheckNilTrue, e)
+	case *map[float32]string:
+		fastpathTV.EncMapFloat32StringV(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]uint:
+		fastpathTV.EncMapFloat32UintV(v, fastpathCheckNilTrue, e)
+	case *map[float32]uint:
+		fastpathTV.EncMapFloat32UintV(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]uint8:
+		fastpathTV.EncMapFloat32Uint8V(v, fastpathCheckNilTrue, e)
+	case *map[float32]uint8:
+		fastpathTV.EncMapFloat32Uint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]uint16:
+		fastpathTV.EncMapFloat32Uint16V(v, fastpathCheckNilTrue, e)
+	case *map[float32]uint16:
+		fastpathTV.EncMapFloat32Uint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]uint32:
+		fastpathTV.EncMapFloat32Uint32V(v, fastpathCheckNilTrue, e)
+	case *map[float32]uint32:
+		fastpathTV.EncMapFloat32Uint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]uint64:
+		fastpathTV.EncMapFloat32Uint64V(v, fastpathCheckNilTrue, e)
+	case *map[float32]uint64:
+		fastpathTV.EncMapFloat32Uint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]int:
+		fastpathTV.EncMapFloat32IntV(v, fastpathCheckNilTrue, e)
+	case *map[float32]int:
+		fastpathTV.EncMapFloat32IntV(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]int8:
+		fastpathTV.EncMapFloat32Int8V(v, fastpathCheckNilTrue, e)
+	case *map[float32]int8:
+		fastpathTV.EncMapFloat32Int8V(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]int16:
+		fastpathTV.EncMapFloat32Int16V(v, fastpathCheckNilTrue, e)
+	case *map[float32]int16:
+		fastpathTV.EncMapFloat32Int16V(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]int32:
+		fastpathTV.EncMapFloat32Int32V(v, fastpathCheckNilTrue, e)
+	case *map[float32]int32:
+		fastpathTV.EncMapFloat32Int32V(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]int64:
+		fastpathTV.EncMapFloat32Int64V(v, fastpathCheckNilTrue, e)
+	case *map[float32]int64:
+		fastpathTV.EncMapFloat32Int64V(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]float32:
+		fastpathTV.EncMapFloat32Float32V(v, fastpathCheckNilTrue, e)
+	case *map[float32]float32:
+		fastpathTV.EncMapFloat32Float32V(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]float64:
+		fastpathTV.EncMapFloat32Float64V(v, fastpathCheckNilTrue, e)
+	case *map[float32]float64:
+		fastpathTV.EncMapFloat32Float64V(*v, fastpathCheckNilTrue, e)
+
+	case map[float32]bool:
+		fastpathTV.EncMapFloat32BoolV(v, fastpathCheckNilTrue, e)
+	case *map[float32]bool:
+		fastpathTV.EncMapFloat32BoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]interface{}:
+		fastpathTV.EncMapFloat64IntfV(v, fastpathCheckNilTrue, e)
+	case *map[float64]interface{}:
+		fastpathTV.EncMapFloat64IntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]string:
+		fastpathTV.EncMapFloat64StringV(v, fastpathCheckNilTrue, e)
+	case *map[float64]string:
+		fastpathTV.EncMapFloat64StringV(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]uint:
+		fastpathTV.EncMapFloat64UintV(v, fastpathCheckNilTrue, e)
+	case *map[float64]uint:
+		fastpathTV.EncMapFloat64UintV(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]uint8:
+		fastpathTV.EncMapFloat64Uint8V(v, fastpathCheckNilTrue, e)
+	case *map[float64]uint8:
+		fastpathTV.EncMapFloat64Uint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]uint16:
+		fastpathTV.EncMapFloat64Uint16V(v, fastpathCheckNilTrue, e)
+	case *map[float64]uint16:
+		fastpathTV.EncMapFloat64Uint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]uint32:
+		fastpathTV.EncMapFloat64Uint32V(v, fastpathCheckNilTrue, e)
+	case *map[float64]uint32:
+		fastpathTV.EncMapFloat64Uint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]uint64:
+		fastpathTV.EncMapFloat64Uint64V(v, fastpathCheckNilTrue, e)
+	case *map[float64]uint64:
+		fastpathTV.EncMapFloat64Uint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]int:
+		fastpathTV.EncMapFloat64IntV(v, fastpathCheckNilTrue, e)
+	case *map[float64]int:
+		fastpathTV.EncMapFloat64IntV(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]int8:
+		fastpathTV.EncMapFloat64Int8V(v, fastpathCheckNilTrue, e)
+	case *map[float64]int8:
+		fastpathTV.EncMapFloat64Int8V(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]int16:
+		fastpathTV.EncMapFloat64Int16V(v, fastpathCheckNilTrue, e)
+	case *map[float64]int16:
+		fastpathTV.EncMapFloat64Int16V(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]int32:
+		fastpathTV.EncMapFloat64Int32V(v, fastpathCheckNilTrue, e)
+	case *map[float64]int32:
+		fastpathTV.EncMapFloat64Int32V(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]int64:
+		fastpathTV.EncMapFloat64Int64V(v, fastpathCheckNilTrue, e)
+	case *map[float64]int64:
+		fastpathTV.EncMapFloat64Int64V(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]float32:
+		fastpathTV.EncMapFloat64Float32V(v, fastpathCheckNilTrue, e)
+	case *map[float64]float32:
+		fastpathTV.EncMapFloat64Float32V(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]float64:
+		fastpathTV.EncMapFloat64Float64V(v, fastpathCheckNilTrue, e)
+	case *map[float64]float64:
+		fastpathTV.EncMapFloat64Float64V(*v, fastpathCheckNilTrue, e)
+
+	case map[float64]bool:
+		fastpathTV.EncMapFloat64BoolV(v, fastpathCheckNilTrue, e)
+	case *map[float64]bool:
+		fastpathTV.EncMapFloat64BoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]interface{}:
+		fastpathTV.EncMapUintIntfV(v, fastpathCheckNilTrue, e)
+	case *map[uint]interface{}:
+		fastpathTV.EncMapUintIntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]string:
+		fastpathTV.EncMapUintStringV(v, fastpathCheckNilTrue, e)
+	case *map[uint]string:
+		fastpathTV.EncMapUintStringV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]uint:
+		fastpathTV.EncMapUintUintV(v, fastpathCheckNilTrue, e)
+	case *map[uint]uint:
+		fastpathTV.EncMapUintUintV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]uint8:
+		fastpathTV.EncMapUintUint8V(v, fastpathCheckNilTrue, e)
+	case *map[uint]uint8:
+		fastpathTV.EncMapUintUint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]uint16:
+		fastpathTV.EncMapUintUint16V(v, fastpathCheckNilTrue, e)
+	case *map[uint]uint16:
+		fastpathTV.EncMapUintUint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]uint32:
+		fastpathTV.EncMapUintUint32V(v, fastpathCheckNilTrue, e)
+	case *map[uint]uint32:
+		fastpathTV.EncMapUintUint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]uint64:
+		fastpathTV.EncMapUintUint64V(v, fastpathCheckNilTrue, e)
+	case *map[uint]uint64:
+		fastpathTV.EncMapUintUint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]int:
+		fastpathTV.EncMapUintIntV(v, fastpathCheckNilTrue, e)
+	case *map[uint]int:
+		fastpathTV.EncMapUintIntV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]int8:
+		fastpathTV.EncMapUintInt8V(v, fastpathCheckNilTrue, e)
+	case *map[uint]int8:
+		fastpathTV.EncMapUintInt8V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]int16:
+		fastpathTV.EncMapUintInt16V(v, fastpathCheckNilTrue, e)
+	case *map[uint]int16:
+		fastpathTV.EncMapUintInt16V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]int32:
+		fastpathTV.EncMapUintInt32V(v, fastpathCheckNilTrue, e)
+	case *map[uint]int32:
+		fastpathTV.EncMapUintInt32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]int64:
+		fastpathTV.EncMapUintInt64V(v, fastpathCheckNilTrue, e)
+	case *map[uint]int64:
+		fastpathTV.EncMapUintInt64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]float32:
+		fastpathTV.EncMapUintFloat32V(v, fastpathCheckNilTrue, e)
+	case *map[uint]float32:
+		fastpathTV.EncMapUintFloat32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]float64:
+		fastpathTV.EncMapUintFloat64V(v, fastpathCheckNilTrue, e)
+	case *map[uint]float64:
+		fastpathTV.EncMapUintFloat64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint]bool:
+		fastpathTV.EncMapUintBoolV(v, fastpathCheckNilTrue, e)
+	case *map[uint]bool:
+		fastpathTV.EncMapUintBoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]interface{}:
+		fastpathTV.EncMapUint8IntfV(v, fastpathCheckNilTrue, e)
+	case *map[uint8]interface{}:
+		fastpathTV.EncMapUint8IntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]string:
+		fastpathTV.EncMapUint8StringV(v, fastpathCheckNilTrue, e)
+	case *map[uint8]string:
+		fastpathTV.EncMapUint8StringV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]uint:
+		fastpathTV.EncMapUint8UintV(v, fastpathCheckNilTrue, e)
+	case *map[uint8]uint:
+		fastpathTV.EncMapUint8UintV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]uint8:
+		fastpathTV.EncMapUint8Uint8V(v, fastpathCheckNilTrue, e)
+	case *map[uint8]uint8:
+		fastpathTV.EncMapUint8Uint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]uint16:
+		fastpathTV.EncMapUint8Uint16V(v, fastpathCheckNilTrue, e)
+	case *map[uint8]uint16:
+		fastpathTV.EncMapUint8Uint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]uint32:
+		fastpathTV.EncMapUint8Uint32V(v, fastpathCheckNilTrue, e)
+	case *map[uint8]uint32:
+		fastpathTV.EncMapUint8Uint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]uint64:
+		fastpathTV.EncMapUint8Uint64V(v, fastpathCheckNilTrue, e)
+	case *map[uint8]uint64:
+		fastpathTV.EncMapUint8Uint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]int:
+		fastpathTV.EncMapUint8IntV(v, fastpathCheckNilTrue, e)
+	case *map[uint8]int:
+		fastpathTV.EncMapUint8IntV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]int8:
+		fastpathTV.EncMapUint8Int8V(v, fastpathCheckNilTrue, e)
+	case *map[uint8]int8:
+		fastpathTV.EncMapUint8Int8V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]int16:
+		fastpathTV.EncMapUint8Int16V(v, fastpathCheckNilTrue, e)
+	case *map[uint8]int16:
+		fastpathTV.EncMapUint8Int16V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]int32:
+		fastpathTV.EncMapUint8Int32V(v, fastpathCheckNilTrue, e)
+	case *map[uint8]int32:
+		fastpathTV.EncMapUint8Int32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]int64:
+		fastpathTV.EncMapUint8Int64V(v, fastpathCheckNilTrue, e)
+	case *map[uint8]int64:
+		fastpathTV.EncMapUint8Int64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]float32:
+		fastpathTV.EncMapUint8Float32V(v, fastpathCheckNilTrue, e)
+	case *map[uint8]float32:
+		fastpathTV.EncMapUint8Float32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]float64:
+		fastpathTV.EncMapUint8Float64V(v, fastpathCheckNilTrue, e)
+	case *map[uint8]float64:
+		fastpathTV.EncMapUint8Float64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint8]bool:
+		fastpathTV.EncMapUint8BoolV(v, fastpathCheckNilTrue, e)
+	case *map[uint8]bool:
+		fastpathTV.EncMapUint8BoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]interface{}:
+		fastpathTV.EncMapUint16IntfV(v, fastpathCheckNilTrue, e)
+	case *map[uint16]interface{}:
+		fastpathTV.EncMapUint16IntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]string:
+		fastpathTV.EncMapUint16StringV(v, fastpathCheckNilTrue, e)
+	case *map[uint16]string:
+		fastpathTV.EncMapUint16StringV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]uint:
+		fastpathTV.EncMapUint16UintV(v, fastpathCheckNilTrue, e)
+	case *map[uint16]uint:
+		fastpathTV.EncMapUint16UintV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]uint8:
+		fastpathTV.EncMapUint16Uint8V(v, fastpathCheckNilTrue, e)
+	case *map[uint16]uint8:
+		fastpathTV.EncMapUint16Uint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]uint16:
+		fastpathTV.EncMapUint16Uint16V(v, fastpathCheckNilTrue, e)
+	case *map[uint16]uint16:
+		fastpathTV.EncMapUint16Uint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]uint32:
+		fastpathTV.EncMapUint16Uint32V(v, fastpathCheckNilTrue, e)
+	case *map[uint16]uint32:
+		fastpathTV.EncMapUint16Uint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]uint64:
+		fastpathTV.EncMapUint16Uint64V(v, fastpathCheckNilTrue, e)
+	case *map[uint16]uint64:
+		fastpathTV.EncMapUint16Uint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]int:
+		fastpathTV.EncMapUint16IntV(v, fastpathCheckNilTrue, e)
+	case *map[uint16]int:
+		fastpathTV.EncMapUint16IntV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]int8:
+		fastpathTV.EncMapUint16Int8V(v, fastpathCheckNilTrue, e)
+	case *map[uint16]int8:
+		fastpathTV.EncMapUint16Int8V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]int16:
+		fastpathTV.EncMapUint16Int16V(v, fastpathCheckNilTrue, e)
+	case *map[uint16]int16:
+		fastpathTV.EncMapUint16Int16V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]int32:
+		fastpathTV.EncMapUint16Int32V(v, fastpathCheckNilTrue, e)
+	case *map[uint16]int32:
+		fastpathTV.EncMapUint16Int32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]int64:
+		fastpathTV.EncMapUint16Int64V(v, fastpathCheckNilTrue, e)
+	case *map[uint16]int64:
+		fastpathTV.EncMapUint16Int64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]float32:
+		fastpathTV.EncMapUint16Float32V(v, fastpathCheckNilTrue, e)
+	case *map[uint16]float32:
+		fastpathTV.EncMapUint16Float32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]float64:
+		fastpathTV.EncMapUint16Float64V(v, fastpathCheckNilTrue, e)
+	case *map[uint16]float64:
+		fastpathTV.EncMapUint16Float64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint16]bool:
+		fastpathTV.EncMapUint16BoolV(v, fastpathCheckNilTrue, e)
+	case *map[uint16]bool:
+		fastpathTV.EncMapUint16BoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]interface{}:
+		fastpathTV.EncMapUint32IntfV(v, fastpathCheckNilTrue, e)
+	case *map[uint32]interface{}:
+		fastpathTV.EncMapUint32IntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]string:
+		fastpathTV.EncMapUint32StringV(v, fastpathCheckNilTrue, e)
+	case *map[uint32]string:
+		fastpathTV.EncMapUint32StringV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]uint:
+		fastpathTV.EncMapUint32UintV(v, fastpathCheckNilTrue, e)
+	case *map[uint32]uint:
+		fastpathTV.EncMapUint32UintV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]uint8:
+		fastpathTV.EncMapUint32Uint8V(v, fastpathCheckNilTrue, e)
+	case *map[uint32]uint8:
+		fastpathTV.EncMapUint32Uint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]uint16:
+		fastpathTV.EncMapUint32Uint16V(v, fastpathCheckNilTrue, e)
+	case *map[uint32]uint16:
+		fastpathTV.EncMapUint32Uint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]uint32:
+		fastpathTV.EncMapUint32Uint32V(v, fastpathCheckNilTrue, e)
+	case *map[uint32]uint32:
+		fastpathTV.EncMapUint32Uint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]uint64:
+		fastpathTV.EncMapUint32Uint64V(v, fastpathCheckNilTrue, e)
+	case *map[uint32]uint64:
+		fastpathTV.EncMapUint32Uint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]int:
+		fastpathTV.EncMapUint32IntV(v, fastpathCheckNilTrue, e)
+	case *map[uint32]int:
+		fastpathTV.EncMapUint32IntV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]int8:
+		fastpathTV.EncMapUint32Int8V(v, fastpathCheckNilTrue, e)
+	case *map[uint32]int8:
+		fastpathTV.EncMapUint32Int8V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]int16:
+		fastpathTV.EncMapUint32Int16V(v, fastpathCheckNilTrue, e)
+	case *map[uint32]int16:
+		fastpathTV.EncMapUint32Int16V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]int32:
+		fastpathTV.EncMapUint32Int32V(v, fastpathCheckNilTrue, e)
+	case *map[uint32]int32:
+		fastpathTV.EncMapUint32Int32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]int64:
+		fastpathTV.EncMapUint32Int64V(v, fastpathCheckNilTrue, e)
+	case *map[uint32]int64:
+		fastpathTV.EncMapUint32Int64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]float32:
+		fastpathTV.EncMapUint32Float32V(v, fastpathCheckNilTrue, e)
+	case *map[uint32]float32:
+		fastpathTV.EncMapUint32Float32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]float64:
+		fastpathTV.EncMapUint32Float64V(v, fastpathCheckNilTrue, e)
+	case *map[uint32]float64:
+		fastpathTV.EncMapUint32Float64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint32]bool:
+		fastpathTV.EncMapUint32BoolV(v, fastpathCheckNilTrue, e)
+	case *map[uint32]bool:
+		fastpathTV.EncMapUint32BoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]interface{}:
+		fastpathTV.EncMapUint64IntfV(v, fastpathCheckNilTrue, e)
+	case *map[uint64]interface{}:
+		fastpathTV.EncMapUint64IntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]string:
+		fastpathTV.EncMapUint64StringV(v, fastpathCheckNilTrue, e)
+	case *map[uint64]string:
+		fastpathTV.EncMapUint64StringV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]uint:
+		fastpathTV.EncMapUint64UintV(v, fastpathCheckNilTrue, e)
+	case *map[uint64]uint:
+		fastpathTV.EncMapUint64UintV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]uint8:
+		fastpathTV.EncMapUint64Uint8V(v, fastpathCheckNilTrue, e)
+	case *map[uint64]uint8:
+		fastpathTV.EncMapUint64Uint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]uint16:
+		fastpathTV.EncMapUint64Uint16V(v, fastpathCheckNilTrue, e)
+	case *map[uint64]uint16:
+		fastpathTV.EncMapUint64Uint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]uint32:
+		fastpathTV.EncMapUint64Uint32V(v, fastpathCheckNilTrue, e)
+	case *map[uint64]uint32:
+		fastpathTV.EncMapUint64Uint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]uint64:
+		fastpathTV.EncMapUint64Uint64V(v, fastpathCheckNilTrue, e)
+	case *map[uint64]uint64:
+		fastpathTV.EncMapUint64Uint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]int:
+		fastpathTV.EncMapUint64IntV(v, fastpathCheckNilTrue, e)
+	case *map[uint64]int:
+		fastpathTV.EncMapUint64IntV(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]int8:
+		fastpathTV.EncMapUint64Int8V(v, fastpathCheckNilTrue, e)
+	case *map[uint64]int8:
+		fastpathTV.EncMapUint64Int8V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]int16:
+		fastpathTV.EncMapUint64Int16V(v, fastpathCheckNilTrue, e)
+	case *map[uint64]int16:
+		fastpathTV.EncMapUint64Int16V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]int32:
+		fastpathTV.EncMapUint64Int32V(v, fastpathCheckNilTrue, e)
+	case *map[uint64]int32:
+		fastpathTV.EncMapUint64Int32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]int64:
+		fastpathTV.EncMapUint64Int64V(v, fastpathCheckNilTrue, e)
+	case *map[uint64]int64:
+		fastpathTV.EncMapUint64Int64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]float32:
+		fastpathTV.EncMapUint64Float32V(v, fastpathCheckNilTrue, e)
+	case *map[uint64]float32:
+		fastpathTV.EncMapUint64Float32V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]float64:
+		fastpathTV.EncMapUint64Float64V(v, fastpathCheckNilTrue, e)
+	case *map[uint64]float64:
+		fastpathTV.EncMapUint64Float64V(*v, fastpathCheckNilTrue, e)
+
+	case map[uint64]bool:
+		fastpathTV.EncMapUint64BoolV(v, fastpathCheckNilTrue, e)
+	case *map[uint64]bool:
+		fastpathTV.EncMapUint64BoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[int]interface{}:
+		fastpathTV.EncMapIntIntfV(v, fastpathCheckNilTrue, e)
+	case *map[int]interface{}:
+		fastpathTV.EncMapIntIntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[int]string:
+		fastpathTV.EncMapIntStringV(v, fastpathCheckNilTrue, e)
+	case *map[int]string:
+		fastpathTV.EncMapIntStringV(*v, fastpathCheckNilTrue, e)
+
+	case map[int]uint:
+		fastpathTV.EncMapIntUintV(v, fastpathCheckNilTrue, e)
+	case *map[int]uint:
+		fastpathTV.EncMapIntUintV(*v, fastpathCheckNilTrue, e)
+
+	case map[int]uint8:
+		fastpathTV.EncMapIntUint8V(v, fastpathCheckNilTrue, e)
+	case *map[int]uint8:
+		fastpathTV.EncMapIntUint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[int]uint16:
+		fastpathTV.EncMapIntUint16V(v, fastpathCheckNilTrue, e)
+	case *map[int]uint16:
+		fastpathTV.EncMapIntUint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[int]uint32:
+		fastpathTV.EncMapIntUint32V(v, fastpathCheckNilTrue, e)
+	case *map[int]uint32:
+		fastpathTV.EncMapIntUint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int]uint64:
+		fastpathTV.EncMapIntUint64V(v, fastpathCheckNilTrue, e)
+	case *map[int]uint64:
+		fastpathTV.EncMapIntUint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int]int:
+		fastpathTV.EncMapIntIntV(v, fastpathCheckNilTrue, e)
+	case *map[int]int:
+		fastpathTV.EncMapIntIntV(*v, fastpathCheckNilTrue, e)
+
+	case map[int]int8:
+		fastpathTV.EncMapIntInt8V(v, fastpathCheckNilTrue, e)
+	case *map[int]int8:
+		fastpathTV.EncMapIntInt8V(*v, fastpathCheckNilTrue, e)
+
+	case map[int]int16:
+		fastpathTV.EncMapIntInt16V(v, fastpathCheckNilTrue, e)
+	case *map[int]int16:
+		fastpathTV.EncMapIntInt16V(*v, fastpathCheckNilTrue, e)
+
+	case map[int]int32:
+		fastpathTV.EncMapIntInt32V(v, fastpathCheckNilTrue, e)
+	case *map[int]int32:
+		fastpathTV.EncMapIntInt32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int]int64:
+		fastpathTV.EncMapIntInt64V(v, fastpathCheckNilTrue, e)
+	case *map[int]int64:
+		fastpathTV.EncMapIntInt64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int]float32:
+		fastpathTV.EncMapIntFloat32V(v, fastpathCheckNilTrue, e)
+	case *map[int]float32:
+		fastpathTV.EncMapIntFloat32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int]float64:
+		fastpathTV.EncMapIntFloat64V(v, fastpathCheckNilTrue, e)
+	case *map[int]float64:
+		fastpathTV.EncMapIntFloat64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int]bool:
+		fastpathTV.EncMapIntBoolV(v, fastpathCheckNilTrue, e)
+	case *map[int]bool:
+		fastpathTV.EncMapIntBoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]interface{}:
+		fastpathTV.EncMapInt8IntfV(v, fastpathCheckNilTrue, e)
+	case *map[int8]interface{}:
+		fastpathTV.EncMapInt8IntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]string:
+		fastpathTV.EncMapInt8StringV(v, fastpathCheckNilTrue, e)
+	case *map[int8]string:
+		fastpathTV.EncMapInt8StringV(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]uint:
+		fastpathTV.EncMapInt8UintV(v, fastpathCheckNilTrue, e)
+	case *map[int8]uint:
+		fastpathTV.EncMapInt8UintV(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]uint8:
+		fastpathTV.EncMapInt8Uint8V(v, fastpathCheckNilTrue, e)
+	case *map[int8]uint8:
+		fastpathTV.EncMapInt8Uint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]uint16:
+		fastpathTV.EncMapInt8Uint16V(v, fastpathCheckNilTrue, e)
+	case *map[int8]uint16:
+		fastpathTV.EncMapInt8Uint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]uint32:
+		fastpathTV.EncMapInt8Uint32V(v, fastpathCheckNilTrue, e)
+	case *map[int8]uint32:
+		fastpathTV.EncMapInt8Uint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]uint64:
+		fastpathTV.EncMapInt8Uint64V(v, fastpathCheckNilTrue, e)
+	case *map[int8]uint64:
+		fastpathTV.EncMapInt8Uint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]int:
+		fastpathTV.EncMapInt8IntV(v, fastpathCheckNilTrue, e)
+	case *map[int8]int:
+		fastpathTV.EncMapInt8IntV(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]int8:
+		fastpathTV.EncMapInt8Int8V(v, fastpathCheckNilTrue, e)
+	case *map[int8]int8:
+		fastpathTV.EncMapInt8Int8V(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]int16:
+		fastpathTV.EncMapInt8Int16V(v, fastpathCheckNilTrue, e)
+	case *map[int8]int16:
+		fastpathTV.EncMapInt8Int16V(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]int32:
+		fastpathTV.EncMapInt8Int32V(v, fastpathCheckNilTrue, e)
+	case *map[int8]int32:
+		fastpathTV.EncMapInt8Int32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]int64:
+		fastpathTV.EncMapInt8Int64V(v, fastpathCheckNilTrue, e)
+	case *map[int8]int64:
+		fastpathTV.EncMapInt8Int64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]float32:
+		fastpathTV.EncMapInt8Float32V(v, fastpathCheckNilTrue, e)
+	case *map[int8]float32:
+		fastpathTV.EncMapInt8Float32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]float64:
+		fastpathTV.EncMapInt8Float64V(v, fastpathCheckNilTrue, e)
+	case *map[int8]float64:
+		fastpathTV.EncMapInt8Float64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int8]bool:
+		fastpathTV.EncMapInt8BoolV(v, fastpathCheckNilTrue, e)
+	case *map[int8]bool:
+		fastpathTV.EncMapInt8BoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]interface{}:
+		fastpathTV.EncMapInt16IntfV(v, fastpathCheckNilTrue, e)
+	case *map[int16]interface{}:
+		fastpathTV.EncMapInt16IntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]string:
+		fastpathTV.EncMapInt16StringV(v, fastpathCheckNilTrue, e)
+	case *map[int16]string:
+		fastpathTV.EncMapInt16StringV(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]uint:
+		fastpathTV.EncMapInt16UintV(v, fastpathCheckNilTrue, e)
+	case *map[int16]uint:
+		fastpathTV.EncMapInt16UintV(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]uint8:
+		fastpathTV.EncMapInt16Uint8V(v, fastpathCheckNilTrue, e)
+	case *map[int16]uint8:
+		fastpathTV.EncMapInt16Uint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]uint16:
+		fastpathTV.EncMapInt16Uint16V(v, fastpathCheckNilTrue, e)
+	case *map[int16]uint16:
+		fastpathTV.EncMapInt16Uint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]uint32:
+		fastpathTV.EncMapInt16Uint32V(v, fastpathCheckNilTrue, e)
+	case *map[int16]uint32:
+		fastpathTV.EncMapInt16Uint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]uint64:
+		fastpathTV.EncMapInt16Uint64V(v, fastpathCheckNilTrue, e)
+	case *map[int16]uint64:
+		fastpathTV.EncMapInt16Uint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]int:
+		fastpathTV.EncMapInt16IntV(v, fastpathCheckNilTrue, e)
+	case *map[int16]int:
+		fastpathTV.EncMapInt16IntV(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]int8:
+		fastpathTV.EncMapInt16Int8V(v, fastpathCheckNilTrue, e)
+	case *map[int16]int8:
+		fastpathTV.EncMapInt16Int8V(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]int16:
+		fastpathTV.EncMapInt16Int16V(v, fastpathCheckNilTrue, e)
+	case *map[int16]int16:
+		fastpathTV.EncMapInt16Int16V(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]int32:
+		fastpathTV.EncMapInt16Int32V(v, fastpathCheckNilTrue, e)
+	case *map[int16]int32:
+		fastpathTV.EncMapInt16Int32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]int64:
+		fastpathTV.EncMapInt16Int64V(v, fastpathCheckNilTrue, e)
+	case *map[int16]int64:
+		fastpathTV.EncMapInt16Int64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]float32:
+		fastpathTV.EncMapInt16Float32V(v, fastpathCheckNilTrue, e)
+	case *map[int16]float32:
+		fastpathTV.EncMapInt16Float32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]float64:
+		fastpathTV.EncMapInt16Float64V(v, fastpathCheckNilTrue, e)
+	case *map[int16]float64:
+		fastpathTV.EncMapInt16Float64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int16]bool:
+		fastpathTV.EncMapInt16BoolV(v, fastpathCheckNilTrue, e)
+	case *map[int16]bool:
+		fastpathTV.EncMapInt16BoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]interface{}:
+		fastpathTV.EncMapInt32IntfV(v, fastpathCheckNilTrue, e)
+	case *map[int32]interface{}:
+		fastpathTV.EncMapInt32IntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]string:
+		fastpathTV.EncMapInt32StringV(v, fastpathCheckNilTrue, e)
+	case *map[int32]string:
+		fastpathTV.EncMapInt32StringV(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]uint:
+		fastpathTV.EncMapInt32UintV(v, fastpathCheckNilTrue, e)
+	case *map[int32]uint:
+		fastpathTV.EncMapInt32UintV(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]uint8:
+		fastpathTV.EncMapInt32Uint8V(v, fastpathCheckNilTrue, e)
+	case *map[int32]uint8:
+		fastpathTV.EncMapInt32Uint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]uint16:
+		fastpathTV.EncMapInt32Uint16V(v, fastpathCheckNilTrue, e)
+	case *map[int32]uint16:
+		fastpathTV.EncMapInt32Uint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]uint32:
+		fastpathTV.EncMapInt32Uint32V(v, fastpathCheckNilTrue, e)
+	case *map[int32]uint32:
+		fastpathTV.EncMapInt32Uint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]uint64:
+		fastpathTV.EncMapInt32Uint64V(v, fastpathCheckNilTrue, e)
+	case *map[int32]uint64:
+		fastpathTV.EncMapInt32Uint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]int:
+		fastpathTV.EncMapInt32IntV(v, fastpathCheckNilTrue, e)
+	case *map[int32]int:
+		fastpathTV.EncMapInt32IntV(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]int8:
+		fastpathTV.EncMapInt32Int8V(v, fastpathCheckNilTrue, e)
+	case *map[int32]int8:
+		fastpathTV.EncMapInt32Int8V(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]int16:
+		fastpathTV.EncMapInt32Int16V(v, fastpathCheckNilTrue, e)
+	case *map[int32]int16:
+		fastpathTV.EncMapInt32Int16V(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]int32:
+		fastpathTV.EncMapInt32Int32V(v, fastpathCheckNilTrue, e)
+	case *map[int32]int32:
+		fastpathTV.EncMapInt32Int32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]int64:
+		fastpathTV.EncMapInt32Int64V(v, fastpathCheckNilTrue, e)
+	case *map[int32]int64:
+		fastpathTV.EncMapInt32Int64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]float32:
+		fastpathTV.EncMapInt32Float32V(v, fastpathCheckNilTrue, e)
+	case *map[int32]float32:
+		fastpathTV.EncMapInt32Float32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]float64:
+		fastpathTV.EncMapInt32Float64V(v, fastpathCheckNilTrue, e)
+	case *map[int32]float64:
+		fastpathTV.EncMapInt32Float64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int32]bool:
+		fastpathTV.EncMapInt32BoolV(v, fastpathCheckNilTrue, e)
+	case *map[int32]bool:
+		fastpathTV.EncMapInt32BoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]interface{}:
+		fastpathTV.EncMapInt64IntfV(v, fastpathCheckNilTrue, e)
+	case *map[int64]interface{}:
+		fastpathTV.EncMapInt64IntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]string:
+		fastpathTV.EncMapInt64StringV(v, fastpathCheckNilTrue, e)
+	case *map[int64]string:
+		fastpathTV.EncMapInt64StringV(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]uint:
+		fastpathTV.EncMapInt64UintV(v, fastpathCheckNilTrue, e)
+	case *map[int64]uint:
+		fastpathTV.EncMapInt64UintV(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]uint8:
+		fastpathTV.EncMapInt64Uint8V(v, fastpathCheckNilTrue, e)
+	case *map[int64]uint8:
+		fastpathTV.EncMapInt64Uint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]uint16:
+		fastpathTV.EncMapInt64Uint16V(v, fastpathCheckNilTrue, e)
+	case *map[int64]uint16:
+		fastpathTV.EncMapInt64Uint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]uint32:
+		fastpathTV.EncMapInt64Uint32V(v, fastpathCheckNilTrue, e)
+	case *map[int64]uint32:
+		fastpathTV.EncMapInt64Uint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]uint64:
+		fastpathTV.EncMapInt64Uint64V(v, fastpathCheckNilTrue, e)
+	case *map[int64]uint64:
+		fastpathTV.EncMapInt64Uint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]int:
+		fastpathTV.EncMapInt64IntV(v, fastpathCheckNilTrue, e)
+	case *map[int64]int:
+		fastpathTV.EncMapInt64IntV(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]int8:
+		fastpathTV.EncMapInt64Int8V(v, fastpathCheckNilTrue, e)
+	case *map[int64]int8:
+		fastpathTV.EncMapInt64Int8V(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]int16:
+		fastpathTV.EncMapInt64Int16V(v, fastpathCheckNilTrue, e)
+	case *map[int64]int16:
+		fastpathTV.EncMapInt64Int16V(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]int32:
+		fastpathTV.EncMapInt64Int32V(v, fastpathCheckNilTrue, e)
+	case *map[int64]int32:
+		fastpathTV.EncMapInt64Int32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]int64:
+		fastpathTV.EncMapInt64Int64V(v, fastpathCheckNilTrue, e)
+	case *map[int64]int64:
+		fastpathTV.EncMapInt64Int64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]float32:
+		fastpathTV.EncMapInt64Float32V(v, fastpathCheckNilTrue, e)
+	case *map[int64]float32:
+		fastpathTV.EncMapInt64Float32V(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]float64:
+		fastpathTV.EncMapInt64Float64V(v, fastpathCheckNilTrue, e)
+	case *map[int64]float64:
+		fastpathTV.EncMapInt64Float64V(*v, fastpathCheckNilTrue, e)
+
+	case map[int64]bool:
+		fastpathTV.EncMapInt64BoolV(v, fastpathCheckNilTrue, e)
+	case *map[int64]bool:
+		fastpathTV.EncMapInt64BoolV(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]interface{}:
+		fastpathTV.EncMapBoolIntfV(v, fastpathCheckNilTrue, e)
+	case *map[bool]interface{}:
+		fastpathTV.EncMapBoolIntfV(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]string:
+		fastpathTV.EncMapBoolStringV(v, fastpathCheckNilTrue, e)
+	case *map[bool]string:
+		fastpathTV.EncMapBoolStringV(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]uint:
+		fastpathTV.EncMapBoolUintV(v, fastpathCheckNilTrue, e)
+	case *map[bool]uint:
+		fastpathTV.EncMapBoolUintV(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]uint8:
+		fastpathTV.EncMapBoolUint8V(v, fastpathCheckNilTrue, e)
+	case *map[bool]uint8:
+		fastpathTV.EncMapBoolUint8V(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]uint16:
+		fastpathTV.EncMapBoolUint16V(v, fastpathCheckNilTrue, e)
+	case *map[bool]uint16:
+		fastpathTV.EncMapBoolUint16V(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]uint32:
+		fastpathTV.EncMapBoolUint32V(v, fastpathCheckNilTrue, e)
+	case *map[bool]uint32:
+		fastpathTV.EncMapBoolUint32V(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]uint64:
+		fastpathTV.EncMapBoolUint64V(v, fastpathCheckNilTrue, e)
+	case *map[bool]uint64:
+		fastpathTV.EncMapBoolUint64V(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]int:
+		fastpathTV.EncMapBoolIntV(v, fastpathCheckNilTrue, e)
+	case *map[bool]int:
+		fastpathTV.EncMapBoolIntV(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]int8:
+		fastpathTV.EncMapBoolInt8V(v, fastpathCheckNilTrue, e)
+	case *map[bool]int8:
+		fastpathTV.EncMapBoolInt8V(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]int16:
+		fastpathTV.EncMapBoolInt16V(v, fastpathCheckNilTrue, e)
+	case *map[bool]int16:
+		fastpathTV.EncMapBoolInt16V(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]int32:
+		fastpathTV.EncMapBoolInt32V(v, fastpathCheckNilTrue, e)
+	case *map[bool]int32:
+		fastpathTV.EncMapBoolInt32V(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]int64:
+		fastpathTV.EncMapBoolInt64V(v, fastpathCheckNilTrue, e)
+	case *map[bool]int64:
+		fastpathTV.EncMapBoolInt64V(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]float32:
+		fastpathTV.EncMapBoolFloat32V(v, fastpathCheckNilTrue, e)
+	case *map[bool]float32:
+		fastpathTV.EncMapBoolFloat32V(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]float64:
+		fastpathTV.EncMapBoolFloat64V(v, fastpathCheckNilTrue, e)
+	case *map[bool]float64:
+		fastpathTV.EncMapBoolFloat64V(*v, fastpathCheckNilTrue, e)
+
+	case map[bool]bool:
+		fastpathTV.EncMapBoolBoolV(v, fastpathCheckNilTrue, e)
+	case *map[bool]bool:
+		fastpathTV.EncMapBoolBoolV(*v, fastpathCheckNilTrue, e)
+
+	default:
+		return false
+	}
+	return true
+}
+
 // -- -- fast path functions
 
 func (f encFnInfo) fastpathEncSliceIntfR(rv reflect.Value) {

+ 28 - 0
codec/fast-path.go.tmpl

@@ -121,6 +121,34 @@ func fastpathEncodeTypeSwitch(iv interface{}, e *Encoder) bool {
 	return true
 }
 
+func fastpathEncodeTypeSwitchSlice(iv interface{}, e *Encoder) bool {
+	switch v := iv.(type) {
+{{range .Values}}{{if not .Primitive}}{{if .Slice }}
+	case []{{ .Elem }}:
+		fastpathTV.{{ .MethodNamePfx "Enc" false }}V(v, fastpathCheckNilTrue, e)
+	case *[]{{ .Elem }}:
+		fastpathTV.{{ .MethodNamePfx "Enc" false }}V(*v, fastpathCheckNilTrue, e)
+{{end}}{{end}}{{end}}
+	default:
+		return false
+	}
+	return true
+}
+
+func fastpathEncodeTypeSwitchMap(iv interface{}, e *Encoder) bool {
+	switch v := iv.(type) {
+{{range .Values}}{{if not .Primitive}}{{if not .Slice }}
+	case map[{{ .MapKey }}]{{ .Elem }}:
+		fastpathTV.{{ .MethodNamePfx "Enc" false }}V(v, fastpathCheckNilTrue, e)
+	case *map[{{ .MapKey }}]{{ .Elem }}:
+		fastpathTV.{{ .MethodNamePfx "Enc" false }}V(*v, fastpathCheckNilTrue, e)
+{{end}}{{end}}{{end}}
+	default:
+		return false
+	}
+	return true
+}
+
 // -- -- fast path functions
 {{range .Values}}{{if not .Primitive}}{{if .Slice }} 
 

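Aside from the combined `fastpathEncodeTypeSwitch`, the template now also generates `fastpathEncodeTypeSwitchSlice` and `fastpathEncodeTypeSwitchMap`, so an encoder can dispatch slices and maps independently; each function simply reports `false` when it has no case for the concrete type. The sketch below only illustrates that dispatch pattern: the helper names (`encodeSliceFastpath`, `encodeMapFastpath`) and the key-sorting fallback are made up for this example and are not the codec package's API.

```go
package main

import (
	"fmt"
	"sort"
)

// encodeSliceFastpath and encodeMapFastpath are hypothetical stand-ins for
// the generated fastpathEncodeTypeSwitchSlice / fastpathEncodeTypeSwitchMap
// dispatchers: each returns false if it has no case for the concrete type,
// so the caller can fall back to reflection-based encoding.
func encodeSliceFastpath(iv interface{}) bool {
	switch v := iv.(type) {
	case []int:
		fmt.Println("slice fast path:", v)
		return true
	}
	return false
}

func encodeMapFastpath(iv interface{}) bool {
	switch v := iv.(type) {
	case map[string]int:
		fmt.Println("map fast path (iteration order not fixed):", v)
		return true
	}
	return false
}

// encode shows one way the split can be used: slices always take the fast
// path, while maps skip it when a deterministic (key-sorted) encoding is
// wanted and are handled out of band instead.
func encode(iv interface{}, canonical bool) {
	if encodeSliceFastpath(iv) {
		return
	}
	if !canonical && encodeMapFastpath(iv) {
		return
	}
	if m, ok := iv.(map[string]int); ok {
		keys := make([]string, 0, len(m))
		for k := range m {
			keys = append(keys, k)
		}
		sort.Strings(keys) // fixed key order => same output bytes every run
		for _, k := range keys {
			fmt.Println("canonical entry:", k, m[k])
		}
		return
	}
	fmt.Println("reflection fallback for:", iv)
}

func main() {
	encode([]int{1, 2, 3}, true)
	encode(map[string]int{"b": 2, "a": 1}, false)
	encode(map[string]int{"b": 2, "a": 1}, true)
}
```

Splitting the switch this way lets a caller keep the slice fast path while routing maps through a slower, order-stable path when deterministic output is required.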
+ 2 - 2
codec/gen-helper.generated.go

@@ -57,7 +57,7 @@ func (f genHelperEncoder) EncBinary() bool {
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperEncoder) EncFallback(iv interface{}) {
 	// println(">>>>>>>>> EncFallback")
-	f.e.encodeI(iv, false)
+	f.e.encodeI(iv, false, false)
 }
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
@@ -83,7 +83,7 @@ func (f genHelperDecoder) DecScratchBuffer() []byte {
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperDecoder) DecFallback(iv interface{}, chkPtr bool) {
 	// println(">>>>>>>>> DecFallback")
-	f.d.decodeI(iv, chkPtr, false, false)
+	f.d.decodeI(iv, chkPtr, false, false, false)
 }
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*

+ 2 - 2
codec/gen-helper.go.tmpl

@@ -55,7 +55,7 @@ func (f genHelperEncoder) EncBinary() bool {
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperEncoder) EncFallback(iv interface{}) {
 	// println(">>>>>>>>> EncFallback")
-	f.e.encodeI(iv, false)
+	f.e.encodeI(iv, false, false)
 }
 
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
@@ -77,7 +77,7 @@ func (f genHelperDecoder) DecScratchBuffer() []byte {
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperDecoder) DecFallback(iv interface{}, chkPtr bool) {
 	// println(">>>>>>>>> DecFallback")
-	f.d.decodeI(iv, chkPtr, false, false)
+	f.d.decodeI(iv, chkPtr, false, false, false)
 }
 // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
 func (f genHelperDecoder) DecSliceHelperStart() (decSliceHelper, int) {

+ 20 - 6
codec/prebuild.sh

@@ -49,7 +49,7 @@ _build() {
         # [ -e "safe${_gg}" ] && mv safe${_gg} safe${_gg}__${_zts}.bak
         # [ -e "unsafe${_gg}" ] && mv unsafe${_gg} unsafe${_gg}__${_zts}.bak
     else 
-        rm -f fast-path.generated.go gen.generated.go gen-helper.generated.go *safe.generated.go *_generated_test.go
+        rm -f fast-path.generated.go gen.generated.go gen-helper.generated.go *safe.generated.go *_generated_test.go *.generated_ffjson_expose.go
     fi
 
     cat > gen.generated.go <<EOF
@@ -85,6 +85,8 @@ import "reflect"
 // func GenBytesToStringRO(b []byte) string { return string(b) }
 func fastpathDecodeTypeSwitch(iv interface{}, d *Decoder) bool { return false }
 func fastpathEncodeTypeSwitch(iv interface{}, e *Encoder) bool { return false }
+func fastpathEncodeTypeSwitchSlice(iv interface{}, e *Encoder) bool { return false }
+func fastpathEncodeTypeSwitchMap(iv interface{}, e *Encoder) bool { return false }
 type fastpathE struct {
 	rtid uintptr
 	rt reflect.Type 
@@ -113,7 +115,8 @@ defer fin.Close()
 fout, err := os.Create(fnameOut)
 if err != nil { panic(err) }
 defer fout.Close()
-codec.GenInternalGoFile(fin, fout, safe)
+err = codec.GenInternalGoFile(fin, fout, safe)
+if err != nil { panic(err) }
 }
 
 func main() {
@@ -130,14 +133,25 @@ EOF
         rm -f gen-from-tmpl.generated.go 
 }
 
-_msgp_and_codecgen() {
+_codegenerators() {
     if [[ $zforce == "1" || 
                 "1" == $( _needgen "values_msgp${zsfx}" ) 
                 || "1" == $( _needgen "values_codecgen${zsfx}" ) ]] 
     then
-        msgp -tests=false -pkg=codec -o=values_msgp${zsfx} -file=$zfin && \
+        true && \
+            echo "msgp ... " && \
+            msgp -tests=false -pkg=codec -o=values_msgp${zsfx} -file=$zfin && \
+            echo "codecgen - !unsafe ... " && \
             codecgen -rt codecgen -t 'x,codecgen,!unsafe' -o values_codecgen${zsfx} $zfin && \
-            codecgen -u -rt codecgen -t 'x,codecgen,unsafe' -o values_codecgen_unsafe${zsfx} $zfin 
+            echo "codecgen - unsafe ... " && \
+            codecgen -u -rt codecgen -t 'x,codecgen,unsafe' -o values_codecgen_unsafe${zsfx} $zfin && \
+            echo "ffjson ... " && \
+            ffjson -w values_ffjson${zsfx} $zfin && \
+            # remove (M|Unm)arshalJSON implementations, so they don't conflict with encoding/json bench \
+            sed -i 's+ MarshalJSON(+ _MarshalJSON(+g' values_ffjson${zsfx} && \
+            sed -i 's+ UnmarshalJSON(+ _UnmarshalJSON(+g' values_ffjson${zsfx} && \
+            echo "generators done!" && \
+            true
     fi 
 }
 
@@ -168,7 +182,7 @@ then
         _init "$@" && \
         _build && \
         cp $zmydir/values_test.go $zmydir/$zfin && \
-        _msgp_and_codecgen && \
+        _codegenerators && \
         echo prebuild done successfully
         echo prebuild done successfully
     rm -f $zmydir/$zfin
     rm -f $zmydir/$zfin
 else
 else
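The prebuild script's inline generator now checks the error returned by `codec.GenInternalGoFile` instead of ignoring it. As a rough, self-contained sketch of that pattern, assuming nothing about the real templates or their inputs, the program below expands a small `text/template` into a generated Go source file and panics on any failure, mirroring the abort-on-error behaviour the script relies on; all names (`genInput`, the template text, the output file name) are hypothetical.

```go
package main

import (
	"os"
	"text/template"
)

// genInput is a made-up stand-in for the type metadata the real prebuild
// step feeds into fast-path.go.tmpl and gen-helper.go.tmpl.
type genInput struct {
	PackageName string
	Types       []string
}

const src = `// Code generated by a prebuild step. DO NOT EDIT.
package {{ .PackageName }}
{{ range .Types }}
func encode{{ . }}(v {{ . }}) {} // placeholder body
{{ end }}
`

func main() {
	tmpl, err := template.New("fastpath-example").Parse(src)
	if err != nil {
		panic(err) // mirror prebuild.sh: abort on any generation error
	}
	out, err := os.Create("fastpath.generated.example.go")
	if err != nil {
		panic(err)
	}
	defer out.Close()
	if err := tmpl.Execute(out, genInput{
		PackageName: "codec",
		Types:       []string{"int", "string"},
	}); err != nil {
		panic(err)
	}
}
```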