
#53 move the current EnableXXX config switches into Config

Tao Wen, 8 years ago
commit d0418857ce

+ 1 - 3
feature_adapter.go

@@ -172,9 +172,7 @@ func (decoder *AdaptedDecoder) UseNumber() {
 }
 
 func NewEncoder(writer io.Writer) *AdaptedEncoder {
-	newCfg := &Config{}
-	initConfig(newCfg)
-	stream := NewStream(newCfg, writer, 512)
+	stream := NewStream(&Config{}, writer, 512)
 	return &AdaptedEncoder{stream}
 }
 

+ 81 - 7
feature_config.go

@@ -1,27 +1,66 @@
 package jsoniter
 
 import (
+	"io"
 	"reflect"
 	"sync/atomic"
 	"unsafe"
 )
 
 type Config struct {
-	IndentionStep int
-	decoderCache unsafe.Pointer
-	encoderCache unsafe.Pointer
+	IndentionStep                 int
+	MarshalFloatWith6Digits       bool
+	SupportUnexportedStructFields bool
+	decoderCache                  unsafe.Pointer
+	encoderCache                  unsafe.Pointer
+	extensions                    []ExtensionFunc
 }
 
 var DEFAULT_CONFIG = &Config{}
 
 func init() {
-	initConfig(DEFAULT_CONFIG)
+	DEFAULT_CONFIG.init()
 }
 
-func initConfig(cfg *Config) {
-	atomic.StorePointer(&cfg.decoderCache, unsafe.Pointer(&map[string]Decoder{}))
-	atomic.StorePointer(&cfg.encoderCache, unsafe.Pointer(&map[string]Encoder{}))
+func (cfg *Config) init() *Config {
+	if cfg.encoderCache == nil {
+		atomic.StorePointer(&cfg.decoderCache, unsafe.Pointer(&map[string]Decoder{}))
+		atomic.StorePointer(&cfg.encoderCache, unsafe.Pointer(&map[string]Encoder{}))
+		if cfg.MarshalFloatWith6Digits {
+			cfg.marshalFloatWith6Digits()
+		}
+		if cfg.SupportUnexportedStructFields {
+			cfg.supportUnexportedStructFields()
+		}
+	}
+	return cfg
+}
+
+// RegisterExtension registers a custom extension with this Config
+func (cfg *Config) RegisterExtension(extension ExtensionFunc) {
+	cfg.extensions = append(cfg.extensions, extension)
+}
+
+func (cfg *Config) supportUnexportedStructFields() {
+	cfg.RegisterExtension(func(type_ reflect.Type, field *reflect.StructField) ([]string, EncoderFunc, DecoderFunc) {
+		return []string{field.Name}, nil, nil
+	})
 }
+
+// marshalFloatWith6Digits keeps 10**(-6) precision
+// for float variables for better performance.
+func (cfg *Config) marshalFloatWith6Digits() {
+	// for better performance
+	cfg.addEncoderToCache(reflect.TypeOf((*float32)(nil)).Elem(), &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
+		val := *((*float32)(ptr))
+		stream.WriteFloat32Lossy(val)
+	}})
+	cfg.addEncoderToCache(reflect.TypeOf((*float64)(nil)).Elem(), &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
+		val := *((*float64)(ptr))
+		stream.WriteFloat64Lossy(val)
+	}})
+}
+
 func (cfg *Config) addDecoderToCache(cacheKey reflect.Type, decoder Decoder) {
 	done := false
 	for !done {
@@ -75,3 +114,38 @@ func (cfg *Config) CleanEncoders() {
 	fieldEncoders = map[string]Encoder{}
 	atomic.StorePointer(&cfg.encoderCache, unsafe.Pointer(&map[string]Encoder{}))
 }
+
+func (cfg *Config) MarshalToString(v interface{}) (string, error) {
+	buf, err := cfg.Marshal(v)
+	if err != nil {
+		return "", err
+	}
+	return string(buf), nil
+}
+
+func (cfg *Config) Marshal(v interface{}) ([]byte, error) {
+	cfg.init()
+	stream := NewStream(cfg, nil, 256)
+	stream.WriteVal(v)
+	if stream.Error != nil {
+		return nil, stream.Error
+	}
+	return stream.Buffer(), nil
+}
+
+func (cfg *Config) UnmarshalFromString(str string, v interface{}) error {
+	data := []byte(str)
+	data = data[:lastNotSpacePos(data)]
+	iter := ParseBytes(cfg, data)
+	iter.ReadVal(v)
+	if iter.head == iter.tail {
+		iter.loadMore()
+	}
+	if iter.Error == io.EOF {
+		return nil
+	}
+	if iter.Error == nil {
+		iter.reportError("UnmarshalFromString", "there are bytes left after unmarshal")
+	}
+	return iter.Error
+}

+ 3 - 0
feature_iter.go

@@ -76,6 +76,7 @@ type Iterator struct {
 
 // NewIterator creates an empty Iterator instance
 func NewIterator(cfg *Config) *Iterator {
+	cfg.init()
 	return &Iterator{
 		cfg:    cfg,
 		reader: nil,
@@ -87,6 +88,7 @@ func NewIterator(cfg *Config) *Iterator {
 
 // Parse parses a json buffer in io.Reader into an Iterator instance
 func Parse(cfg *Config, reader io.Reader, bufSize int) *Iterator {
+	cfg.init()
 	return &Iterator{
 		cfg:    cfg,
 		reader: reader,
@@ -98,6 +100,7 @@ func Parse(cfg *Config, reader io.Reader, bufSize int) *Iterator {
 
 // ParseBytes parses a json byte slice into an Iterator instance
 func ParseBytes(cfg *Config, input []byte) *Iterator {
+	cfg.init()
 	return &Iterator{
 		cfg:    cfg,
 		reader: nil,
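
Since every constructor now calls cfg.init() itself, a zero-value Config can be passed straight in; a small sketch mirroring the simplified NewStream(&Config{}, ...) call sites elsewhere in this commit:

	// ParseBytes and ReadVal are used exactly as in Config.UnmarshalFromString above.
	val := []int{}
	iter := ParseBytes(&Config{}, []byte(`[1, 2, 3]`))
	iter.ReadVal(&val)
	// val holds []int{1, 2, 3} when iter.Error is nil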

+ 4 - 4
feature_iter_float.go

@@ -102,9 +102,9 @@ non_decimal_loop:
 			ind := floatDigits[c]
 			switch ind {
 			case endOfNumber:
-				if decimalPlaces > 0 && decimalPlaces < len(POW10) {
+				if decimalPlaces > 0 && decimalPlaces < len(_POW10) {
 					iter.head = i
-					return float32(float64(value) / float64(POW10[decimalPlaces]))
+					return float32(float64(value) / float64(_POW10[decimalPlaces]))
 				}
 				// too many decimal places
 				return iter.readFloat32SlowPath()
@@ -205,9 +205,9 @@ non_decimal_loop:
 			ind := floatDigits[c]
 			switch ind {
 			case endOfNumber:
-				if decimalPlaces > 0 && decimalPlaces < len(POW10) {
+				if decimalPlaces > 0 && decimalPlaces < len(_POW10) {
 					iter.head = i
-					return float64(value) / float64(POW10[decimalPlaces])
+					return float64(value) / float64(_POW10[decimalPlaces])
 				}
 				// too many decimal places
 				return iter.readFloat64SlowPath()

+ 18 - 6
feature_reflect_object.go

@@ -24,6 +24,15 @@ func encoderOfStruct(cfg *Config, typ reflect.Type) (Encoder, error) {
 				fieldEncoders[fieldEncoderKey] = &funcEncoder{fun}
 			}
 		}
+		for _, extension := range cfg.extensions {
+			alternativeFieldNames, fun, _ := extension(typ, field)
+			if alternativeFieldNames != nil {
+				extensionProvidedFieldNames = alternativeFieldNames
+			}
+			if fun != nil {
+				fieldEncoders[fieldEncoderKey] = &funcEncoder{fun}
+			}
+		}
 		tagParts := strings.Split(field.Tag.Get("json"), ",")
 		// if fieldNames set by extension, use theirs, otherwise try tags
 		fieldNames := calcFieldNames(field.Name, tagParts[0], extensionProvidedFieldNames)
@@ -86,6 +95,15 @@ func decoderOfStruct(cfg *Config, typ reflect.Type) (Decoder, error) {
 				fieldDecoders[fieldDecoderKey] = &funcDecoder{fun}
 			}
 		}
+		for _, extension := range cfg.extensions {
+			alternativeFieldNames, _, fun := extension(typ, &field)
+			if alternativeFieldNames != nil {
+				extensionProviedFieldNames = alternativeFieldNames
+			}
+			if fun != nil {
+				fieldDecoders[fieldDecoderKey] = &funcDecoder{fun}
+			}
+		}
 		decoder := fieldDecoders[fieldDecoderKey]
 		tagParts := strings.Split(field.Tag.Get("json"), ",")
 		fieldNames := calcFieldNames(field.Name, tagParts[0], extensionProviedFieldNames)
@@ -130,12 +148,6 @@ func calcFieldNames(originalFieldName string, tagProvidedFieldName string, exten
 	return fieldNames
 }
 
-func EnableUnexportedStructFieldsSupport() {
-	RegisterExtension(func(type_ reflect.Type, field *reflect.StructField) ([]string, EncoderFunc, DecoderFunc) {
-		return []string{field.Name}, nil, nil
-	})
-}
-
 func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) (Decoder, error) {
 	knownHash := map[int32]struct{}{
 		0: {},
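
The per-Config extension hook consumed by the loops above can be installed with Config.RegisterExtension; a hedged sketch (the field name "Field1" and output name "field-1" are hypothetical):

	// A non-nil name slice overrides the json tag for that field
	// ("if fieldNames set by extension, use theirs, otherwise try tags");
	// returning all nils leaves the field untouched.
	cfg := &Config{}
	cfg.RegisterExtension(func(type_ reflect.Type, field *reflect.StructField) ([]string, EncoderFunc, DecoderFunc) {
		if field.Name == "Field1" {
			return []string{"field-1"}, nil, nil
		}
		return nil, nil, nil
	})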

+ 13 - 12
feature_stream.go

@@ -5,22 +5,23 @@ import (
 )
 
 type Stream struct {
-	cfg           *Config
-	out           io.Writer
-	buf           []byte
-	n             int
-	Error         error
-	indention     int
+	cfg       *Config
+	out       io.Writer
+	buf       []byte
+	n         int
+	Error     error
+	indention int
 }
 
 func NewStream(cfg *Config, out io.Writer, bufSize int) *Stream {
+	cfg.init()
 	return &Stream{
-		cfg:           cfg,
-		out:           out,
-		buf:           make([]byte, bufSize),
-		n:             0,
-		Error:         nil,
-		indention:     0,
+		cfg:       cfg,
+		out:       out,
+		buf:       make([]byte, bufSize),
+		n:         0,
+		Error:     nil,
+		indention: 0,
 	}
 }
 

+ 4 - 19
feature_stream_float.go

@@ -2,13 +2,12 @@ package jsoniter
 
 import (
 	"strconv"
-	"unsafe"
 )
 
-var POW10 []uint64
+var _POW10 []uint64
 
 func init() {
-	POW10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000}
+	_POW10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000}
 }
 
 func (stream *Stream) WriteFloat32(val float32) {
@@ -34,7 +33,7 @@ func (stream *Stream) WriteFloat32Lossy(val float32) {
 	}
 	stream.writeByte('.')
 	stream.ensure(10)
-	for p := precision - 1; p > 0 && fval < POW10[p]; p-- {
+	for p := precision - 1; p > 0 && fval < _POW10[p]; p-- {
 		stream.writeByte('0')
 	}
 	stream.WriteUint64(fval)
@@ -66,7 +65,7 @@ func (stream *Stream) WriteFloat64Lossy(val float64) {
 	}
 	stream.writeByte('.')
 	stream.ensure(10)
-	for p := precision - 1; p > 0 && fval < POW10[p]; p-- {
+	for p := precision - 1; p > 0 && fval < _POW10[p]; p-- {
 		stream.writeByte('0')
 	}
 	stream.WriteUint64(fval)
@@ -74,17 +73,3 @@ func (stream *Stream) WriteFloat64Lossy(val float64) {
 		stream.n--
 	}
 }
-
-// EnableLossyFloatMarshalling keeps 10**(-6) precision
-// for float variables for better performance.
-func EnableLossyFloatMarshalling() {
-	// for better performance
-	RegisterTypeEncoder("float32", func(ptr unsafe.Pointer, stream *Stream) {
-		val := *((*float32)(ptr))
-		stream.WriteFloat32Lossy(val)
-	})
-	RegisterTypeEncoder("float64", func(ptr unsafe.Pointer, stream *Stream) {
-		val := *((*float64)(ptr))
-		stream.WriteFloat64Lossy(val)
-	})
-}
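
For reference, the zero-padding loop kept above in the lossy writers handles fractions with leading zeros; a worked example (treating precision as 6 and fval as the fraction scaled by 10**6, which is an assumption about code outside this hunk):

	// For val = 1.000234 the scaled fraction is fval = 234. The loop writes a
	// '0' for each of _POW10[5], _POW10[4], _POW10[3] that exceeds fval, then
	// WriteUint64(234) emits "234", giving "1.000234" rather than "1.234".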

+ 72 - 72
feature_stream_int.go

@@ -1,15 +1,15 @@
 package jsoniter
 
-var DIGITS []uint32
+var _DIGITS []uint32
 
 func init() {
-	DIGITS = make([]uint32, 1000)
+	_DIGITS = make([]uint32, 1000)
 	for i := uint32(0); i < 1000; i++ {
-		DIGITS[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
+		_DIGITS[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
 		if i < 10 {
-			DIGITS[i] += 2 << 24
+			_DIGITS[i] += 2 << 24
 		} else if i < 100 {
-			DIGITS[i] += 1 << 24
+			_DIGITS[i] += 1 << 24
 		}
 	}
 }
@@ -38,7 +38,7 @@ func writeBuf(buf []byte, v uint32, n int) {
 
 func (stream *Stream) WriteUint8(val uint8) {
 	stream.ensure(3)
-	stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n)
+	stream.n = writeFirstBuf(stream.buf, _DIGITS[val], stream.n)
 }
 
 func (stream *Stream) WriteInt8(nval int8) {
@@ -52,19 +52,19 @@ func (stream *Stream) WriteInt8(nval int8) {
 	} else {
 		val = uint8(nval)
 	}
-	stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
+	stream.n = writeFirstBuf(stream.buf, _DIGITS[val], n)
 }
 
 func (stream *Stream) WriteUint16(val uint16) {
 	stream.ensure(5)
 	q1 := val / 1000
 	if q1 == 0 {
-		stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n)
+		stream.n = writeFirstBuf(stream.buf, _DIGITS[val], stream.n)
 		return
 	}
 	r1 := val - q1*1000
-	n := writeFirstBuf(stream.buf, DIGITS[q1], stream.n)
-	writeBuf(stream.buf, DIGITS[r1], n)
+	n := writeFirstBuf(stream.buf, _DIGITS[q1], stream.n)
+	writeBuf(stream.buf, _DIGITS[r1], n)
 	stream.n = n + 3
 	return
 }
@@ -82,12 +82,12 @@ func (stream *Stream) WriteInt16(nval int16) {
 	}
 	q1 := val / 1000
 	if q1 == 0 {
-		stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
+		stream.n = writeFirstBuf(stream.buf, _DIGITS[val], n)
 		return
 	}
 	r1 := val - q1*1000
-	n = writeFirstBuf(stream.buf, DIGITS[q1], n)
-	writeBuf(stream.buf, DIGITS[r1], n)
+	n = writeFirstBuf(stream.buf, _DIGITS[q1], n)
+	writeBuf(stream.buf, _DIGITS[r1], n)
 	stream.n = n + 3
 	return
 }
@@ -97,30 +97,30 @@ func (stream *Stream) WriteUint32(val uint32) {
 	n := stream.n
 	q1 := val / 1000
 	if q1 == 0 {
-		stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
+		stream.n = writeFirstBuf(stream.buf, _DIGITS[val], n)
 		return
 	}
 	r1 := val - q1*1000
 	q2 := q1 / 1000
 	if q2 == 0 {
-		n := writeFirstBuf(stream.buf, DIGITS[q1], n)
-		writeBuf(stream.buf, DIGITS[r1], n)
+		n := writeFirstBuf(stream.buf, _DIGITS[q1], n)
+		writeBuf(stream.buf, _DIGITS[r1], n)
 		stream.n = n + 3
 		return
 	}
 	r2 := q1 - q2*1000
 	q3 := q2 / 1000
 	if q3 == 0 {
-		n = writeFirstBuf(stream.buf, DIGITS[q2], n)
+		n = writeFirstBuf(stream.buf, _DIGITS[q2], n)
 	} else {
 		r3 := q2 - q3*1000
 		stream.buf[n] = byte(q3 + '0')
 		n++
-		writeBuf(stream.buf, DIGITS[r3], n)
+		writeBuf(stream.buf, _DIGITS[r3], n)
 		n += 3
 	}
-	writeBuf(stream.buf, DIGITS[r2], n)
-	writeBuf(stream.buf, DIGITS[r1], n+3)
+	writeBuf(stream.buf, _DIGITS[r2], n)
+	writeBuf(stream.buf, _DIGITS[r1], n+3)
 	stream.n = n + 6
 }
 
@@ -137,30 +137,30 @@ func (stream *Stream) WriteInt32(nval int32) {
 	}
 	q1 := val / 1000
 	if q1 == 0 {
-		stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
+		stream.n = writeFirstBuf(stream.buf, _DIGITS[val], n)
 		return
 	}
 	r1 := val - q1*1000
 	q2 := q1 / 1000
 	if q2 == 0 {
-		n := writeFirstBuf(stream.buf, DIGITS[q1], n)
-		writeBuf(stream.buf, DIGITS[r1], n)
+		n := writeFirstBuf(stream.buf, _DIGITS[q1], n)
+		writeBuf(stream.buf, _DIGITS[r1], n)
 		stream.n = n + 3
 		return
 	}
 	r2 := q1 - q2*1000
 	q3 := q2 / 1000
 	if q3 == 0 {
-		n = writeFirstBuf(stream.buf, DIGITS[q2], n)
+		n = writeFirstBuf(stream.buf, _DIGITS[q2], n)
 	} else {
 		r3 := q2 - q3*1000
 		stream.buf[n] = byte(q3 + '0')
 		n++
-		writeBuf(stream.buf, DIGITS[r3], n)
+		writeBuf(stream.buf, _DIGITS[r3], n)
 		n += 3
 	}
-	writeBuf(stream.buf, DIGITS[r2], n)
-	writeBuf(stream.buf, DIGITS[r1], n+3)
+	writeBuf(stream.buf, _DIGITS[r2], n)
+	writeBuf(stream.buf, _DIGITS[r1], n+3)
 	stream.n = n + 6
 }
 
@@ -169,62 +169,62 @@ func (stream *Stream) WriteUint64(val uint64) {
 	n := stream.n
 	q1 := val / 1000
 	if q1 == 0 {
-		stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
+		stream.n = writeFirstBuf(stream.buf, _DIGITS[val], n)
 		return
 	}
 	r1 := val - q1*1000
 	q2 := q1 / 1000
 	if q2 == 0 {
-		n := writeFirstBuf(stream.buf, DIGITS[q1], n)
-		writeBuf(stream.buf, DIGITS[r1], n)
+		n := writeFirstBuf(stream.buf, _DIGITS[q1], n)
+		writeBuf(stream.buf, _DIGITS[r1], n)
 		stream.n = n + 3
 		return
 	}
 	r2 := q1 - q2*1000
 	q3 := q2 / 1000
 	if q3 == 0 {
-		n = writeFirstBuf(stream.buf, DIGITS[q2], n)
-		writeBuf(stream.buf, DIGITS[r2], n)
-		writeBuf(stream.buf, DIGITS[r1], n+3)
+		n = writeFirstBuf(stream.buf, _DIGITS[q2], n)
+		writeBuf(stream.buf, _DIGITS[r2], n)
+		writeBuf(stream.buf, _DIGITS[r1], n+3)
 		stream.n = n + 6
 		return
 	}
 	r3 := q2 - q3*1000
 	q4 := q3 / 1000
 	if q4 == 0 {
-		n = writeFirstBuf(stream.buf, DIGITS[q3], n)
-		writeBuf(stream.buf, DIGITS[r3], n)
-		writeBuf(stream.buf, DIGITS[r2], n+3)
-		writeBuf(stream.buf, DIGITS[r1], n+6)
+		n = writeFirstBuf(stream.buf, _DIGITS[q3], n)
+		writeBuf(stream.buf, _DIGITS[r3], n)
+		writeBuf(stream.buf, _DIGITS[r2], n+3)
+		writeBuf(stream.buf, _DIGITS[r1], n+6)
 		stream.n = n + 9
 		return
 	}
 	r4 := q3 - q4*1000
 	q5 := q4 / 1000
 	if q5 == 0 {
-		n = writeFirstBuf(stream.buf, DIGITS[q4], n)
-		writeBuf(stream.buf, DIGITS[r4], n)
-		writeBuf(stream.buf, DIGITS[r3], n+3)
-		writeBuf(stream.buf, DIGITS[r2], n+6)
-		writeBuf(stream.buf, DIGITS[r1], n+9)
+		n = writeFirstBuf(stream.buf, _DIGITS[q4], n)
+		writeBuf(stream.buf, _DIGITS[r4], n)
+		writeBuf(stream.buf, _DIGITS[r3], n+3)
+		writeBuf(stream.buf, _DIGITS[r2], n+6)
+		writeBuf(stream.buf, _DIGITS[r1], n+9)
 		stream.n = n + 12
 		return
 	}
 	r5 := q4 - q5*1000
 	q6 := q5 / 1000
 	if q6 == 0 {
-		n = writeFirstBuf(stream.buf, DIGITS[q5], n)
+		n = writeFirstBuf(stream.buf, _DIGITS[q5], n)
 	} else {
-		n = writeFirstBuf(stream.buf, DIGITS[q6], n)
+		n = writeFirstBuf(stream.buf, _DIGITS[q6], n)
 		r6 := q5 - q6*1000
-		writeBuf(stream.buf, DIGITS[r6], n)
+		writeBuf(stream.buf, _DIGITS[r6], n)
 		n += 3
 	}
-	writeBuf(stream.buf, DIGITS[r5], n)
-	writeBuf(stream.buf, DIGITS[r4], n+3)
-	writeBuf(stream.buf, DIGITS[r3], n+6)
-	writeBuf(stream.buf, DIGITS[r2], n+9)
-	writeBuf(stream.buf, DIGITS[r1], n+12)
+	writeBuf(stream.buf, _DIGITS[r5], n)
+	writeBuf(stream.buf, _DIGITS[r4], n+3)
+	writeBuf(stream.buf, _DIGITS[r3], n+6)
+	writeBuf(stream.buf, _DIGITS[r2], n+9)
+	writeBuf(stream.buf, _DIGITS[r1], n+12)
 	stream.n = n + 15
 }
 
@@ -241,63 +241,63 @@ func (stream *Stream) WriteInt64(nval int64) {
 	}
 	q1 := val / 1000
 	if q1 == 0 {
-		stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
+		stream.n = writeFirstBuf(stream.buf, _DIGITS[val], n)
 		return
 	}
 	r1 := val - q1*1000
 	q2 := q1 / 1000
 	if q2 == 0 {
-		n := writeFirstBuf(stream.buf, DIGITS[q1], n)
-		writeBuf(stream.buf, DIGITS[r1], n)
+		n := writeFirstBuf(stream.buf, _DIGITS[q1], n)
+		writeBuf(stream.buf, _DIGITS[r1], n)
 		stream.n = n + 3
 		return
 	}
 	r2 := q1 - q2*1000
 	q3 := q2 / 1000
 	if q3 == 0 {
-		n = writeFirstBuf(stream.buf, DIGITS[q2], n)
-		writeBuf(stream.buf, DIGITS[r2], n)
-		writeBuf(stream.buf, DIGITS[r1], n+3)
+		n = writeFirstBuf(stream.buf, _DIGITS[q2], n)
+		writeBuf(stream.buf, _DIGITS[r2], n)
+		writeBuf(stream.buf, _DIGITS[r1], n+3)
 		stream.n = n + 6
 		return
 	}
 	r3 := q2 - q3*1000
 	q4 := q3 / 1000
 	if q4 == 0 {
-		n = writeFirstBuf(stream.buf, DIGITS[q3], n)
-		writeBuf(stream.buf, DIGITS[r3], n)
-		writeBuf(stream.buf, DIGITS[r2], n+3)
-		writeBuf(stream.buf, DIGITS[r1], n+6)
+		n = writeFirstBuf(stream.buf, _DIGITS[q3], n)
+		writeBuf(stream.buf, _DIGITS[r3], n)
+		writeBuf(stream.buf, _DIGITS[r2], n+3)
+		writeBuf(stream.buf, _DIGITS[r1], n+6)
 		stream.n = n + 9
 		return
 	}
 	r4 := q3 - q4*1000
 	q5 := q4 / 1000
 	if q5 == 0 {
-		n = writeFirstBuf(stream.buf, DIGITS[q4], n)
-		writeBuf(stream.buf, DIGITS[r4], n)
-		writeBuf(stream.buf, DIGITS[r3], n+3)
-		writeBuf(stream.buf, DIGITS[r2], n+6)
-		writeBuf(stream.buf, DIGITS[r1], n+9)
+		n = writeFirstBuf(stream.buf, _DIGITS[q4], n)
+		writeBuf(stream.buf, _DIGITS[r4], n)
+		writeBuf(stream.buf, _DIGITS[r3], n+3)
+		writeBuf(stream.buf, _DIGITS[r2], n+6)
+		writeBuf(stream.buf, _DIGITS[r1], n+9)
 		stream.n = n + 12
 		return
 	}
 	r5 := q4 - q5*1000
 	q6 := q5 / 1000
 	if q6 == 0 {
-		n = writeFirstBuf(stream.buf, DIGITS[q5], n)
+		n = writeFirstBuf(stream.buf, _DIGITS[q5], n)
 	} else {
 		stream.buf[n] = byte(q6 + '0')
 		n++
 		r6 := q5 - q6*1000
-		writeBuf(stream.buf, DIGITS[r6], n)
+		writeBuf(stream.buf, _DIGITS[r6], n)
 		n += 3
 	}
-	writeBuf(stream.buf, DIGITS[r5], n)
-	writeBuf(stream.buf, DIGITS[r4], n+3)
-	writeBuf(stream.buf, DIGITS[r3], n+6)
-	writeBuf(stream.buf, DIGITS[r2], n+9)
-	writeBuf(stream.buf, DIGITS[r1], n+12)
+	writeBuf(stream.buf, _DIGITS[r5], n)
+	writeBuf(stream.buf, _DIGITS[r4], n+3)
+	writeBuf(stream.buf, _DIGITS[r3], n+6)
+	writeBuf(stream.buf, _DIGITS[r2], n+9)
+	writeBuf(stream.buf, _DIGITS[r1], n+12)
 	stream.n = n + 15
 }
 

+ 1 - 3
jsoniter_array_test.go

@@ -213,9 +213,7 @@ func Test_whitespace_before_comma(t *testing.T) {
 func Test_write_array(t *testing.T) {
 	should := require.New(t)
 	buf := &bytes.Buffer{}
-	newCfg := &Config{IndentionStep: 2}
-	initConfig(newCfg)
-	stream := NewStream(newCfg, buf, 4096)
+	stream := NewStream(&Config{IndentionStep: 2}, buf, 4096)
 	stream.WriteArrayStart()
 	stream.WriteInt(1)
 	stream.WriteMore()

+ 6 - 7
jsoniter_customize_test.go

@@ -60,9 +60,8 @@ func Test_customize_byte_array_encoder(t *testing.T) {
 
 func Test_customize_float_marshal(t *testing.T) {
 	should := require.New(t)
-	EnableLossyFloatMarshalling()
-	defer DEFAULT_CONFIG.CleanEncoders()
-	str, err := MarshalToString(float32(1.23456789))
+	json := Config{MarshalFloatWith6Digits: true}
+	str, err := json.MarshalToString(float32(1.23456789))
 	should.Nil(err)
 	should.Equal("1.234568", str)
 }
@@ -113,7 +112,7 @@ func Test_customize_field_by_extension(t *testing.T) {
 }
 
 func Test_unexported_fields(t *testing.T) {
-	EnableUnexportedStructFieldsSupport()
+	jsoniter := &Config{SupportUnexportedStructFields: true}
 	should := require.New(t)
 	type TestObject struct {
 		field1 string
@@ -121,12 +120,12 @@ func Test_unexported_fields(t *testing.T) {
 	}
 	obj := TestObject{}
 	obj.field1 = "hello"
-	should.Nil(UnmarshalFromString(`{}`, &obj))
+	should.Nil(jsoniter.UnmarshalFromString(`{}`, &obj))
 	should.Equal("hello", obj.field1)
-	should.Nil(UnmarshalFromString(`{"field1": "world", "field-2": "abc"}`, &obj))
+	should.Nil(jsoniter.UnmarshalFromString(`{"field1": "world", "field-2": "abc"}`, &obj))
 	should.Equal("world", obj.field1)
 	should.Equal("abc", obj.field2)
-	str, err := MarshalToString(obj)
+	str, err := jsoniter.MarshalToString(obj)
 	should.Nil(err)
 	should.Contains(str, `"field-2":"abc"`)
 }

+ 1 - 3
jsoniter_object_test.go

@@ -210,9 +210,7 @@ func Test_object_wrapper_any_get_all(t *testing.T) {
 func Test_write_object(t *testing.T) {
 	should := require.New(t)
 	buf := &bytes.Buffer{}
-	newCfg := &Config{IndentionStep: 2}
-	initConfig(newCfg)
-	stream := NewStream(newCfg, buf, 4096)
+	stream := NewStream(&Config{IndentionStep: 2}, buf, 4096)
 	stream.WriteObjectStart()
 	stream.WriteObjectField("hello")
 	stream.WriteInt(1)

+ 9 - 9
jsoniter_optional_test.go

@@ -21,26 +21,26 @@ func Test_encode_optional_int_pointer(t *testing.T) {
 func Test_decode_struct_with_optional_field(t *testing.T) {
 	should := require.New(t)
 	type TestObject struct {
-		field1 *string
-		field2 *string
+		Field1 *string
+		Field2 *string
 	}
 	obj := TestObject{}
 	UnmarshalFromString(`{"field1": null, "field2": "world"}`, &obj)
-	should.Nil(obj.field1)
-	should.Equal("world", *obj.field2)
+	should.Nil(obj.Field1)
+	should.Equal("world", *obj.Field2)
 }
 
 func Test_encode_struct_with_optional_field(t *testing.T) {
 	should := require.New(t)
 	type TestObject struct {
-		field1 *string
-		field2 *string
+		Field1 *string
+		Field2 *string
 	}
 	obj := TestObject{}
 	world := "world"
-	obj.field2 = &world
+	obj.Field2 = &world
 	str, err := MarshalToString(obj)
 	should.Nil(err)
-	should.Contains(str, `"field1":null`)
-	should.Contains(str, `"field2":"world"`)
+	should.Contains(str, `"Field1":null`)
+	should.Contains(str, `"Field2":"world"`)
 }

+ 4 - 4
jsoniter_reflect_test.go

@@ -24,8 +24,8 @@ func Test_decode_large_slice(t *testing.T) {
 
 func Test_decode_nested(t *testing.T) {
 	type StructOfString struct {
-		field1 string
-		field2 string
+		Field1 string
+		Field2 string
 	}
 	iter := ParseString(DEFAULT_CONFIG, `[{"field1": "hello"}, null, {"field2": "world"}]`)
 	slice := []*StructOfString{}
@@ -34,7 +34,7 @@ func Test_decode_nested(t *testing.T) {
 		fmt.Println(iter.Error)
 		t.Fatal(len(slice))
 	}
-	if slice[0].field1 != "hello" {
+	if slice[0].Field1 != "hello" {
 		fmt.Println(iter.Error)
 		t.Fatal(slice[0])
 	}
@@ -42,7 +42,7 @@ func Test_decode_nested(t *testing.T) {
 		fmt.Println(iter.Error)
 		t.Fatal(slice[1])
 	}
-	if slice[2].field2 != "world" {
+	if slice[2].Field2 != "world" {
 		fmt.Println(iter.Error)
 		t.Fatal(slice[2])
 	}

+ 1 - 3
jsoniter_stream_test.go

@@ -31,9 +31,7 @@ func Test_writeBytes_should_grow_buffer(t *testing.T) {
 
 func Test_writeIndention_should_grow_buffer(t *testing.T) {
 	should := require.New(t)
-	newCfg := &Config{IndentionStep: 2}
-	initConfig(newCfg)
-	stream := NewStream(newCfg, nil, 1)
+	stream := NewStream(&Config{IndentionStep: 2}, nil, 1)
 	stream.WriteVal([]int{1, 2, 3})
 	should.Equal("[\n  1,\n  2,\n  3\n]", string(stream.Buffer()))
 }