
Vendor Update Go Libs (#13166)

* update github.com/alecthomas/chroma v0.8.0 -> v0.8.1

* github.com/blevesearch/bleve v1.0.10 -> v1.0.12

* editorconfig-core-go v2.1.1 -> v2.3.7

* github.com/gliderlabs/ssh v0.2.2 -> v0.3.1

* migrate editorconfig.ParseBytes to Parse (see the sketch after this list)

* github.com/shurcooL/vfsgen to 0d455de96546

* github.com/go-git/go-git/v5 v5.1.0 -> v5.2.0

* github.com/google/uuid v1.1.1 -> v1.1.2

* github.com/huandu/xstrings v1.3.0 -> v1.3.2

* github.com/klauspost/compress v1.10.11 -> v1.11.1

* github.com/markbates/goth v1.61.2 -> v1.65.0

* github.com/mattn/go-sqlite3 v1.14.0 -> v1.14.4

* github.com/mholt/archiver v3.3.0 -> v3.3.2

* github.com/microcosm-cc/bluemonday 4f7140c49acb -> v1.0.4

* github.com/minio/minio-go v7.0.4 -> v7.0.5

* github.com/olivere/elastic v7.0.9 -> v7.0.20

* github.com/urfave/cli v1.20.0 -> v1.22.4

* github.com/prometheus/client_golang v1.1.0 -> v1.8.0

* github.com/xanzy/go-gitlab v0.37.0 -> v0.38.1

* mvdan.cc/xurls v2.1.0 -> v2.2.0
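
A minimal sketch of the editorconfig.ParseBytes -> Parse migration noted above, assuming editorconfig-core-go v2's reader-based `Parse` signature; the sample `.editorconfig` content and variable names are illustrative only:

```go
package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/editorconfig/editorconfig-core-go/v2"
)

func main() {
	data := []byte("root = true\n\n[*]\nindent_style = space\n")

	// Before: definition, err := editorconfig.ParseBytes(data)
	// After: wrap the bytes in a reader and call Parse instead.
	definition, err := editorconfig.Parse(bytes.NewReader(data))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(definition.Root) // true, from the "root = true" line above
}
```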

Co-authored-by: Lauris BH <lauris@nix.lv>
6543 2020-10-16 07:06:27 +02:00 committed by GitHub
parent 91f2afdb54
commit 12a1f914f4
656 changed files with 52967 additions and 25229 deletions

vendor/github.com/mailru/easyjson/Makefile vendored

@ -20,22 +20,38 @@ generate: build
./tests/reference_to_pointer.go \
./tests/html.go \
./tests/unknown_fields.go \
bin/easyjson -all ./tests/data.go
bin/easyjson -all ./tests/nothing.go
bin/easyjson -all ./tests/errors.go
bin/easyjson -all ./tests/html.go
./tests/type_declaration.go \
./tests/type_declaration_skip.go \
./tests/members_escaped.go \
./tests/members_unescaped.go \
./tests/intern.go \
./tests/nocopy.go \
./tests/escaping.go
bin/easyjson -all \
./tests/data.go \
./tests/nothing.go \
./tests/errors.go \
./tests/html.go \
./tests/type_declaration_skip.go
bin/easyjson \
./tests/nested_easy.go \
./tests/named_type.go \
./tests/custom_map_key_type.go \
./tests/embedded_type.go \
./tests/reference_to_pointer.go \
./tests/key_marshaler_map.go \
./tests/unknown_fields.go \
./tests/type_declaration.go \
./tests/members_escaped.go \
./tests/intern.go \
./tests/nocopy.go \
./tests/escaping.go \
./tests/nested_marshaler.go
bin/easyjson -snake_case ./tests/snake.go
bin/easyjson -omit_empty ./tests/omitempty.go
bin/easyjson -build_tags=use_easyjson ./benchmark/data.go
bin/easyjson ./tests/nested_easy.go
bin/easyjson ./tests/named_type.go
bin/easyjson ./tests/custom_map_key_type.go
bin/easyjson ./tests/embedded_type.go
bin/easyjson ./tests/reference_to_pointer.go
bin/easyjson ./tests/key_marshaler_map.go
bin/easyjson -build_tags=use_easyjson -disable_members_unescape ./benchmark/data.go
bin/easyjson -disallow_unknown_fields ./tests/disallow_unknown.go
bin/easyjson ./tests/unknown_fields.go
bin/easyjson -disable_members_unescape ./tests/members_unescaped.go
test: generate
go test \

vendor/github.com/mailru/easyjson/README.md vendored

@ -34,7 +34,11 @@ Usage of easyjson:
-all
generate marshaler/unmarshalers for all structs in a file
-build_tags string
build tags to add to generated file
build tags to add to generated file
-gen_build_flags string
build flags when running the generator while bootstrapping
-byte
use simple bytes instead of Base64Bytes for slice of bytes
-leave_temps
do not delete temporary files
-no_std_marshalers
@ -55,10 +59,20 @@ Usage of easyjson:
only generate stubs for marshaler/unmarshaler funcs
-disallow_unknown_fields
return error if some unknown field in json appeared
-disable_members_unescape
disable unescaping of \uXXXX string sequences in member names
```
Using `-all` will generate marshalers/unmarshalers for all Go structs in the
file. If `-all` is not provided, then only those structs whose preceding
file excluding those structs whose preceding comment starts with `easyjson:skip`.
For example:
```go
//easyjson:skip
type A struct {}
```
If `-all` is not provided, then only those structs whose preceding
comment starts with `easyjson:json` will have marshalers/unmarshalers
generated. For example:
@ -76,10 +90,26 @@ Additional option notes:
* `-build_tags` will add the specified build tags to generated Go sources.
* `-gen_build_flags` will execute the easyjson bootstrapping code to launch the
actual generator command with provided flags. Multiple arguments should be
separated by space e.g. `-gen_build_flags="-mod=mod -x"`.
## Structure json tag options
Besides standard json tag options like 'omitempty', the following are supported:
* 'nocopy' - disables allocation and copying of string values, making them
refer to the original json buffer memory (see the example after this list).
This works great for short-lived objects which are not held in memory after
decoding and immediate use. Note that if a string requires unescaping, it is
processed as usual.
* 'intern' - string "interning" (deduplication) to save memory when the same
dictionary-style string values appear repeatedly throughout the structure.
See below for more details.
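
As a hedged illustration of the tag syntax (the `Event` type and its fields are hypothetical and not part of easyjson):
```go
//easyjson:json
type Event struct {
	// 'nocopy': the decoded Payload points into the original JSON buffer,
	// so it must not outlive that buffer.
	Payload string `json:"payload,nocopy"`

	// 'intern': repeated Status values share a single string in memory.
	Status string `json:"status,intern"`
}
```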
## Generated Marshaler/Unmarshaler Funcs
For Go struct types, easyjson generates the funcs `MarshalEasyJSON` /
`UnmarshalEasyJSON` for marshaling/unmarshaling JSON. In turn, these satisify
`UnmarshalEasyJSON` for marshaling/unmarshaling JSON. In turn, these satisfy
the `easyjson.Marshaler` and `easyjson.Unmarshaler` interfaces and when used in
conjunction with `easyjson.Marshal` / `easyjson.Unmarshal` avoid unnecessary
reflection / type assertions during marshaling/unmarshaling to/from JSON for Go
@ -102,17 +132,17 @@ utility funcs that are available.
## Controlling easyjson Marshaling and Unmarshaling Behavior
Go types can provide their own `MarshalEasyJSON` and `UnmarshalEasyJSON` funcs
that satisify the `easyjson.Marshaler` / `easyjson.Unmarshaler` interfaces.
that satisfy the `easyjson.Marshaler` / `easyjson.Unmarshaler` interfaces.
These will be used by `easyjson.Marshal` and `easyjson.Unmarshal` when defined
for a Go type.
Go types can also satisify the `easyjson.Optional` interface, which allows the
Go types can also satisfy the `easyjson.Optional` interface, which allows the
type to define its own `omitempty` logic.
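
For illustration, a minimal hand-written sketch of such a type (the `Price` type is hypothetical; normally the generator emits these funcs for you):
```go
import (
	"github.com/mailru/easyjson/jlexer"
	"github.com/mailru/easyjson/jwriter"
)

// Price remembers whether a value was explicitly present in the JSON.
type Price struct {
	Cents   int64
	Defined bool
}

// MarshalEasyJSON satisfies easyjson.Marshaler.
func (p Price) MarshalEasyJSON(w *jwriter.Writer) { w.Int64(p.Cents) }

// UnmarshalEasyJSON satisfies easyjson.Unmarshaler.
func (p *Price) UnmarshalEasyJSON(l *jlexer.Lexer) {
	p.Cents = l.Int64()
	p.Defined = true
}

// IsDefined satisfies easyjson.Optional, letting `omitempty` skip unset prices.
func (p Price) IsDefined() bool { return p.Defined }
```
With these defined, `easyjson.Marshal` and `easyjson.Unmarshal` call them directly instead of falling back to reflection.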
## Type Wrappers
easyjson provides additional type wrappers defined in the `easyjson/opt`
package. These wrap the standard Go primitives and in turn satisify the
package. These wrap the standard Go primitives and in turn satisfy the
easyjson interfaces.
The `easyjson/opt` type wrappers are useful when needing to distinguish between
@ -133,6 +163,27 @@ through a call to `buffer.Init()` prior to any marshaling or unmarshaling.
Please see the [GoDoc listing](https://godoc.org/github.com/mailru/easyjson/buffer)
for more information.
## String interning
During unmarshaling, `string` field values can be optionally
[interned](https://en.wikipedia.org/wiki/String_interning) to reduce memory
allocations and usage by deduplicating strings in memory, at the expense of slightly
increased CPU usage.
This is effective only for decoded `string` fields that frequently carry the same
value (e.g. a string field that can only take a small number of possible values).
To enable string interning, add the `intern` keyword tag to your `json` tag on `string`
fields, e.g.:
```go
type Foo struct {
UUID string `json:"uuid"` // will not be interned during unmarshaling
State string `json:"state,intern"` // will be interned during unmarshaling
}
```
## Issues, Notes, and Limitations
* easyjson is still early in its development. As such, there are likely to be
@ -174,7 +225,7 @@ for more information.
needs to be known prior to sending the data. Currently this is not possible
with easyjson's architecture.
* easyjson parser and codegen based on reflection, so it wont works on `package main`
* easyjson parser and codegen based on reflection, so it won't work on `package main`
files, because they cant be imported by parser.
## Benchmarks
@ -239,7 +290,7 @@ since the memory is not freed between marshaling operations.
### easyjson vs 'ujson' python module
[ujson](https://github.com/esnme/ultrajson) is using C code for parsing, so it
is interesting to see how plain golang compares to that. It is imporant to note
is interesting to see how plain golang compares to that. It is important to note
that the resulting object for python is slower to access, since the library
parses JSON object into dictionaries.

vendor/github.com/mailru/easyjson/buffer/pool.go vendored

@ -4,6 +4,7 @@ package buffer
import (
"io"
"net"
"sync"
)
@ -52,14 +53,12 @@ func putBuf(buf []byte) {
// getBuf gets a chunk from reuse pool or creates a new one if reuse failed.
func getBuf(size int) []byte {
if size < config.PooledSize {
return make([]byte, 0, size)
}
if c := buffers[size]; c != nil {
v := c.Get()
if v != nil {
return v.([]byte)
if size >= config.PooledSize {
if c := buffers[size]; c != nil {
v := c.Get()
if v != nil {
return v.([]byte)
}
}
}
return make([]byte, 0, size)
@ -78,9 +77,12 @@ type Buffer struct {
// EnsureSpace makes sure that the current chunk contains at least s free bytes,
// possibly creating a new chunk.
func (b *Buffer) EnsureSpace(s int) {
if cap(b.Buf)-len(b.Buf) >= s {
return
if cap(b.Buf)-len(b.Buf) < s {
b.ensureSpaceSlow(s)
}
}
func (b *Buffer) ensureSpaceSlow(s int) {
l := len(b.Buf)
if l > 0 {
if cap(b.toPool) != cap(b.Buf) {
@ -105,18 +107,22 @@ func (b *Buffer) EnsureSpace(s int) {
// AppendByte appends a single byte to buffer.
func (b *Buffer) AppendByte(data byte) {
if cap(b.Buf) == len(b.Buf) { // EnsureSpace won't be inlined.
b.EnsureSpace(1)
}
b.EnsureSpace(1)
b.Buf = append(b.Buf, data)
}
// AppendBytes appends a byte slice to buffer.
func (b *Buffer) AppendBytes(data []byte) {
if len(data) <= cap(b.Buf)-len(b.Buf) {
b.Buf = append(b.Buf, data...) // fast path
} else {
b.appendBytesSlow(data)
}
}
func (b *Buffer) appendBytesSlow(data []byte) {
for len(data) > 0 {
if cap(b.Buf) == len(b.Buf) { // EnsureSpace won't be inlined.
b.EnsureSpace(1)
}
b.EnsureSpace(1)
sz := cap(b.Buf) - len(b.Buf)
if sz > len(data) {
@ -128,12 +134,18 @@ func (b *Buffer) AppendBytes(data []byte) {
}
}
// AppendBytes appends a string to buffer.
// AppendString appends a string to buffer.
func (b *Buffer) AppendString(data string) {
if len(data) <= cap(b.Buf)-len(b.Buf) {
b.Buf = append(b.Buf, data...) // fast path
} else {
b.appendStringSlow(data)
}
}
func (b *Buffer) appendStringSlow(data string) {
for len(data) > 0 {
if cap(b.Buf) == len(b.Buf) { // EnsureSpace won't be inlined.
b.EnsureSpace(1)
}
b.EnsureSpace(1)
sz := cap(b.Buf) - len(b.Buf)
if sz > len(data) {
@ -156,18 +168,14 @@ func (b *Buffer) Size() int {
// DumpTo outputs the contents of a buffer to a writer and resets the buffer.
func (b *Buffer) DumpTo(w io.Writer) (written int, err error) {
var n int
for _, buf := range b.bufs {
if err == nil {
n, err = w.Write(buf)
written += n
}
putBuf(buf)
bufs := net.Buffers(b.bufs)
if len(b.Buf) > 0 {
bufs = append(bufs, b.Buf)
}
n, err := bufs.WriteTo(w)
if err == nil {
n, err = w.Write(b.Buf)
written += n
for _, buf := range b.bufs {
putBuf(buf)
}
putBuf(b.toPool)
@ -175,7 +183,7 @@ func (b *Buffer) DumpTo(w io.Writer) (written int, err error) {
b.Buf = nil
b.toPool = nil
return
return int(n), err
}
// BuildBytes creates a single byte slice with all the contents of the buffer. Data is
@ -192,7 +200,7 @@ func (b *Buffer) BuildBytes(reuse ...[]byte) []byte {
var ret []byte
size := b.Size()
// If we got a buffer as argument and it is big enought, reuse it.
// If we got a buffer as argument and it is big enough, reuse it.
if len(reuse) == 1 && cap(reuse[0]) >= size {
ret = reuse[0][:0]
} else {

vendor/github.com/mailru/easyjson/go.mod vendored

@ -1,3 +1,5 @@
module github.com/mailru/easyjson
go 1.12
require github.com/josharian/intern v1.0.0

vendor/github.com/mailru/easyjson/go.sum generated vendored Normal file

@ -0,0 +1,2 @@
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=

vendor/github.com/mailru/easyjson/helpers.go vendored

@ -6,6 +6,7 @@ import (
"io/ioutil"
"net/http"
"strconv"
"unsafe"
"github.com/mailru/easyjson/jlexer"
"github.com/mailru/easyjson/jwriter"
@ -21,6 +22,12 @@ type Unmarshaler interface {
UnmarshalEasyJSON(w *jlexer.Lexer)
}
// MarshalerUnmarshaler is an easyjson-compatible marshaler/unmarshaler interface.
type MarshalerUnmarshaler interface {
Marshaler
Unmarshaler
}
// Optional defines an undefined-test method for a type to integrate with 'omitempty' logic.
type Optional interface {
IsDefined() bool
@ -36,9 +43,17 @@ type UnknownsMarshaler interface {
MarshalUnknowns(w *jwriter.Writer, first bool)
}
func isNilInterface(i interface{}) bool {
return (*[2]uintptr)(unsafe.Pointer(&i))[1] == 0
}
// Marshal returns data as a single byte slice. Method is suboptimal as the data is likely to be copied
// from a chain of smaller chunks.
func Marshal(v Marshaler) ([]byte, error) {
if isNilInterface(v) {
return nullBytes, nil
}
w := jwriter.Writer{}
v.MarshalEasyJSON(&w)
return w.BuildBytes()
@ -46,6 +61,10 @@ func Marshal(v Marshaler) ([]byte, error) {
// MarshalToWriter marshals the data to an io.Writer.
func MarshalToWriter(v Marshaler, w io.Writer) (written int, err error) {
if isNilInterface(v) {
return w.Write(nullBytes)
}
jw := jwriter.Writer{}
v.MarshalEasyJSON(&jw)
return jw.DumpTo(w)
@ -56,6 +75,13 @@ func MarshalToWriter(v Marshaler, w io.Writer) (written int, err error) {
// false if an error occurred before any http.ResponseWriter methods were actually
// invoked (in this case a 500 reply is possible).
func MarshalToHTTPResponseWriter(v Marshaler, w http.ResponseWriter) (started bool, written int, err error) {
if isNilInterface(v) {
w.Header().Set("Content-Type", "application/json")
w.Header().Set("Content-Length", strconv.Itoa(len(nullBytes)))
written, err = w.Write(nullBytes)
return true, written, err
}
jw := jwriter.Writer{}
v.MarshalEasyJSON(&jw)
if jw.Error != nil {

vendor/github.com/mailru/easyjson/jlexer/lexer.go vendored

@ -5,6 +5,7 @@
package jlexer
import (
"bytes"
"encoding/base64"
"encoding/json"
"errors"
@ -14,6 +15,8 @@ import (
"unicode"
"unicode/utf16"
"unicode/utf8"
"github.com/josharian/intern"
)
// tokenKind determines type of a token.
@ -32,9 +35,10 @@ const (
type token struct {
kind tokenKind // Type of a token.
boolValue bool // Value if a boolean literal token.
byteValue []byte // Raw value of a token.
delimValue byte
boolValue bool // Value if a boolean literal token.
byteValueCloned bool // true if byteValue was allocated and does not refer to original json body
byteValue []byte // Raw value of a token.
delimValue byte
}
// Lexer is a JSON lexer: it iterates over JSON tokens in a byte slice.
@ -240,23 +244,65 @@ func (r *Lexer) fetchNumber() {
// findStringLen tries to scan into the string literal for ending quote char to determine required size.
// The size will be exact if no escapes are present and may be inexact if there are escaped chars.
func findStringLen(data []byte) (isValid, hasEscapes bool, length int) {
delta := 0
for i := 0; i < len(data); i++ {
switch data[i] {
case '\\':
i++
delta++
if i < len(data) && data[i] == 'u' {
delta++
}
case '"':
return true, (delta > 0), (i - delta)
func findStringLen(data []byte) (isValid bool, length int) {
for {
idx := bytes.IndexByte(data, '"')
if idx == -1 {
return false, len(data)
}
if idx == 0 || (idx > 0 && data[idx-1] != '\\') {
return true, length + idx
}
// count \\\\\\\ sequences. even number of slashes means quote is not really escaped
cnt := 1
for idx-cnt-1 >= 0 && data[idx-cnt-1] == '\\' {
cnt++
}
if cnt%2 == 0 {
return true, length + idx
}
length += idx + 1
data = data[idx+1:]
}
}
// unescapeStringToken performs unescaping of string token.
// if no escaping is needed, original string is returned, otherwise - a new one allocated
func (r *Lexer) unescapeStringToken() (err error) {
data := r.token.byteValue
var unescapedData []byte
for {
i := bytes.IndexByte(data, '\\')
if i == -1 {
break
}
escapedRune, escapedBytes, err := decodeEscape(data[i:])
if err != nil {
r.errParse(err.Error())
return err
}
if unescapedData == nil {
unescapedData = make([]byte, 0, len(r.token.byteValue))
}
var d [4]byte
s := utf8.EncodeRune(d[:], escapedRune)
unescapedData = append(unescapedData, data[:i]...)
unescapedData = append(unescapedData, d[:s]...)
data = data[i+escapedBytes:]
}
return false, false, len(data)
if unescapedData != nil {
r.token.byteValue = append(unescapedData, data...)
r.token.byteValueCloned = true
}
return
}
// getu4 decodes \uXXXX from the beginning of s, returning the hex value,
@ -286,36 +332,30 @@ func getu4(s []byte) rune {
return val
}
// processEscape processes a single escape sequence and returns number of bytes processed.
func (r *Lexer) processEscape(data []byte) (int, error) {
// decodeEscape processes a single escape sequence and returns number of bytes processed.
func decodeEscape(data []byte) (decoded rune, bytesProcessed int, err error) {
if len(data) < 2 {
return 0, fmt.Errorf("syntax error at %v", string(data))
return 0, 0, errors.New("incorrect escape symbol \\ at the end of token")
}
c := data[1]
switch c {
case '"', '/', '\\':
r.token.byteValue = append(r.token.byteValue, c)
return 2, nil
return rune(c), 2, nil
case 'b':
r.token.byteValue = append(r.token.byteValue, '\b')
return 2, nil
return '\b', 2, nil
case 'f':
r.token.byteValue = append(r.token.byteValue, '\f')
return 2, nil
return '\f', 2, nil
case 'n':
r.token.byteValue = append(r.token.byteValue, '\n')
return 2, nil
return '\n', 2, nil
case 'r':
r.token.byteValue = append(r.token.byteValue, '\r')
return 2, nil
return '\r', 2, nil
case 't':
r.token.byteValue = append(r.token.byteValue, '\t')
return 2, nil
return '\t', 2, nil
case 'u':
rr := getu4(data)
if rr < 0 {
return 0, errors.New("syntax error")
return 0, 0, errors.New("incorrectly escaped \\uXXXX sequence")
}
read := 6
@ -328,13 +368,10 @@ func (r *Lexer) processEscape(data []byte) (int, error) {
rr = unicode.ReplacementChar
}
}
var d [4]byte
s := utf8.EncodeRune(d[:], rr)
r.token.byteValue = append(r.token.byteValue, d[:s]...)
return read, nil
return rr, read, nil
}
return 0, errors.New("syntax error")
return 0, 0, errors.New("incorrectly escaped bytes")
}
// fetchString scans a string literal token.
@ -342,43 +379,14 @@ func (r *Lexer) fetchString() {
r.pos++
data := r.Data[r.pos:]
isValid, hasEscapes, length := findStringLen(data)
isValid, length := findStringLen(data)
if !isValid {
r.pos += length
r.errParse("unterminated string literal")
return
}
if !hasEscapes {
r.token.byteValue = data[:length]
r.pos += length + 1
return
}
r.token.byteValue = make([]byte, 0, length)
p := 0
for i := 0; i < len(data); {
switch data[i] {
case '"':
r.pos += i + 1
r.token.byteValue = append(r.token.byteValue, data[p:i]...)
i++
return
case '\\':
r.token.byteValue = append(r.token.byteValue, data[p:i]...)
off, err := r.processEscape(data[i:])
if err != nil {
r.errParse(err.Error())
return
}
i += off
p = i
default:
i++
}
}
r.errParse("unterminated string literal")
r.token.byteValue = data[:length]
r.pos += length + 1 // skip closing '"' as well
}
// scanToken scans the next token if no token is currently available in the lexer.
@ -602,7 +610,7 @@ func (r *Lexer) Consumed() {
}
}
func (r *Lexer) unsafeString() (string, []byte) {
func (r *Lexer) unsafeString(skipUnescape bool) (string, []byte) {
if r.token.kind == tokenUndef && r.Ok() {
r.FetchToken()
}
@ -610,6 +618,13 @@ func (r *Lexer) unsafeString() (string, []byte) {
r.errInvalidToken("string")
return "", nil
}
if !skipUnescape {
if err := r.unescapeStringToken(); err != nil {
r.errInvalidToken("string")
return "", nil
}
}
bytes := r.token.byteValue
ret := bytesToStr(r.token.byteValue)
r.consume()
@ -621,13 +636,19 @@ func (r *Lexer) unsafeString() (string, []byte) {
// Warning: returned string may point to the input buffer, so the string should not outlive
// the input buffer. Intended pattern of usage is as an argument to a switch statement.
func (r *Lexer) UnsafeString() string {
ret, _ := r.unsafeString()
ret, _ := r.unsafeString(false)
return ret
}
// UnsafeBytes returns the byte slice if the token is a string literal.
func (r *Lexer) UnsafeBytes() []byte {
_, ret := r.unsafeString()
_, ret := r.unsafeString(false)
return ret
}
// UnsafeFieldName returns current member name string token
func (r *Lexer) UnsafeFieldName(skipUnescape bool) string {
ret, _ := r.unsafeString(skipUnescape)
return ret
}
@ -640,7 +661,34 @@ func (r *Lexer) String() string {
r.errInvalidToken("string")
return ""
}
ret := string(r.token.byteValue)
if err := r.unescapeStringToken(); err != nil {
r.errInvalidToken("string")
return ""
}
var ret string
if r.token.byteValueCloned {
ret = bytesToStr(r.token.byteValue)
} else {
ret = string(r.token.byteValue)
}
r.consume()
return ret
}
// StringIntern reads a string literal, and performs string interning on it.
func (r *Lexer) StringIntern() string {
if r.token.kind == tokenUndef && r.Ok() {
r.FetchToken()
}
if !r.Ok() || r.token.kind != tokenString {
r.errInvalidToken("string")
return ""
}
if err := r.unescapeStringToken(); err != nil {
r.errInvalidToken("string")
return ""
}
ret := intern.Bytes(r.token.byteValue)
r.consume()
return ret
}
@ -839,7 +887,7 @@ func (r *Lexer) Int() int {
}
func (r *Lexer) Uint8Str() uint8 {
s, b := r.unsafeString()
s, b := r.unsafeString(false)
if !r.Ok() {
return 0
}
@ -856,7 +904,7 @@ func (r *Lexer) Uint8Str() uint8 {
}
func (r *Lexer) Uint16Str() uint16 {
s, b := r.unsafeString()
s, b := r.unsafeString(false)
if !r.Ok() {
return 0
}
@ -873,7 +921,7 @@ func (r *Lexer) Uint16Str() uint16 {
}
func (r *Lexer) Uint32Str() uint32 {
s, b := r.unsafeString()
s, b := r.unsafeString(false)
if !r.Ok() {
return 0
}
@ -890,7 +938,7 @@ func (r *Lexer) Uint32Str() uint32 {
}
func (r *Lexer) Uint64Str() uint64 {
s, b := r.unsafeString()
s, b := r.unsafeString(false)
if !r.Ok() {
return 0
}
@ -915,7 +963,7 @@ func (r *Lexer) UintptrStr() uintptr {
}
func (r *Lexer) Int8Str() int8 {
s, b := r.unsafeString()
s, b := r.unsafeString(false)
if !r.Ok() {
return 0
}
@ -932,7 +980,7 @@ func (r *Lexer) Int8Str() int8 {
}
func (r *Lexer) Int16Str() int16 {
s, b := r.unsafeString()
s, b := r.unsafeString(false)
if !r.Ok() {
return 0
}
@ -949,7 +997,7 @@ func (r *Lexer) Int16Str() int16 {
}
func (r *Lexer) Int32Str() int32 {
s, b := r.unsafeString()
s, b := r.unsafeString(false)
if !r.Ok() {
return 0
}
@ -966,7 +1014,7 @@ func (r *Lexer) Int32Str() int32 {
}
func (r *Lexer) Int64Str() int64 {
s, b := r.unsafeString()
s, b := r.unsafeString(false)
if !r.Ok() {
return 0
}
@ -1004,7 +1052,7 @@ func (r *Lexer) Float32() float32 {
}
func (r *Lexer) Float32Str() float32 {
s, b := r.unsafeString()
s, b := r.unsafeString(false)
if !r.Ok() {
return 0
}
@ -1037,7 +1085,7 @@ func (r *Lexer) Float64() float64 {
}
func (r *Lexer) Float64Str() float64 {
s, b := r.unsafeString()
s, b := r.unsafeString(false)
if !r.Ok() {
return 0
}

vendor/github.com/mailru/easyjson/jwriter/writer.go vendored

@ -297,11 +297,9 @@ func (w *Writer) String(s string) {
p := 0 // last non-escape symbol
var escapeTable [128]bool
escapeTable := &htmlEscapeTable
if w.NoEscapeHTML {
escapeTable = htmlNoEscapeTable
} else {
escapeTable = htmlEscapeTable
escapeTable = &htmlNoEscapeTable
}
for i := 0; i < len(s); {

vendor/github.com/mailru/easyjson/unknown_fields.go vendored

@ -1,8 +1,6 @@
package easyjson
import (
json "encoding/json"
jlexer "github.com/mailru/easyjson/jlexer"
"github.com/mailru/easyjson/jwriter"
)
@ -10,14 +8,14 @@ import (
// UnknownFieldsProxy implemets UnknownsUnmarshaler and UnknownsMarshaler
// use it as embedded field in your structure to parse and then serialize unknown struct fields
type UnknownFieldsProxy struct {
unknownFields map[string]interface{}
unknownFields map[string][]byte
}
func (s *UnknownFieldsProxy) UnmarshalUnknown(in *jlexer.Lexer, key string) {
if s.unknownFields == nil {
s.unknownFields = make(map[string]interface{}, 1)
s.unknownFields = make(map[string][]byte, 1)
}
s.unknownFields[key] = in.Interface()
s.unknownFields[key] = in.Raw()
}
func (s UnknownFieldsProxy) MarshalUnknowns(out *jwriter.Writer, first bool) {
@ -29,6 +27,6 @@ func (s UnknownFieldsProxy) MarshalUnknowns(out *jwriter.Writer, first bool) {
}
out.String(string(key))
out.RawByte(':')
out.Raw(json.Marshal(val))
out.Raw(val, nil)
}
}
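
For context, a hedged sketch of how this proxy is meant to be consumed (the `Config` type is hypothetical, and the easyjson generator must be run on it for the unknown-field hooks to be wired in):
```go
//easyjson:json
type Config struct {
	easyjson.UnknownFieldsProxy // keys not listed below are captured on unmarshal

	Name string `json:"name"`
}
```
With the change above, unknown values are kept as raw JSON bytes instead of being decoded into `interface{}`, so they round-trip byte-for-byte through `MarshalUnknowns`.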