Signed-off-by: Jessica Frazelle <acidburn@docker.com>
@@ -44,7 +44,7 @@ clone git github.com/vbatts/tar-split v0.9.10
 
 clone git github.com/docker/notary 089d8450d8928aa1c58fd03f09cabbde9bcb4590
 clone git github.com/endophage/gotuf 2df1c8e0a7b7e10ae2113bf37aaa1bf1c1de8cc5
-clone git github.com/jfrazelle/go 6e461eb70cb4187b41a84e9a567d7137bdbe0f16
+clone git github.com/jfrazelle/go v1.5.1-1
 clone git github.com/agl/ed25519 d2b94fd789ea21d12fac1a4443dd3a3f79cda72c
 
 clone git github.com/opencontainers/runc 6c198ae2d065c37f44316e0de3df7f3b88950923 # libcontainer
new file mode 100644
@@ -0,0 +1,27 @@
+Copyright (c) 2012 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+   * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+   * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -48,6 +48,13 @@ import (
 //	map[string]interface{}, for JSON objects
 //	nil for JSON null
 //
+// To unmarshal a JSON array into a slice, Unmarshal resets the slice to nil
+// and then appends each element to the slice.
+//
+// To unmarshal a JSON object into a map, Unmarshal replaces the map
+// with an empty map and then adds key-value pairs from the object to
+// the map.
+//
 // If a JSON value is not appropriate for a given target type,
 // or if a JSON number overflows the target type, Unmarshal
 // skips that field and completes the unmarshalling as best it can.
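
As a quick illustration of the slice and map behavior documented above, a minimal sketch using the public Unmarshal API (the literals and variable names are only for illustration):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// A slice target: the previous contents are not kept.
	nums := []int{10, 20, 30}
	if err := json.Unmarshal([]byte(`[1, 2]`), &nums); err != nil {
		fmt.Println(err)
	}
	fmt.Println(nums) // [1 2]

	// A map target: key-value pairs from the object are added.
	var m map[string]int
	if err := json.Unmarshal([]byte(`{"a": 1, "b": 2}`), &m); err != nil {
		fmt.Println(err)
	}
	fmt.Println(m["a"], m["b"]) // 1 2
}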
@@ -90,8 +97,9 @@ type Unmarshaler interface {
 // An UnmarshalTypeError describes a JSON value that was
 // not appropriate for a value of a specific Go type.
 type UnmarshalTypeError struct {
-	Value string       // description of JSON value - "bool", "array", "number -5"
-	Type  reflect.Type // type of Go value it could not be assigned to
+	Value  string       // description of JSON value - "bool", "array", "number -5"
+	Type   reflect.Type // type of Go value it could not be assigned to
+	Offset int64        // error occurred after reading Offset bytes
 }
 
 func (e *UnmarshalTypeError) Error() string {
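
A sketch of how a caller might surface the new Offset field; the input and struct here are hypothetical, and the exact offset depends on how far the decoder had read when the error was recorded:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var v struct {
		Name string `json:"name"`
	}
	// "name" holds a number, not a string, so Unmarshal reports a type error.
	err := json.Unmarshal([]byte(`{"name": 42}`), &v)
	if te, ok := err.(*json.UnmarshalTypeError); ok {
		// Offset reports how many bytes had been read when the error occurred.
		fmt.Printf("cannot decode %s into %s at byte %d\n", te.Value, te.Type, te.Offset)
	}
}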
@@ -378,7 +386,7 @@ func (d *decodeState) array(v reflect.Value) {
 		return
 	}
 	if ut != nil {
-		d.saveError(&UnmarshalTypeError{"array", v.Type()})
+		d.saveError(&UnmarshalTypeError{"array", v.Type(), int64(d.off)})
 		d.off--
 		d.next()
 		return
@@ -397,7 +405,7 @@ func (d *decodeState) array(v reflect.Value) {
 		// Otherwise it's invalid.
 		fallthrough
 	default:
-		d.saveError(&UnmarshalTypeError{"array", v.Type()})
+		d.saveError(&UnmarshalTypeError{"array", v.Type(), int64(d.off)})
 		d.off--
 		d.next()
 		return
@@ -486,7 +494,7 @@ func (d *decodeState) object(v reflect.Value) {
 		return
 	}
 	if ut != nil {
-		d.saveError(&UnmarshalTypeError{"object", v.Type()})
+		d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
 		d.off--
 		d.next() // skip over { } in input
 		return
@@ -505,7 +513,7 @@ func (d *decodeState) object(v reflect.Value) {
 		// map must have string kind
 		t := v.Type()
 		if t.Key().Kind() != reflect.String {
-			d.saveError(&UnmarshalTypeError{"object", v.Type()})
+			d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
 			d.off--
 			d.next() // skip over { } in input
 			return
@@ -516,7 +524,7 @@ func (d *decodeState) object(v reflect.Value) {
 	case reflect.Struct:
 
 	default:
-		d.saveError(&UnmarshalTypeError{"object", v.Type()})
+		d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
 		d.off--
 		d.next() // skip over { } in input
 		return
@@ -600,7 +608,7 @@ func (d *decodeState) object(v reflect.Value) {
 			case string:
 				d.literalStore([]byte(qv), subv, true)
 			default:
-				d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal unquoted value into %v", item, v.Type()))
+				d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal unquoted value into %v", subv.Type()))
 			}
 		} else {
 			d.value(subv)
@@ -647,7 +655,7 @@ func (d *decodeState) convertNumber(s string) (interface{}, error) {
 	}
 	f, err := strconv.ParseFloat(s, 64)
 	if err != nil {
-		return nil, &UnmarshalTypeError{"number " + s, reflect.TypeOf(0.0)}
+		return nil, &UnmarshalTypeError{"number " + s, reflect.TypeOf(0.0), int64(d.off)}
 	}
 	return f, nil
 }
@@ -680,8 +688,9 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
 			if fromQuoted {
 				d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
 			} else {
-				d.saveError(&UnmarshalTypeError{"string", v.Type()})
+				d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)})
 			}
+			return
 		}
 		s, ok := unquoteBytes(item)
 		if !ok {
@@ -714,7 +723,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
 			if fromQuoted {
 				d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
 			} else {
-				d.saveError(&UnmarshalTypeError{"bool", v.Type()})
+				d.saveError(&UnmarshalTypeError{"bool", v.Type(), int64(d.off)})
 			}
 		case reflect.Bool:
 			v.SetBool(value)
@@ -722,7 +731,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
 			if v.NumMethod() == 0 {
 				v.Set(reflect.ValueOf(value))
 			} else {
-				d.saveError(&UnmarshalTypeError{"bool", v.Type()})
+				d.saveError(&UnmarshalTypeError{"bool", v.Type(), int64(d.off)})
 			}
 		}
 
@@ -737,10 +746,10 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
 		}
 		switch v.Kind() {
 		default:
-			d.saveError(&UnmarshalTypeError{"string", v.Type()})
+			d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)})
 		case reflect.Slice:
-			if v.Type() != byteSliceType {
-				d.saveError(&UnmarshalTypeError{"string", v.Type()})
+			if v.Type().Elem().Kind() != reflect.Uint8 {
+				d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)})
 				break
 			}
 			b := make([]byte, base64.StdEncoding.DecodedLen(len(s)))
@@ -756,7 +765,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
 			if v.NumMethod() == 0 {
 				v.Set(reflect.ValueOf(string(s)))
 			} else {
-				d.saveError(&UnmarshalTypeError{"string", v.Type()})
+				d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)})
 			}
 		}
 
@@ -778,7 +787,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
 			if fromQuoted {
 				d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
 			} else {
-				d.error(&UnmarshalTypeError{"number", v.Type()})
+				d.error(&UnmarshalTypeError{"number", v.Type(), int64(d.off)})
 			}
 		case reflect.Interface:
 			n, err := d.convertNumber(s)
@@ -787,7 +796,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
 				break
 			}
 			if v.NumMethod() != 0 {
-				d.saveError(&UnmarshalTypeError{"number", v.Type()})
+				d.saveError(&UnmarshalTypeError{"number", v.Type(), int64(d.off)})
 				break
 			}
 			v.Set(reflect.ValueOf(n))
@@ -795,7 +804,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
 		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
 			n, err := strconv.ParseInt(s, 10, 64)
 			if err != nil || v.OverflowInt(n) {
-				d.saveError(&UnmarshalTypeError{"number " + s, v.Type()})
+				d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)})
 				break
 			}
 			v.SetInt(n)
@@ -803,7 +812,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
 		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
 			n, err := strconv.ParseUint(s, 10, 64)
 			if err != nil || v.OverflowUint(n) {
-				d.saveError(&UnmarshalTypeError{"number " + s, v.Type()})
+				d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)})
 				break
 			}
 			v.SetUint(n)
@@ -811,7 +820,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
 		case reflect.Float32, reflect.Float64:
 			n, err := strconv.ParseFloat(s, v.Type().Bits())
 			if err != nil || v.OverflowFloat(n) {
-				d.saveError(&UnmarshalTypeError{"number " + s, v.Type()})
+				d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)})
 				break
 			}
 			v.SetFloat(n)
@@ -7,7 +7,7 @@
 // in the documentation for the Marshal and Unmarshal functions.
 //
 // See "JSON and Go" for an introduction to this package:
-// http://golang.org/doc/articles/json_and_go.html
+// https://golang.org/doc/articles/json_and_go.html
 package json
 
 import (
@@ -79,8 +79,8 @@ import (
 //
 // The "string" option signals that a field is stored as JSON inside a
 // JSON-encoded string. It applies only to fields of string, floating point,
-// or integer types. This extra level of encoding is sometimes used when
-// communicating with JavaScript programs:
+// integer, or boolean types. This extra level of encoding is sometimes used
+// when communicating with JavaScript programs:
 //
 //    Int64String int64 `json:",string"`
 //
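
A brief sketch of the ",string" option described above, including the newly documented boolean support (the struct and field names are made up for illustration):

package main

import (
	"encoding/json"
	"fmt"
)

type Settings struct {
	ID      int64 `json:"id,string"`
	Enabled bool  `json:"enabled,string"`
}

func main() {
	out, _ := json.Marshal(Settings{ID: 42, Enabled: true})
	fmt.Println(string(out)) // {"id":"42","enabled":"true"}

	var s Settings
	if err := json.Unmarshal([]byte(`{"id":"7","enabled":"false"}`), &s); err != nil {
		fmt.Println(err)
	}
	fmt.Println(s.ID, s.Enabled) // 7 false
}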
@@ -113,8 +113,8 @@ import (
 // a JSON tag of "-".
 //
 // Map values encode as JSON objects.
-// The map's key type must be string; the object keys are used directly
-// as map keys.
+// The map's key type must be string; the map keys are used as JSON object
+// keys, subject to the UTF-8 coercion described for string values above.
 //
 // Pointer values encode as the value pointed to.
 // A nil pointer encodes as the null JSON object.
@@ -287,8 +287,6 @@ func (e *encodeState) error(err error) {
 	panic(err)
 }
 
-var byteSliceType = reflect.TypeOf([]byte(nil))
-
 func isEmptyValue(v reflect.Value) bool {
 	switch v.Kind() {
 	case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
@@ -1075,6 +1073,19 @@ func typeFields(t reflect.Type) []field {
 				ft = ft.Elem()
 			}
 
+			// Only strings, floats, integers, and booleans can be quoted.
+			quoted := false
+			if opts.Contains("string") {
+				switch ft.Kind() {
+				case reflect.Bool,
+					reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
+					reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
+					reflect.Float32, reflect.Float64,
+					reflect.String:
+					quoted = true
+				}
+			}
+
 			// Record found field and index sequence.
 			if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct {
 				tagged := name != ""
@@ -1087,7 +1098,7 @@ func typeFields(t reflect.Type) []field {
 					index:     index,
 					typ:       ft,
 					omitEmpty: opts.Contains("omitempty"),
-					quoted:    opts.Contains("string"),
+					quoted:    quoted,
 				}))
 				if count[f.typ] > 1 {
 					// If there were multiple instances, add a second,
@@ -26,7 +26,7 @@ const (
 // The letters S and K are special because they map to 3 runes, not just 2:
 //   * S maps to s and to U+017F 'ſ' Latin small letter long s
 //   * k maps to K and to U+212A 'K' Kelvin sign
-// See http://play.golang.org/p/tTxjOc0OGo
+// See https://play.golang.org/p/tTxjOc0OGo
 //
 // The returned function is specialized for matching against s and
 // should only be given s. It's not curried for performance reasons.
@@ -38,8 +38,15 @@ func nextValue(data []byte, scan *scanner) (value, rest []byte, err error) {
 	scan.reset()
 	for i, c := range data {
 		v := scan.step(scan, int(c))
-		if v >= scanEnd {
+		if v >= scanEndObject {
 			switch v {
			// probe the scanner with a space to determine whether we will
			// get scanEnd on the next character. Otherwise, if the next character
			// is not a space, scanEndTop allocates a needless error.
+			case scanEndObject, scanEndArray:
+				if scan.step(scan, ' ') == scanEnd {
+					return data[:i+1], data[i+1:], nil
+				}
 			case scanError:
 				return nil, nil, scan.err
 			case scanEnd:
@@ -12,11 +12,15 @@ import (
 
 // A Decoder reads and decodes JSON objects from an input stream.
 type Decoder struct {
-	r    io.Reader
-	buf  []byte
-	d    decodeState
-	scan scanner
-	err  error
+	r     io.Reader
+	buf   []byte
+	d     decodeState
+	scanp int // start of unread data in buf
+	scan  scanner
+	err   error
+
+	tokenState int
+	tokenStack []int
 }
 
 // NewDecoder returns a new decoder that reads from r.
@@ -41,20 +45,29 @@ func (dec *Decoder) Decode(v interface{}) error {
 		return dec.err
 	}
 
+	if err := dec.tokenPrepareForDecode(); err != nil {
+		return err
+	}
+
+	if !dec.tokenValueAllowed() {
+		return &SyntaxError{msg: "not at beginning of value"}
+	}
+
+	// Read whole value into buffer.
 	n, err := dec.readValue()
 	if err != nil {
 		return err
 	}
+	dec.d.init(dec.buf[dec.scanp : dec.scanp+n])
+	dec.scanp += n
 
 	// Don't save err from unmarshal into dec.err:
 	// the connection is still usable since we read a complete JSON
 	// object from it before the error happened.
-	dec.d.init(dec.buf[0:n])
 	err = dec.d.unmarshal(v)
 
-	// Slide rest of data down.
-	rest := copy(dec.buf, dec.buf[n:])
-	dec.buf = dec.buf[0:rest]
+	// fixup token streaming state
+	dec.tokenValueEnd()
 
 	return err
 }
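
For context, a small sketch of the Decoder flow this change preserves: successive calls to Decode keep consuming values from the same stream, with dec.scanp now tracking the unread portion of the buffer instead of sliding the data down after every value. The reader contents are illustrative only:

package main

import (
	"encoding/json"
	"fmt"
	"io"
	"strings"
)

func main() {
	// Two JSON values back to back on one stream.
	dec := json.NewDecoder(strings.NewReader(`{"n": 1} {"n": 2}`))
	for {
		var v struct{ N int }
		if err := dec.Decode(&v); err == io.EOF {
			break
		} else if err != nil {
			fmt.Println("decode error:", err)
			break
		}
		fmt.Println(v.N) // 1, then 2
	}
}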
@@ -62,7 +75,7 @@ func (dec *Decoder) Decode(v interface{}) error {
 // Buffered returns a reader of the data remaining in the Decoder's
 // buffer. The reader is valid until the next call to Decode.
 func (dec *Decoder) Buffered() io.Reader {
-	return bytes.NewReader(dec.buf)
+	return bytes.NewReader(dec.buf[dec.scanp:])
 }
 
 // readValue reads a JSON value into dec.buf.
@@ -70,7 +83,7 @@ func (dec *Decoder) Buffered() io.Reader {
 func (dec *Decoder) readValue() (int, error) {
 	dec.scan.reset()
 
-	scanp := 0
+	scanp := dec.scanp
 	var err error
 Input:
 	for {
@@ -111,20 +124,35 @@ Input:
 			return 0, err
 		}
 
-		// Make room to read more into the buffer.
-		const minRead = 512
-		if cap(dec.buf)-len(dec.buf) < minRead {
-			newBuf := make([]byte, len(dec.buf), 2*cap(dec.buf)+minRead)
-			copy(newBuf, dec.buf)
-			dec.buf = newBuf
-		}
+		n := scanp - dec.scanp
+		err = dec.refill()
+		scanp = dec.scanp + n
+	}
+	return scanp - dec.scanp, nil
+}
 
-		// Read. Delay error for next iteration (after scan).
-		var n int
-		n, err = dec.r.Read(dec.buf[len(dec.buf):cap(dec.buf)])
-		dec.buf = dec.buf[0 : len(dec.buf)+n]
+func (dec *Decoder) refill() error {
+	// Make room to read more into the buffer.
+	// First slide down data already consumed.
+	if dec.scanp > 0 {
+		n := copy(dec.buf, dec.buf[dec.scanp:])
+		dec.buf = dec.buf[:n]
+		dec.scanp = 0
 	}
-	return scanp, nil
+
+	// Grow buffer if not large enough.
+	const minRead = 512
+	if cap(dec.buf)-len(dec.buf) < minRead {
+		newBuf := make([]byte, len(dec.buf), 2*cap(dec.buf)+minRead)
+		copy(newBuf, dec.buf)
+		dec.buf = newBuf
+	}
+
+	// Read. Delay error for next iteration (after scan).
+	n, err := dec.r.Read(dec.buf[len(dec.buf):cap(dec.buf)])
+	dec.buf = dec.buf[0 : len(dec.buf)+n]
+
+	return err
 }
 
 func nonSpace(b []byte) bool {
@@ -205,3 +233,255 @@ func (m *RawMessage) UnmarshalJSON(data []byte) error {
 
 var _ Marshaler = (*RawMessage)(nil)
 var _ Unmarshaler = (*RawMessage)(nil)
+
+// A Token holds a value of one of these types:
+//
+//	Delim, for the four JSON delimiters [ ] { }
+//	bool, for JSON booleans
+//	float64, for JSON numbers
+//	Number, for JSON numbers
+//	string, for JSON string literals
+//	nil, for JSON null
+//
+type Token interface{}
+
+const (
+	tokenTopValue = iota
+	tokenArrayStart
+	tokenArrayValue
+	tokenArrayComma
+	tokenObjectStart
+	tokenObjectKey
+	tokenObjectColon
+	tokenObjectValue
+	tokenObjectComma
+)
+
+// advance tokenstate from a separator state to a value state
+func (dec *Decoder) tokenPrepareForDecode() error {
+	// Note: Not calling peek before switch, to avoid
+	// putting peek into the standard Decode path.
+	// peek is only called when using the Token API.
+	switch dec.tokenState {
+	case tokenArrayComma:
+		c, err := dec.peek()
+		if err != nil {
+			return err
+		}
+		if c != ',' {
+			return &SyntaxError{"expected comma after array element", 0}
+		}
+		dec.scanp++
+		dec.tokenState = tokenArrayValue
+	case tokenObjectColon:
+		c, err := dec.peek()
+		if err != nil {
+			return err
+		}
+		if c != ':' {
+			return &SyntaxError{"expected colon after object key", 0}
+		}
+		dec.scanp++
+		dec.tokenState = tokenObjectValue
+	}
+	return nil
+}
+
+func (dec *Decoder) tokenValueAllowed() bool {
+	switch dec.tokenState {
+	case tokenTopValue, tokenArrayStart, tokenArrayValue, tokenObjectValue:
+		return true
+	}
+	return false
+}
+
+func (dec *Decoder) tokenValueEnd() {
+	switch dec.tokenState {
+	case tokenArrayStart, tokenArrayValue:
+		dec.tokenState = tokenArrayComma
+	case tokenObjectValue:
+		dec.tokenState = tokenObjectComma
+	}
+}
+
+// A Delim is a JSON array or object delimiter, one of [ ] { or }.
+type Delim rune
+
+func (d Delim) String() string {
+	return string(d)
+}
+
+// Token returns the next JSON token in the input stream.
+// At the end of the input stream, Token returns nil, io.EOF.
+//
+// Token guarantees that the delimiters [ ] { } it returns are
+// properly nested and matched: if Token encounters an unexpected
+// delimiter in the input, it will return an error.
+//
+// The input stream consists of basic JSON values—bool, string,
+// number, and null—along with delimiters [ ] { } of type Delim
+// to mark the start and end of arrays and objects.
+// Commas and colons are elided.
+func (dec *Decoder) Token() (Token, error) {
+	for {
+		c, err := dec.peek()
+		if err != nil {
+			return nil, err
+		}
+		switch c {
+		case '[':
+			if !dec.tokenValueAllowed() {
+				return dec.tokenError(c)
+			}
+			dec.scanp++
+			dec.tokenStack = append(dec.tokenStack, dec.tokenState)
+			dec.tokenState = tokenArrayStart
+			return Delim('['), nil
+
+		case ']':
+			if dec.tokenState != tokenArrayStart && dec.tokenState != tokenArrayComma {
+				return dec.tokenError(c)
+			}
+			dec.scanp++
+			dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
+			dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
+			dec.tokenValueEnd()
+			return Delim(']'), nil
+
+		case '{':
+			if !dec.tokenValueAllowed() {
+				return dec.tokenError(c)
+			}
+			dec.scanp++
+			dec.tokenStack = append(dec.tokenStack, dec.tokenState)
+			dec.tokenState = tokenObjectStart
+			return Delim('{'), nil
+
+		case '}':
+			if dec.tokenState != tokenObjectStart && dec.tokenState != tokenObjectComma {
+				return dec.tokenError(c)
+			}
+			dec.scanp++
+			dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
+			dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
+			dec.tokenValueEnd()
+			return Delim('}'), nil
+
+		case ':':
+			if dec.tokenState != tokenObjectColon {
+				return dec.tokenError(c)
+			}
+			dec.scanp++
+			dec.tokenState = tokenObjectValue
+			continue
+
+		case ',':
+			if dec.tokenState == tokenArrayComma {
+				dec.scanp++
+				dec.tokenState = tokenArrayValue
+				continue
+			}
+			if dec.tokenState == tokenObjectComma {
+				dec.scanp++
+				dec.tokenState = tokenObjectKey
+				continue
+			}
+			return dec.tokenError(c)
+
+		case '"':
+			if dec.tokenState == tokenObjectStart || dec.tokenState == tokenObjectKey {
+				var x string
+				old := dec.tokenState
+				dec.tokenState = tokenTopValue
+				err := dec.Decode(&x)
+				dec.tokenState = old
+				if err != nil {
+					clearOffset(err)
+					return nil, err
+				}
+				dec.tokenState = tokenObjectColon
+				return x, nil
+			}
+			fallthrough
+
+		default:
+			if !dec.tokenValueAllowed() {
+				return dec.tokenError(c)
+			}
+			var x interface{}
+			if err := dec.Decode(&x); err != nil {
+				clearOffset(err)
+				return nil, err
+			}
+			return x, nil
+		}
+	}
+}
+
+func clearOffset(err error) {
+	if s, ok := err.(*SyntaxError); ok {
+		s.Offset = 0
+	}
+}
+
+func (dec *Decoder) tokenError(c byte) (Token, error) {
+	var context string
+	switch dec.tokenState {
+	case tokenTopValue:
+		context = " looking for beginning of value"
+	case tokenArrayStart, tokenArrayValue, tokenObjectValue:
+		context = " looking for beginning of value"
+	case tokenArrayComma:
+		context = " after array element"
+	case tokenObjectKey:
+		context = " looking for beginning of object key string"
+	case tokenObjectColon:
+		context = " after object key"
+	case tokenObjectComma:
+		context = " after object key:value pair"
+	}
+	return nil, &SyntaxError{"invalid character " + quoteChar(int(c)) + " " + context, 0}
+}
+
+// More reports whether there is another element in the
+// current array or object being parsed.
+func (dec *Decoder) More() bool {
+	c, err := dec.peek()
+	return err == nil && c != ']' && c != '}'
+}
+
+func (dec *Decoder) peek() (byte, error) {
+	var err error
+	for {
+		for i := dec.scanp; i < len(dec.buf); i++ {
+			c := dec.buf[i]
+			if isSpace(rune(c)) {
+				continue
+			}
+			dec.scanp = i
+			return c, nil
+		}
+		// buffer has been scanned, now report any error
+		if err != nil {
+			return 0, err
+		}
+		err = dec.refill()
+	}
+}
+
+/*
+TODO
+
+// EncodeToken writes the given JSON token to the stream.
+// It returns an error if the delimiters [ ] { } are not properly used.
+//
+// EncodeToken does not call Flush, because usually it is part of
+// a larger operation such as Encode, and those will call Flush when finished.
+// Callers that create an Encoder and then invoke EncodeToken directly,
+// without using Encode, need to call Flush when finished to ensure that
+// the JSON is written to the underlying writer.
+func (e *Encoder) EncodeToken(t Token) error {
+	...
+}
+
+*/
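
To round out the new Token API, a usage sketch showing Token and More walking a stream; the input and handling are illustrative only. Delim values mark the array and object boundaries, while commas and colons are consumed silently:

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"strings"
)

func main() {
	dec := json.NewDecoder(strings.NewReader(`[{"name": "alpha"}, {"name": "beta"}]`))

	// Opening delimiter of the array: json.Delim('[').
	if _, err := dec.Token(); err != nil {
		log.Fatal(err)
	}

	// More reports whether another array element follows.
	for dec.More() {
		var m map[string]string
		if err := dec.Decode(&m); err != nil {
			log.Fatal(err)
		}
		fmt.Println(m["name"]) // alpha, then beta
	}

	// Closing delimiter of the array: json.Delim(']').
	if _, err := dec.Token(); err != nil {
		log.Fatal(err)
	}
}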