mirror of
https://github.com/ceph/ceph-csi.git
synced 2025-06-14 10:53:34 +00:00
vendor files
This commit is contained in:
37
vendor/k8s.io/apimachinery/pkg/util/yaml/BUILD
generated
vendored
Normal file
37
vendor/k8s.io/apimachinery/pkg/util/yaml/BUILD
generated
vendored
Normal file
@ -0,0 +1,37 @@
|
||||
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_library",
    "go_test",
)

go_test(
    name = "go_default_test",
    srcs = ["decoder_test.go"],
    importpath = "k8s.io/apimachinery/pkg/util/yaml",
    library = ":go_default_library",
)

go_library(
    name = "go_default_library",
    srcs = ["decoder.go"],
    importpath = "k8s.io/apimachinery/pkg/util/yaml",
    deps = [
        "//vendor/github.com/ghodss/yaml:go_default_library",
        "//vendor/github.com/golang/glog:go_default_library",
    ],
)

filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
)
|
346
vendor/k8s.io/apimachinery/pkg/util/yaml/decoder.go
generated
vendored
Normal file
346
vendor/k8s.io/apimachinery/pkg/util/yaml/decoder.go
generated
vendored
Normal file
@ -0,0 +1,346 @@
|
||||
/*
|
||||
Copyright 2014 The Kubernetes Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
"github.com/ghodss/yaml"
|
||||
"github.com/golang/glog"
|
||||
)
|
||||
|
||||
// ToJSON converts a single YAML document into a JSON document
|
||||
// or returns an error. If the document appears to be JSON the
|
||||
// YAML decoding path is not used (so that error messages are
|
||||
// JSON specific).
|
||||
func ToJSON(data []byte) ([]byte, error) {
|
||||
if hasJSONPrefix(data) {
|
||||
return data, nil
|
||||
}
|
||||
return yaml.YAMLToJSON(data)
|
||||
}
|
||||
|
||||
// YAMLToJSONDecoder decodes YAML documents from an io.Reader by
|
||||
// separating individual documents. It first converts the YAML
|
||||
// body to JSON, then unmarshals the JSON.
|
||||
type YAMLToJSONDecoder struct {
|
||||
reader Reader
|
||||
}
|
||||
|
||||
// NewYAMLToJSONDecoder decodes YAML documents from the provided
|
||||
// stream in chunks by converting each document (as defined by
|
||||
// the YAML spec) into its own chunk, converting it to JSON via
|
||||
// yaml.YAMLToJSON, and then passing it to json.Decoder.
|
||||
func NewYAMLToJSONDecoder(r io.Reader) *YAMLToJSONDecoder {
|
||||
reader := bufio.NewReader(r)
|
||||
return &YAMLToJSONDecoder{
|
||||
reader: NewYAMLReader(reader),
|
||||
}
|
||||
}
|
||||
|
||||
// Decode reads a YAML document as JSON from the stream or returns
|
||||
// an error. The decoding rules match json.Unmarshal, not
|
||||
// yaml.Unmarshal.
|
||||
func (d *YAMLToJSONDecoder) Decode(into interface{}) error {
|
||||
bytes, err := d.reader.Read()
|
||||
if err != nil && err != io.EOF {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(bytes) != 0 {
|
||||
err := yaml.Unmarshal(bytes, into)
|
||||
if err != nil {
|
||||
return YAMLSyntaxError{err}
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// YAMLDecoder reads chunks of objects and returns ErrShortBuffer if
|
||||
// the data is not sufficient.
|
||||
type YAMLDecoder struct {
|
||||
r io.ReadCloser
|
||||
scanner *bufio.Scanner
|
||||
remaining []byte
|
||||
}
|
||||
|
||||
// NewDocumentDecoder decodes YAML documents from the provided
|
||||
// stream in chunks by converting each document (as defined by
|
||||
// the YAML spec) into its own chunk. io.ErrShortBuffer will be
|
||||
// returned if the entire buffer could not be read to assist
|
||||
// the caller in framing the chunk.
|
||||
func NewDocumentDecoder(r io.ReadCloser) io.ReadCloser {
|
||||
scanner := bufio.NewScanner(r)
|
||||
scanner.Split(splitYAMLDocument)
|
||||
return &YAMLDecoder{
|
||||
r: r,
|
||||
scanner: scanner,
|
||||
}
|
||||
}
|
||||
|
||||
// Read reads the previous slice into the buffer, or attempts to read
|
||||
// the next chunk.
|
||||
// TODO: switch to readline approach.
|
||||
func (d *YAMLDecoder) Read(data []byte) (n int, err error) {
|
||||
left := len(d.remaining)
|
||||
if left == 0 {
|
||||
// return the next chunk from the stream
|
||||
if !d.scanner.Scan() {
|
||||
err := d.scanner.Err()
|
||||
if err == nil {
|
||||
err = io.EOF
|
||||
}
|
||||
return 0, err
|
||||
}
|
||||
out := d.scanner.Bytes()
|
||||
d.remaining = out
|
||||
left = len(out)
|
||||
}
|
||||
|
||||
// fits within data
|
||||
if left <= len(data) {
|
||||
copy(data, d.remaining)
|
||||
d.remaining = nil
|
||||
return len(d.remaining), nil
|
||||
}
|
||||
|
||||
// caller will need to reread
|
||||
copy(data, d.remaining[:left])
|
||||
d.remaining = d.remaining[left:]
|
||||
return len(data), io.ErrShortBuffer
|
||||
}
|
||||
|
||||
func (d *YAMLDecoder) Close() error {
|
||||
return d.r.Close()
|
||||
}
|
||||
|
||||
const yamlSeparator = "\n---"
const separator = "---"

// splitYAMLDocument is a bufio.SplitFunc for splitting YAML streams into individual documents.
func splitYAMLDocument(data []byte, atEOF bool) (advance int, token []byte, err error) {
	if atEOF && len(data) == 0 {
		return 0, nil, nil
	}
	sepLen := len(yamlSeparator)
	idx := bytes.Index(data, []byte(yamlSeparator))
	if idx < 0 {
		// No terminator in sight. At EOF the remainder is a final,
		// non-terminated document; otherwise ask for more data.
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	}

	// We have a potential document terminator.
	idx += sepLen
	rest := data[idx:]
	if len(rest) == 0 {
		// The buffer ends exactly at the separator; only at EOF can we
		// be sure no continuation of the "---" line follows.
		if atEOF {
			return len(data), data[:len(data)-sepLen], nil
		}
		return 0, nil, nil
	}
	if nl := bytes.IndexByte(rest, '\n'); nl >= 0 {
		// Separator line is complete: emit everything before it.
		return idx + nl + 1, data[0 : idx-sepLen], nil
	}
	return 0, nil, nil
}
|
||||
|
||||
// decoder is a convenience interface for Decode.
type decoder interface {
	Decode(into interface{}) error
}

// YAMLOrJSONDecoder attempts to decode a stream of JSON documents or
// YAML documents by sniffing for a leading { character.
type YAMLOrJSONDecoder struct {
	r          io.Reader
	bufferSize int

	// decoder is selected lazily on the first Decode call, based on
	// whether the sniffed prefix looks like JSON.
	decoder decoder
	// rawData holds the sniffed prefix; used when reporting JSON syntax
	// errors after the buffered stream has been partially consumed.
	rawData []byte
}
|
||||
|
||||
type JSONSyntaxError struct {
|
||||
Line int
|
||||
Err error
|
||||
}
|
||||
|
||||
func (e JSONSyntaxError) Error() string {
|
||||
return fmt.Sprintf("json: line %d: %s", e.Line, e.Err.Error())
|
||||
}
|
||||
|
||||
type YAMLSyntaxError struct {
|
||||
err error
|
||||
}
|
||||
|
||||
func (e YAMLSyntaxError) Error() string {
|
||||
return e.err.Error()
|
||||
}
|
||||
|
||||
// NewYAMLOrJSONDecoder returns a decoder that will process YAML documents
|
||||
// or JSON documents from the given reader as a stream. bufferSize determines
|
||||
// how far into the stream the decoder will look to figure out whether this
|
||||
// is a JSON stream (has whitespace followed by an open brace).
|
||||
func NewYAMLOrJSONDecoder(r io.Reader, bufferSize int) *YAMLOrJSONDecoder {
|
||||
return &YAMLOrJSONDecoder{
|
||||
r: r,
|
||||
bufferSize: bufferSize,
|
||||
}
|
||||
}
|
||||
|
||||
// Decode unmarshals the next object from the underlying stream into the
|
||||
// provide object, or returns an error.
|
||||
func (d *YAMLOrJSONDecoder) Decode(into interface{}) error {
|
||||
if d.decoder == nil {
|
||||
buffer, origData, isJSON := GuessJSONStream(d.r, d.bufferSize)
|
||||
if isJSON {
|
||||
glog.V(4).Infof("decoding stream as JSON")
|
||||
d.decoder = json.NewDecoder(buffer)
|
||||
d.rawData = origData
|
||||
} else {
|
||||
glog.V(4).Infof("decoding stream as YAML")
|
||||
d.decoder = NewYAMLToJSONDecoder(buffer)
|
||||
}
|
||||
}
|
||||
err := d.decoder.Decode(into)
|
||||
if jsonDecoder, ok := d.decoder.(*json.Decoder); ok {
|
||||
if syntax, ok := err.(*json.SyntaxError); ok {
|
||||
data, readErr := ioutil.ReadAll(jsonDecoder.Buffered())
|
||||
if readErr != nil {
|
||||
glog.V(4).Infof("reading stream failed: %v", readErr)
|
||||
}
|
||||
js := string(data)
|
||||
|
||||
// if contents from io.Reader are not complete,
|
||||
// use the original raw data to prevent panic
|
||||
if int64(len(js)) <= syntax.Offset {
|
||||
js = string(d.rawData)
|
||||
}
|
||||
|
||||
start := strings.LastIndex(js[:syntax.Offset], "\n") + 1
|
||||
line := strings.Count(js[:start], "\n")
|
||||
return JSONSyntaxError{
|
||||
Line: line,
|
||||
Err: fmt.Errorf(syntax.Error()),
|
||||
}
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
type Reader interface {
|
||||
Read() ([]byte, error)
|
||||
}
|
||||
|
||||
type YAMLReader struct {
|
||||
reader Reader
|
||||
}
|
||||
|
||||
func NewYAMLReader(r *bufio.Reader) *YAMLReader {
|
||||
return &YAMLReader{
|
||||
reader: &LineReader{reader: r},
|
||||
}
|
||||
}
|
||||
|
||||
// Read returns a full YAML document.
|
||||
func (r *YAMLReader) Read() ([]byte, error) {
|
||||
var buffer bytes.Buffer
|
||||
for {
|
||||
line, err := r.reader.Read()
|
||||
if err != nil && err != io.EOF {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
sep := len([]byte(separator))
|
||||
if i := bytes.Index(line, []byte(separator)); i == 0 {
|
||||
// We have a potential document terminator
|
||||
i += sep
|
||||
after := line[i:]
|
||||
if len(strings.TrimRightFunc(string(after), unicode.IsSpace)) == 0 {
|
||||
if buffer.Len() != 0 {
|
||||
return buffer.Bytes(), nil
|
||||
}
|
||||
if err == io.EOF {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
}
|
||||
if err == io.EOF {
|
||||
if buffer.Len() != 0 {
|
||||
// If we're at EOF, we have a final, non-terminated line. Return it.
|
||||
return buffer.Bytes(), nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
buffer.Write(line)
|
||||
}
|
||||
}
|
||||
|
||||
type LineReader struct {
|
||||
reader *bufio.Reader
|
||||
}
|
||||
|
||||
// Read returns a single line (with '\n' ended) from the underlying reader.
|
||||
// An error is returned iff there is an error with the underlying reader.
|
||||
func (r *LineReader) Read() ([]byte, error) {
|
||||
var (
|
||||
isPrefix bool = true
|
||||
err error = nil
|
||||
line []byte
|
||||
buffer bytes.Buffer
|
||||
)
|
||||
|
||||
for isPrefix && err == nil {
|
||||
line, isPrefix, err = r.reader.ReadLine()
|
||||
buffer.Write(line)
|
||||
}
|
||||
buffer.WriteByte('\n')
|
||||
return buffer.Bytes(), err
|
||||
}
|
||||
|
||||
// GuessJSONStream scans the provided reader up to size, looking
|
||||
// for an open brace indicating this is JSON. It will return the
|
||||
// bufio.Reader it creates for the consumer.
|
||||
func GuessJSONStream(r io.Reader, size int) (io.Reader, []byte, bool) {
|
||||
buffer := bufio.NewReaderSize(r, size)
|
||||
b, _ := buffer.Peek(size)
|
||||
return buffer, b, hasJSONPrefix(b)
|
||||
}
|
||||
|
||||
var jsonPrefix = []byte("{")
|
||||
|
||||
// hasJSONPrefix returns true if the provided buffer appears to start with
|
||||
// a JSON open brace.
|
||||
func hasJSONPrefix(buf []byte) bool {
|
||||
return hasPrefix(buf, jsonPrefix)
|
||||
}
|
||||
|
||||
// Return true if the first non-whitespace bytes in buf is
|
||||
// prefix.
|
||||
func hasPrefix(buf []byte, prefix []byte) bool {
|
||||
trim := bytes.TrimLeftFunc(buf, unicode.IsSpace)
|
||||
return bytes.HasPrefix(trim, prefix)
|
||||
}
|
349
vendor/k8s.io/apimachinery/pkg/util/yaml/decoder_test.go
generated
vendored
Normal file
349
vendor/k8s.io/apimachinery/pkg/util/yaml/decoder_test.go
generated
vendored
Normal file
@ -0,0 +1,349 @@
|
||||
/*
|
||||
Copyright 2014 The Kubernetes Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"math/rand"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSplitYAMLDocument(t *testing.T) {
|
||||
testCases := []struct {
|
||||
input string
|
||||
atEOF bool
|
||||
expect string
|
||||
adv int
|
||||
}{
|
||||
{"foo", true, "foo", 3},
|
||||
{"fo", false, "", 0},
|
||||
|
||||
{"---", true, "---", 3},
|
||||
{"---\n", true, "---\n", 4},
|
||||
{"---\n", false, "", 0},
|
||||
|
||||
{"\n---\n", false, "", 5},
|
||||
{"\n---\n", true, "", 5},
|
||||
|
||||
{"abc\n---\ndef", true, "abc", 8},
|
||||
{"def", true, "def", 3},
|
||||
{"", true, "", 0},
|
||||
}
|
||||
for i, testCase := range testCases {
|
||||
adv, token, err := splitYAMLDocument([]byte(testCase.input), testCase.atEOF)
|
||||
if err != nil {
|
||||
t.Errorf("%d: unexpected error: %v", i, err)
|
||||
continue
|
||||
}
|
||||
if adv != testCase.adv {
|
||||
t.Errorf("%d: advance did not match: %d %d", i, testCase.adv, adv)
|
||||
}
|
||||
if testCase.expect != string(token) {
|
||||
t.Errorf("%d: token did not match: %q %q", i, testCase.expect, string(token))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestGuessJSON(t *testing.T) {
|
||||
if r, _, isJSON := GuessJSONStream(bytes.NewReader([]byte(" \n{}")), 100); !isJSON {
|
||||
t.Fatalf("expected stream to be JSON")
|
||||
} else {
|
||||
b := make([]byte, 30)
|
||||
n, err := r.Read(b)
|
||||
if err != nil || n != 4 {
|
||||
t.Fatalf("unexpected body: %d / %v", n, err)
|
||||
}
|
||||
if string(b[:n]) != " \n{}" {
|
||||
t.Fatalf("unexpected body: %q", string(b[:n]))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestScanYAML(t *testing.T) {
|
||||
s := bufio.NewScanner(bytes.NewReader([]byte(`---
|
||||
stuff: 1
|
||||
|
||||
---
|
||||
`)))
|
||||
s.Split(splitYAMLDocument)
|
||||
if !s.Scan() {
|
||||
t.Fatalf("should have been able to scan")
|
||||
}
|
||||
t.Logf("scan: %s", s.Text())
|
||||
if !s.Scan() {
|
||||
t.Fatalf("should have been able to scan")
|
||||
}
|
||||
t.Logf("scan: %s", s.Text())
|
||||
if s.Scan() {
|
||||
t.Fatalf("scan should have been done")
|
||||
}
|
||||
if s.Err() != nil {
|
||||
t.Fatalf("err should have been nil: %v", s.Err())
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeYAML(t *testing.T) {
|
||||
s := NewYAMLToJSONDecoder(bytes.NewReader([]byte(`---
|
||||
stuff: 1
|
||||
|
||||
---
|
||||
`)))
|
||||
obj := generic{}
|
||||
if err := s.Decode(&obj); err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if fmt.Sprintf("%#v", obj) != `yaml.generic{"stuff":1}` {
|
||||
t.Errorf("unexpected object: %#v", obj)
|
||||
}
|
||||
obj = generic{}
|
||||
if err := s.Decode(&obj); err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if len(obj) != 0 {
|
||||
t.Fatalf("unexpected object: %#v", obj)
|
||||
}
|
||||
obj = generic{}
|
||||
if err := s.Decode(&obj); err != io.EOF {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeBrokenYAML(t *testing.T) {
|
||||
s := NewYAMLOrJSONDecoder(bytes.NewReader([]byte(`---
|
||||
stuff: 1
|
||||
test-foo: 1
|
||||
|
||||
---
|
||||
`)), 100)
|
||||
obj := generic{}
|
||||
err := s.Decode(&obj)
|
||||
if err == nil {
|
||||
t.Fatal("expected error with yaml: violate, got no error")
|
||||
}
|
||||
fmt.Printf("err: %s\n", err.Error())
|
||||
if !strings.Contains(err.Error(), "yaml: line 2:") {
|
||||
t.Fatalf("expected %q to have 'yaml: line 2:' found a tab character", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeBrokenJSON(t *testing.T) {
|
||||
s := NewYAMLOrJSONDecoder(bytes.NewReader([]byte(`{
|
||||
"foo": {
|
||||
"stuff": 1
|
||||
"otherStuff": 2
|
||||
}
|
||||
}
|
||||
`)), 100)
|
||||
obj := generic{}
|
||||
err := s.Decode(&obj)
|
||||
if err == nil {
|
||||
t.Fatal("expected error with json: prefix, got no error")
|
||||
}
|
||||
if !strings.HasPrefix(err.Error(), "json: line 3:") {
|
||||
t.Fatalf("expected %q to have 'json: line 3:' prefix", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
type generic map[string]interface{}
|
||||
|
||||
func TestYAMLOrJSONDecoder(t *testing.T) {
|
||||
testCases := []struct {
|
||||
input string
|
||||
buffer int
|
||||
isJSON bool
|
||||
err bool
|
||||
out []generic
|
||||
}{
|
||||
{` {"1":2}{"3":4}`, 2, true, false, []generic{
|
||||
{"1": 2},
|
||||
{"3": 4},
|
||||
}},
|
||||
{" \n{}", 3, true, false, []generic{
|
||||
{},
|
||||
}},
|
||||
{" \na: b", 2, false, false, []generic{
|
||||
{"a": "b"},
|
||||
}},
|
||||
{" \n{\"a\": \"b\"}", 2, false, true, []generic{
|
||||
{"a": "b"},
|
||||
}},
|
||||
{" \n{\"a\": \"b\"}", 3, true, false, []generic{
|
||||
{"a": "b"},
|
||||
}},
|
||||
{` {"a":"b"}`, 100, true, false, []generic{
|
||||
{"a": "b"},
|
||||
}},
|
||||
{"", 1, false, false, []generic{}},
|
||||
{"foo: bar\n---\nbaz: biz", 100, false, false, []generic{
|
||||
{"foo": "bar"},
|
||||
{"baz": "biz"},
|
||||
}},
|
||||
{"foo: bar\n---\n", 100, false, false, []generic{
|
||||
{"foo": "bar"},
|
||||
}},
|
||||
{"foo: bar\n---", 100, false, false, []generic{
|
||||
{"foo": "bar"},
|
||||
}},
|
||||
{"foo: bar\n--", 100, false, true, []generic{
|
||||
{"foo": "bar"},
|
||||
}},
|
||||
{"foo: bar\n-", 100, false, true, []generic{
|
||||
{"foo": "bar"},
|
||||
}},
|
||||
{"foo: bar\n", 100, false, false, []generic{
|
||||
{"foo": "bar"},
|
||||
}},
|
||||
}
|
||||
for i, testCase := range testCases {
|
||||
decoder := NewYAMLOrJSONDecoder(bytes.NewReader([]byte(testCase.input)), testCase.buffer)
|
||||
objs := []generic{}
|
||||
|
||||
var err error
|
||||
for {
|
||||
out := make(generic)
|
||||
err = decoder.Decode(&out)
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
objs = append(objs, out)
|
||||
}
|
||||
if err != io.EOF {
|
||||
switch {
|
||||
case testCase.err && err == nil:
|
||||
t.Errorf("%d: unexpected non-error", i)
|
||||
continue
|
||||
case !testCase.err && err != nil:
|
||||
t.Errorf("%d: unexpected error: %v", i, err)
|
||||
continue
|
||||
case err != nil:
|
||||
continue
|
||||
}
|
||||
}
|
||||
switch decoder.decoder.(type) {
|
||||
case *YAMLToJSONDecoder:
|
||||
if testCase.isJSON {
|
||||
t.Errorf("%d: expected JSON decoder, got YAML", i)
|
||||
}
|
||||
case *json.Decoder:
|
||||
if !testCase.isJSON {
|
||||
t.Errorf("%d: expected YAML decoder, got JSON", i)
|
||||
}
|
||||
}
|
||||
if fmt.Sprintf("%#v", testCase.out) != fmt.Sprintf("%#v", objs) {
|
||||
t.Errorf("%d: objects were not equal: \n%#v\n%#v", i, testCase.out, objs)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadSingleLongLine(t *testing.T) {
|
||||
testReadLines(t, []int{128 * 1024})
|
||||
}
|
||||
|
||||
func TestReadRandomLineLengths(t *testing.T) {
|
||||
minLength := 100
|
||||
maxLength := 96 * 1024
|
||||
maxLines := 100
|
||||
|
||||
lineLengths := make([]int, maxLines)
|
||||
for i := 0; i < maxLines; i++ {
|
||||
lineLengths[i] = rand.Intn(maxLength-minLength) + minLength
|
||||
}
|
||||
|
||||
testReadLines(t, lineLengths)
|
||||
}
|
||||
|
||||
func testReadLines(t *testing.T, lineLengths []int) {
|
||||
var (
|
||||
lines [][]byte
|
||||
inputStream []byte
|
||||
)
|
||||
for _, lineLength := range lineLengths {
|
||||
inputLine := make([]byte, lineLength+1)
|
||||
for i := 0; i < lineLength; i++ {
|
||||
char := rand.Intn('z'-'A') + 'A'
|
||||
inputLine[i] = byte(char)
|
||||
}
|
||||
inputLine[len(inputLine)-1] = '\n'
|
||||
lines = append(lines, inputLine)
|
||||
}
|
||||
for _, line := range lines {
|
||||
inputStream = append(inputStream, line...)
|
||||
}
|
||||
|
||||
// init Reader
|
||||
reader := bufio.NewReader(bytes.NewReader(inputStream))
|
||||
lineReader := &LineReader{reader: reader}
|
||||
|
||||
// read lines
|
||||
var readLines [][]byte
|
||||
for range lines {
|
||||
bytes, err := lineReader.Read()
|
||||
if err != nil && err != io.EOF {
|
||||
t.Fatalf("failed to read lines: %v", err)
|
||||
}
|
||||
readLines = append(readLines, bytes)
|
||||
}
|
||||
|
||||
// validate
|
||||
for i := range lines {
|
||||
if len(lines[i]) != len(readLines[i]) {
|
||||
t.Fatalf("expected line length: %d, but got %d", len(lines[i]), len(readLines[i]))
|
||||
}
|
||||
if !reflect.DeepEqual(lines[i], readLines[i]) {
|
||||
t.Fatalf("expected line: %v, but got %v", lines[i], readLines[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestTypedJSONOrYamlErrors(t *testing.T) {
|
||||
s := NewYAMLOrJSONDecoder(bytes.NewReader([]byte(`{
|
||||
"foo": {
|
||||
"stuff": 1
|
||||
"otherStuff": 2
|
||||
}
|
||||
}
|
||||
`)), 100)
|
||||
obj := generic{}
|
||||
err := s.Decode(&obj)
|
||||
if err == nil {
|
||||
t.Fatal("expected error with json: prefix, got no error")
|
||||
}
|
||||
if _, ok := err.(JSONSyntaxError); !ok {
|
||||
t.Fatalf("expected %q to be of type JSONSyntaxError", err.Error())
|
||||
}
|
||||
|
||||
s = NewYAMLOrJSONDecoder(bytes.NewReader([]byte(`---
|
||||
stuff: 1
|
||||
test-foo: 1
|
||||
|
||||
---
|
||||
`)), 100)
|
||||
obj = generic{}
|
||||
err = s.Decode(&obj)
|
||||
if err == nil {
|
||||
t.Fatal("expected error with yaml: prefix, got no error")
|
||||
}
|
||||
if _, ok := err.(YAMLSyntaxError); !ok {
|
||||
t.Fatalf("expected %q to be of type YAMLSyntaxError", err.Error())
|
||||
}
|
||||
}
|
Reference in New Issue
Block a user