diff --git a/third_party/github.com/BurntSushi/toml/.gitignore b/third_party/github.com/BurntSushi/toml/.gitignore deleted file mode 100644 index 55e90a1e5..000000000 --- a/third_party/github.com/BurntSushi/toml/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -TAGS -tags -.*.swp -tomlcheck/tomlcheck diff --git a/third_party/github.com/BurntSushi/toml/COMPATIBLE b/third_party/github.com/BurntSushi/toml/COMPATIBLE deleted file mode 100644 index 21e0938ca..000000000 --- a/third_party/github.com/BurntSushi/toml/COMPATIBLE +++ /dev/null @@ -1,3 +0,0 @@ -Compatible with TOML version -[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md) - diff --git a/third_party/github.com/BurntSushi/toml/COPYING b/third_party/github.com/BurntSushi/toml/COPYING deleted file mode 100644 index 5a8e33254..000000000 --- a/third_party/github.com/BurntSushi/toml/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. - diff --git a/third_party/github.com/BurntSushi/toml/Makefile b/third_party/github.com/BurntSushi/toml/Makefile deleted file mode 100644 index e6adc3e9e..000000000 --- a/third_party/github.com/BurntSushi/toml/Makefile +++ /dev/null @@ -1,14 +0,0 @@ -install: - go install - -fmt: - gofmt -w *.go */*.go - colcheck *.go */*.go - -tags: - find ./ -name '*.go' -print0 | xargs -0 gotags > TAGS - -push: - git push origin master - git push github master - diff --git a/third_party/github.com/BurntSushi/toml/README.md b/third_party/github.com/BurntSushi/toml/README.md deleted file mode 100644 index 49f43f34e..000000000 --- a/third_party/github.com/BurntSushi/toml/README.md +++ /dev/null @@ -1,218 +0,0 @@ -# TOML parser and encoder for Go with reflection - -TOML stands for Tom's Obvious, Minimal Language. This Go package provides a -reflection interface similar to Go's standard library `json` and `xml` -packages. This package also supports the `encoding.TextUnmarshaler` and -`encoding.TextMarshaler` interfaces so that you can define custom data -representations. (There is an example of this below.) - -Spec: https://github.com/mojombo/toml - -Compatible with TOML version -[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md) - -Documentation: http://godoc.org/github.com/BurntSushi/toml - -Installation: - -```bash -go get github.com/BurntSushi/toml -``` - -Try the toml validator: - -```bash -go get github.com/BurntSushi/toml/tomlv -tomlv some-toml-file.toml -``` - - -## Testing - -This package passes all tests in -[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder -and the encoder. - -## Examples - -This package works similarly to how the Go standard library handles `XML` -and `JSON`. Namely, data is loaded into Go values via reflection. 
- -For the simplest example, consider some TOML file as just a list of keys -and values: - -```toml -Age = 25 -Cats = [ "Cauchy", "Plato" ] -Pi = 3.14 -Perfection = [ 6, 28, 496, 8128 ] -DOB = 1987-07-05T05:45:00Z -``` - -Which could be defined in Go as: - -```go -type Config struct { - Age int - Cats []string - Pi float64 - Perfection []int - DOB time.Time // requires `import time` -} -``` - -And then decoded with: - -```go -var conf Config -if _, err := toml.Decode(tomlData, &conf); err != nil { - // handle error -} -``` - -You can also use struct tags if your struct field name doesn't map to a TOML -key value directly: - -```toml -some_key_NAME = "wat" -``` - -```go -type TOML struct { - ObscureKey string `toml:"some_key_NAME"` -} -``` - -## Using the `encoding.TextUnmarshaler` interface - -Here's an example that automatically parses duration strings into -`time.Duration` values: - -```toml -[[song]] -name = "Thunder Road" -duration = "4m49s" - -[[song]] -name = "Stairway to Heaven" -duration = "8m03s" -``` - -Which can be decoded with: - -```go -type song struct { - Name string - Duration duration -} -type songs struct { - Song []song -} -var favorites songs -if _, err := Decode(blob, &favorites); err != nil { - log.Fatal(err) -} - -for _, s := range favorites.Song { - fmt.Printf("%s (%s)\n", s.Name, s.Duration) -} -``` - -And you'll also need a `duration` type that satisfies the -`encoding.TextUnmarshaler` interface: - -```go -type duration struct { - time.Duration -} - -func (d *duration) UnmarshalText(text []byte) error { - var err error - d.Duration, err = time.ParseDuration(string(text)) - return err -} -``` - -## More complex usage - -Here's an example of how to load the example from the official spec page: - -```toml -# This is a TOML document. Boom. - -title = "TOML Example" - -[owner] -name = "Tom Preston-Werner" -organization = "GitHub" -bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." -dob = 1979-05-27T07:32:00Z # First class dates? Why not? - -[database] -server = "192.168.1.1" -ports = [ 8001, 8001, 8002 ] -connection_max = 5000 -enabled = true - -[servers] - - # You can indent as you please. Tabs or spaces. TOML don't care. - [servers.alpha] - ip = "10.0.0.1" - dc = "eqdc10" - - [servers.beta] - ip = "10.0.0.2" - dc = "eqdc10" - -[clients] -data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it - -# Line breaks are OK when inside arrays -hosts = [ - "alpha", - "omega" -] -``` - -And the corresponding Go types are: - -```go -type tomlConfig struct { - Title string - Owner ownerInfo - DB database `toml:"database"` - Servers map[string]server - Clients clients -} - -type ownerInfo struct { - Name string - Org string `toml:"organization"` - Bio string - DOB time.Time -} - -type database struct { - Server string - Ports []int - ConnMax int `toml:"connection_max"` - Enabled bool -} - -type server struct { - IP string - DC string -} - -type clients struct { - Data [][]interface{} - Hosts []string -} -``` - -Note that a case insensitive match will be tried if an exact match can't be -found. - -A working example of the above can be found in `_examples/example.{go,toml}`. 
- diff --git a/third_party/github.com/BurntSushi/toml/_examples/example.go b/third_party/github.com/BurntSushi/toml/_examples/example.go deleted file mode 100644 index c320ac694..000000000 --- a/third_party/github.com/BurntSushi/toml/_examples/example.go +++ /dev/null @@ -1,59 +0,0 @@ -package main - -import ( - "fmt" - "time" - - "github.com/coreos/etcd/third_party/github.com/BurntSushi/toml" -) - -type tomlConfig struct { - Title string - Owner ownerInfo - DB database `toml:"database"` - Servers map[string]server - Clients clients -} - -type ownerInfo struct { - Name string - Org string `toml:"organization"` - Bio string - DOB time.Time -} - -type database struct { - Server string - Ports []int - ConnMax int `toml:"connection_max"` - Enabled bool -} - -type server struct { - IP string - DC string -} - -type clients struct { - Data [][]interface{} - Hosts []string -} - -func main() { - var config tomlConfig - if _, err := toml.DecodeFile("example.toml", &config); err != nil { - fmt.Println(err) - return - } - - fmt.Printf("Title: %s\n", config.Title) - fmt.Printf("Owner: %s (%s, %s), Born: %s\n", - config.Owner.Name, config.Owner.Org, config.Owner.Bio, config.Owner.DOB) - fmt.Printf("Database: %s %v (Max conn. %d), Enabled? %v\n", - config.DB.Server, config.DB.Ports, config.DB.ConnMax, config.DB.Enabled) - for serverName, server := range config.Servers { - fmt.Printf("Server: %s (%s, %s)\n", serverName, server.IP, server.DC) - } - fmt.Printf("Client data: %v\n", config.Clients.Data) - fmt.Printf("Client hosts: %v\n", config.Clients.Hosts) -} diff --git a/third_party/github.com/BurntSushi/toml/_examples/example.toml b/third_party/github.com/BurntSushi/toml/_examples/example.toml deleted file mode 100644 index 32c7a4faa..000000000 --- a/third_party/github.com/BurntSushi/toml/_examples/example.toml +++ /dev/null @@ -1,35 +0,0 @@ -# This is a TOML document. Boom. - -title = "TOML Example" - -[owner] -name = "Tom Preston-Werner" -organization = "GitHub" -bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." -dob = 1979-05-27T07:32:00Z # First class dates? Why not? - -[database] -server = "192.168.1.1" -ports = [ 8001, 8001, 8002 ] -connection_max = 5000 -enabled = true - -[servers] - - # You can indent as you please. Tabs or spaces. TOML don't care. - [servers.alpha] - ip = "10.0.0.1" - dc = "eqdc10" - - [servers.beta] - ip = "10.0.0.2" - dc = "eqdc10" - -[clients] -data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it - -# Line breaks are OK when inside arrays -hosts = [ - "alpha", - "omega" -] diff --git a/third_party/github.com/BurntSushi/toml/_examples/hard.toml b/third_party/github.com/BurntSushi/toml/_examples/hard.toml deleted file mode 100644 index 26145d2b4..000000000 --- a/third_party/github.com/BurntSushi/toml/_examples/hard.toml +++ /dev/null @@ -1,22 +0,0 @@ -# Test file for TOML -# Only this one tries to emulate a TOML file written by a user of the kind of parser writers probably hate -# This part you'll really hate - -[the] -test_string = "You'll hate me after this - #" # " Annoying, isn't it? - - [the.hard] - test_array = [ "] ", " # "] # ] There you go, parse this! - test_array2 = [ "Test #11 ]proved that", "Experiment #9 was a success" ] - # You didn't think it'd as easy as chucking out the last #, did you? - another_test_string = " Same thing, but with a string #" - harder_test_string = " And when \"'s are in the string, along with # \"" # "and comments are there too" - # Things will get harder - - [the.hard.bit#] - what? 
= "You don't think some user won't do that?" - multi_line_array = [ - "]", - # ] Oh yes I did - ] - diff --git a/third_party/github.com/BurntSushi/toml/_examples/implicit.toml b/third_party/github.com/BurntSushi/toml/_examples/implicit.toml deleted file mode 100644 index 1dea5ceb4..000000000 --- a/third_party/github.com/BurntSushi/toml/_examples/implicit.toml +++ /dev/null @@ -1,4 +0,0 @@ -# [x] you -# [x.y] don't -# [x.y.z] need these -[x.y.z.w] # for this to work diff --git a/third_party/github.com/BurntSushi/toml/_examples/invalid-apples.toml b/third_party/github.com/BurntSushi/toml/_examples/invalid-apples.toml deleted file mode 100644 index 74e9e337e..000000000 --- a/third_party/github.com/BurntSushi/toml/_examples/invalid-apples.toml +++ /dev/null @@ -1,6 +0,0 @@ -# DO NOT WANT -[fruit] -type = "apple" - -[fruit.type] -apple = "yes" diff --git a/third_party/github.com/BurntSushi/toml/_examples/invalid.toml b/third_party/github.com/BurntSushi/toml/_examples/invalid.toml deleted file mode 100644 index beb1dba54..000000000 --- a/third_party/github.com/BurntSushi/toml/_examples/invalid.toml +++ /dev/null @@ -1,35 +0,0 @@ -# This is an INVALID TOML document. Boom. -# Can you spot the error without help? - -title = "TOML Example" - -[owner] -name = "Tom Preston-Werner" -organization = "GitHub" -bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." -dob = 1979-05-27T7:32:00Z # First class dates? Why not? - -[database] -server = "192.168.1.1" -ports = [ 8001, 8001, 8002 ] -connection_max = 5000 -enabled = true - -[servers] - # You can indent as you please. Tabs or spaces. TOML don't care. - [servers.alpha] - ip = "10.0.0.1" - dc = "eqdc10" - - [servers.beta] - ip = "10.0.0.2" - dc = "eqdc10" - -[clients] -data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it - -# Line breaks are OK when inside arrays -hosts = [ - "alpha", - "omega" -] diff --git a/third_party/github.com/BurntSushi/toml/_examples/readme1.toml b/third_party/github.com/BurntSushi/toml/_examples/readme1.toml deleted file mode 100644 index 3e1261d4c..000000000 --- a/third_party/github.com/BurntSushi/toml/_examples/readme1.toml +++ /dev/null @@ -1,5 +0,0 @@ -Age = 25 -Cats = [ "Cauchy", "Plato" ] -Pi = 3.14 -Perfection = [ 6, 28, 496, 8128 ] -DOB = 1987-07-05T05:45:00Z diff --git a/third_party/github.com/BurntSushi/toml/_examples/readme2.toml b/third_party/github.com/BurntSushi/toml/_examples/readme2.toml deleted file mode 100644 index b51cd9340..000000000 --- a/third_party/github.com/BurntSushi/toml/_examples/readme2.toml +++ /dev/null @@ -1 +0,0 @@ -some_key_NAME = "wat" diff --git a/third_party/github.com/BurntSushi/toml/decode.go b/third_party/github.com/BurntSushi/toml/decode.go deleted file mode 100644 index a106f3670..000000000 --- a/third_party/github.com/BurntSushi/toml/decode.go +++ /dev/null @@ -1,544 +0,0 @@ -package toml - -import ( - "encoding" - "fmt" - "io" - "io/ioutil" - "reflect" - "strings" - "time" -) - -var e = fmt.Errorf - -// Primitive is a TOML value that hasn't been decoded into a Go value. -// When using the various `Decode*` functions, the type `Primitive` may -// be given to any value, and its decoding will be delayed. -// -// A `Primitive` value can be decoded using the `PrimitiveDecode` function. -// -// The underlying representation of a `Primitive` value is subject to change. -// Do not rely on it. -// -// N.B. Primitive values are still parsed, so using them will only avoid -// the overhead of reflection. 
They can be useful when you don't know the -// exact type of TOML data until run time. -type Primitive interface{} - -// PrimitiveDecode is just like the other `Decode*` functions, except it -// decodes a TOML value that has already been parsed. Valid primitive values -// can *only* be obtained from values filled by the decoder functions, -// including `PrimitiveDecode`. (i.e., `v` may contain more `Primitive` -// values.) -// -// Meta data for primitive values is included in the meta data returned by -// the `Decode*` functions. -func PrimitiveDecode(primValue Primitive, v interface{}) error { - return unify(primValue, rvalue(v)) -} - -// Decode will decode the contents of `data` in TOML format into a pointer -// `v`. -// -// TOML hashes correspond to Go structs or maps. (Dealer's choice. They can be -// used interchangeably.) -// -// TOML arrays of tables correspond to either a slice of structs or a slice -// of maps. -// -// TOML datetimes correspond to Go `time.Time` values. -// -// All other TOML types (float, string, int, bool and array) correspond -// to the obvious Go types. -// -// An exception to the above rules is if a type implements the -// encoding.TextUnmarshaler interface. In this case, any primitive TOML value -// (floats, strings, integers, booleans and datetimes) will be converted to -// a byte string and given to the value's UnmarshalText method. Here's an -// example for parsing durations: -// -// type duration struct { -// time.Duration -// } -// -// func (d *duration) UnmarshalText(text []byte) error { -// var err error -// d.Duration, err = time.ParseDuration(string(text)) -// return err -// } -// -// func ExampleUnmarshaler() { -// blob := ` -// [[song]] -// name = "Thunder Road" -// duration = "4m49s" -// -// [[song]] -// name = "Stairway to Heaven" -// duration = "8m03s" -// ` -// type song struct { -// Name string -// Duration duration -// } -// type songs struct { -// Song []song -// } -// var favorites songs -// if _, err := Decode(blob, &favorites); err != nil { -// log.Fatal(err) -// } -// -// for _, s := range favorites.Song { -// fmt.Printf("%s (%s)\n", s.Name, s.Duration) -// } -// // Output: -// // Thunder Road (4m49s) -// // Stairway to Heaven (8m3s) -// } -// -// Key mapping -// -// TOML keys can map to either keys in a Go map or field names in a Go -// struct. The special `toml` struct tag may be used to map TOML keys to -// struct fields that don't match the key name exactly. (See the example.) -// A case insensitive match to struct names will be tried if an exact match -// can't be found. -// -// The mapping between TOML values and Go values is loose. That is, there -// may exist TOML values that cannot be placed into your representation, and -// there may be parts of your representation that do not correspond to -// TOML values. -// -// This decoder will not handle cyclic types. If a cyclic type is passed, -// `Decode` will not terminate. -func Decode(data string, v interface{}) (MetaData, error) { - p, err := parse(data) - if err != nil { - return MetaData{}, err - } - return MetaData{p.mapping, p.types, p.ordered}, unify(p.mapping, rvalue(v)) -} - -// DecodeFile is just like Decode, except it will automatically read the -// contents of the file at `fpath` and decode it for you. 
-func DecodeFile(fpath string, v interface{}) (MetaData, error) { - bs, err := ioutil.ReadFile(fpath) - if err != nil { - return MetaData{}, err - } - return Decode(string(bs), v) -} - -// DecodeReader is just like Decode, except it will consume all bytes -// from the reader and decode it for you. -func DecodeReader(r io.Reader, v interface{}) (MetaData, error) { - bs, err := ioutil.ReadAll(r) - if err != nil { - return MetaData{}, err - } - return Decode(string(bs), v) -} - -// unify performs a sort of type unification based on the structure of `rv`, -// which is the client representation. -// -// Any type mismatch produces an error. Finding a type that we don't know -// how to handle produces an unsupported type error. -func unify(data interface{}, rv reflect.Value) error { - // Special case. Look for a `Primitive` value. - if rv.Type() == reflect.TypeOf((*Primitive)(nil)).Elem() { - return unifyAnything(data, rv) - } - - // Special case. Look for a value satisfying the TextUnmarshaler interface. - if v, ok := rv.Interface().(encoding.TextUnmarshaler); ok { - return unifyText(data, v) - } - // BUG(burntsushi) - // The behavior here is incorrect whenever a Go type satisfies the - // encoding.TextUnmarshaler interface but also corresponds to a TOML - // hash or array. In particular, the unmarshaler should only be applied - // to primitive TOML values. But at this point, it will be applied to - // all kinds of values and produce an incorrect error whenever those values - // are hashes or arrays (including arrays of tables). - - k := rv.Kind() - - // laziness - if k >= reflect.Int && k <= reflect.Uint64 { - return unifyInt(data, rv) - } - switch k { - case reflect.Ptr: - elem := reflect.New(rv.Type().Elem()) - err := unify(data, reflect.Indirect(elem)) - if err != nil { - return err - } - rv.Set(elem) - return nil - case reflect.Struct: - return unifyStruct(data, rv) - case reflect.Map: - return unifyMap(data, rv) - case reflect.Slice: - return unifySlice(data, rv) - case reflect.String: - return unifyString(data, rv) - case reflect.Bool: - return unifyBool(data, rv) - case reflect.Interface: - // we only support empty interfaces. - if rv.NumMethod() > 0 { - return e("Unsupported type '%s'.", rv.Kind()) - } - return unifyAnything(data, rv) - case reflect.Float32: - fallthrough - case reflect.Float64: - return unifyFloat64(data, rv) - } - return e("Unsupported type '%s'.", rv.Kind()) -} - -func unifyStruct(mapping interface{}, rv reflect.Value) error { - tmap, ok := mapping.(map[string]interface{}) - if !ok { - return mismatch(rv, "map", mapping) - } - - for key, datum := range tmap { - var f *field - fields := cachedTypeFields(rv.Type()) - for i := range fields { - ff := &fields[i] - if ff.name == key { - f = ff - break - } - if f == nil && strings.EqualFold(ff.name, key) { - f = ff - } - } - if f != nil { - subv := rv - for _, i := range f.index { - if subv.Kind() == reflect.Ptr { - if subv.IsNil() { - subv.Set(reflect.New(subv.Type().Elem())) - } - subv = subv.Elem() - } - subv = subv.Field(i) - } - sf := indirect(subv) - - if isUnifiable(sf) { - if err := unify(datum, sf); err != nil { - return e("Type mismatch for '%s.%s': %s", - rv.Type().String(), f.name, err) - } - } else if f.name != "" { - // Bad user! No soup for you! 
- return e("Field '%s.%s' is unexported, and therefore cannot "+ - "be loaded with reflection.", rv.Type().String(), f.name) - } - } - } - return nil -} - -func unifyMap(mapping interface{}, rv reflect.Value) error { - tmap, ok := mapping.(map[string]interface{}) - if !ok { - return badtype("map", mapping) - } - if rv.IsNil() { - rv.Set(reflect.MakeMap(rv.Type())) - } - for k, v := range tmap { - rvkey := indirect(reflect.New(rv.Type().Key())) - rvval := reflect.Indirect(reflect.New(rv.Type().Elem())) - if err := unify(v, rvval); err != nil { - return err - } - - rvkey.SetString(k) - rv.SetMapIndex(rvkey, rvval) - } - return nil -} - -func unifySlice(data interface{}, rv reflect.Value) error { - datav := reflect.ValueOf(data) - if datav.Kind() != reflect.Slice { - return badtype("slice", data) - } - sliceLen := datav.Len() - if rv.IsNil() { - rv.Set(reflect.MakeSlice(rv.Type(), sliceLen, sliceLen)) - } - for i := 0; i < sliceLen; i++ { - v := datav.Index(i).Interface() - sliceval := indirect(rv.Index(i)) - if err := unify(v, sliceval); err != nil { - return err - } - } - return nil -} - -func unifyDatetime(data interface{}, rv reflect.Value) error { - if _, ok := data.(time.Time); ok { - rv.Set(reflect.ValueOf(data)) - return nil - } - return badtype("time.Time", data) -} - -func unifyString(data interface{}, rv reflect.Value) error { - if s, ok := data.(string); ok { - rv.SetString(s) - return nil - } - return badtype("string", data) -} - -func unifyFloat64(data interface{}, rv reflect.Value) error { - if num, ok := data.(float64); ok { - switch rv.Kind() { - case reflect.Float32: - fallthrough - case reflect.Float64: - rv.SetFloat(num) - default: - panic("bug") - } - return nil - } - return badtype("float", data) -} - -func unifyInt(data interface{}, rv reflect.Value) error { - if num, ok := data.(int64); ok { - switch rv.Kind() { - case reflect.Int: - fallthrough - case reflect.Int8: - fallthrough - case reflect.Int16: - fallthrough - case reflect.Int32: - fallthrough - case reflect.Int64: - rv.SetInt(int64(num)) - case reflect.Uint: - fallthrough - case reflect.Uint8: - fallthrough - case reflect.Uint16: - fallthrough - case reflect.Uint32: - fallthrough - case reflect.Uint64: - rv.SetUint(uint64(num)) - default: - panic("bug") - } - return nil - } - return badtype("integer", data) -} - -func unifyBool(data interface{}, rv reflect.Value) error { - if b, ok := data.(bool); ok { - rv.SetBool(b) - return nil - } - return badtype("boolean", data) -} - -func unifyAnything(data interface{}, rv reflect.Value) error { - // too awesome to fail - rv.Set(reflect.ValueOf(data)) - return nil -} - -func unifyText(data interface{}, v encoding.TextUnmarshaler) error { - var s string - switch sdata := data.(type) { - case encoding.TextMarshaler: - text, err := sdata.MarshalText() - if err != nil { - return err - } - s = string(text) - case fmt.Stringer: - s = sdata.String() - case string: - s = sdata - case bool: - s = fmt.Sprintf("%v", sdata) - case int64: - s = fmt.Sprintf("%d", sdata) - case float64: - s = fmt.Sprintf("%f", sdata) - default: - return badtype("primitive (string-like)", data) - } - if err := v.UnmarshalText([]byte(s)); err != nil { - return err - } - return nil -} - -// rvalue returns a reflect.Value of `v`. All pointers are resolved. -func rvalue(v interface{}) reflect.Value { - return indirect(reflect.ValueOf(v)) -} - -// indirect returns the value pointed to by a pointer. -// Pointers are followed until the value is not a pointer. -// New values are allocated for each nil pointer. 
-// -// An exception to this rule is if the value satisfies an interface of -// interest to us (like encoding.TextUnmarshaler). -func indirect(v reflect.Value) reflect.Value { - if v.Kind() != reflect.Ptr { - if v.CanAddr() { - pv := v.Addr() - if _, ok := pv.Interface().(encoding.TextUnmarshaler); ok { - return pv - } - } - return v - } - if v.IsNil() { - v.Set(reflect.New(v.Type().Elem())) - } - return indirect(reflect.Indirect(v)) -} - -func isUnifiable(rv reflect.Value) bool { - if rv.CanSet() { - return true - } - if _, ok := rv.Interface().(encoding.TextUnmarshaler); ok { - return true - } - return false -} - -func tstring(rv reflect.Value) string { - return rv.Type().String() -} - -func badtype(expected string, data interface{}) error { - return e("Expected %s but found '%T'.", expected, data) -} - -func mismatch(user reflect.Value, expected string, data interface{}) error { - return e("Type mismatch for %s. Expected %s but found '%T'.", - tstring(user), expected, data) -} - -func insensitiveGet( - tmap map[string]interface{}, kname string) (interface{}, bool) { - - if datum, ok := tmap[kname]; ok { - return datum, true - } - for k, v := range tmap { - if strings.EqualFold(kname, k) { - return v, true - } - } - return nil, false -} - -// MetaData allows access to meta information about TOML data that may not -// be inferrable via reflection. In particular, whether a key has been defined -// and the TOML type of a key. -type MetaData struct { - mapping map[string]interface{} - types map[string]tomlType - keys []Key -} - -// IsDefined returns true if the key given exists in the TOML data. The key -// should be specified hierarchially. e.g., -// -// // access the TOML key 'a.b.c' -// IsDefined("a", "b", "c") -// -// IsDefined will return false if an empty key given. Keys are case sensitive. -func (md MetaData) IsDefined(key ...string) bool { - var hashOrVal interface{} - var hash map[string]interface{} - var ok bool - - if len(key) == 0 { - return false - } - - hashOrVal = md.mapping - for _, k := range key { - if hash, ok = hashOrVal.(map[string]interface{}); !ok { - return false - } - if hashOrVal, ok = hash[k]; !ok { - return false - } - } - return true -} - -// Type returns a string representation of the type of the key specified. -// -// Type will return the empty string if given an empty key or a key that -// does not exist. Keys are case sensitive. -func (md MetaData) Type(key ...string) string { - fullkey := strings.Join(key, ".") - if typ, ok := md.types[fullkey]; ok { - return typ.typeString() - } - return "" -} - -// Key is the type of any TOML key, including key groups. Use (MetaData).Keys -// to get values of this type. -type Key []string - -func (k Key) String() string { - return strings.Join(k, ".") -} - -func (k Key) add(piece string) Key { - newKey := make(Key, len(k)) - copy(newKey, k) - return append(newKey, piece) -} - -// Keys returns a slice of every key in the TOML data, including key groups. -// Each key is itself a slice, where the first element is the top of the -// hierarchy and the last is the most specific. -// -// The list will have the same order as the keys appeared in the TOML data. -// -// All keys returned are non-empty. -func (md MetaData) Keys() []Key { - return md.keys -} - -func allKeys(m map[string]interface{}, context Key) []Key { - keys := make([]Key, 0, len(m)) - for k, v := range m { - keys = append(keys, context.add(k)) - if t, ok := v.(map[string]interface{}); ok { - keys = append(keys, allKeys(t, context.add(k))...) 
- } - } - return keys -} diff --git a/third_party/github.com/BurntSushi/toml/decode_test.go b/third_party/github.com/BurntSushi/toml/decode_test.go deleted file mode 100644 index c9cb64edf..000000000 --- a/third_party/github.com/BurntSushi/toml/decode_test.go +++ /dev/null @@ -1,428 +0,0 @@ -package toml - -import ( - "encoding/json" - "fmt" - "log" - "reflect" - "testing" - "time" -) - -func init() { - log.SetFlags(0) -} - -var testSimple = ` -age = 250 - -andrew = "gallant" -kait = "brady" -now = 1987-07-05T05:45:00Z -yesOrNo = true -pi = 3.14 -colors = [ - ["red", "green", "blue"], - ["cyan", "magenta", "yellow", "black"], -] - -[Annoying.Cats] -plato = "smelly" -cauchy = "stupido" - -` - -type kitties struct { - Plato string - Cauchy string -} - -type simple struct { - Age int - Colors [][]string - Pi float64 - YesOrNo bool - Now time.Time - Andrew string - Kait string - Annoying map[string]kitties -} - -func TestDecode(t *testing.T) { - var val simple - - md, err := Decode(testSimple, &val) - if err != nil { - t.Fatal(err) - } - - testf("Is 'Annoying.Cats.plato' defined? %v\n", - md.IsDefined("Annoying", "Cats", "plato")) - testf("Is 'Cats.Stinky' defined? %v\n", md.IsDefined("Cats", "Stinky")) - testf("Type of 'colors'? %s\n\n", md.Type("colors")) - - testf("%v\n", val) -} - -func TestDecodeEmbedded(t *testing.T) { - type Dog struct{ Name string } - - tests := map[string]struct { - input string - decodeInto interface{} - wantDecoded interface{} - }{ - "embedded struct": { - input: `Name = "milton"`, - decodeInto: &struct{ Dog }{}, - wantDecoded: &struct{ Dog }{Dog{"milton"}}, - }, - "embedded non-nil pointer to struct": { - input: `Name = "milton"`, - decodeInto: &struct{ *Dog }{}, - wantDecoded: &struct{ *Dog }{&Dog{"milton"}}, - }, - "embedded nil pointer to struct": { - input: ``, - decodeInto: &struct{ *Dog }{}, - wantDecoded: &struct{ *Dog }{nil}, - }, - } - - for label, test := range tests { - _, err := Decode(test.input, test.decodeInto) - if err != nil { - t.Fatal(err) - } - - want, got := jsonstr(test.wantDecoded), jsonstr(test.decodeInto) - if want != got { - t.Errorf("%s: want decoded == %+v, got %+v", label, want, got) - } - } -} - -// jsonstr allows comparison of deeply nested structs with pointer members. -func jsonstr(o interface{}) string { - s, err := json.MarshalIndent(o, "", " ") - if err != nil { - panic(err.Error()) - } - return string(s) -} - -var tomlTableArrays = ` -[[albums]] -name = "Born to Run" - - [[albums.songs]] - name = "Jungleland" - - [[albums.songs]] - name = "Meeting Across the River" - -[[albums]] -name = "Born in the USA" - - [[albums.songs]] - name = "Glory Days" - - [[albums.songs]] - name = "Dancing in the Dark" -` - -type Music struct { - Albums []Album -} - -type Album struct { - Name string - Songs []Song -} - -type Song struct { - Name string -} - -func TestTableArrays(t *testing.T) { - expected := Music{[]Album{ - {"Born to Run", []Song{{"Jungleland"}, {"Meeting Across the River"}}}, - {"Born in the USA", []Song{{"Glory Days"}, {"Dancing in the Dark"}}}, - }} - var got Music - if _, err := Decode(tomlTableArrays, &got); err != nil { - t.Fatal(err) - } - if !reflect.DeepEqual(expected, got) { - t.Fatalf("\n%#v\n!=\n%#v\n", expected, got) - } -} - -// Case insensitive matching tests. -// A bit more comprehensive than needed given the current implementation, -// but implementations change. -// Probably still missing demonstrations of some ugly corner cases regarding -// case insensitive matching and multiple fields. 
-var caseToml = ` -tOpString = "string" -tOpInt = 1 -tOpFloat = 1.1 -tOpBool = true -tOpdate = 2006-01-02T15:04:05Z -tOparray = [ "array" ] -Match = "i should be in Match only" -MatcH = "i should be in MatcH only" -once = "just once" -[nEst.eD] -nEstedString = "another string" -` - -type Insensitive struct { - TopString string - TopInt int - TopFloat float64 - TopBool bool - TopDate time.Time - TopArray []string - Match string - MatcH string - Once string - OncE string - Nest InsensitiveNest -} - -type InsensitiveNest struct { - Ed InsensitiveEd -} - -type InsensitiveEd struct { - NestedString string -} - -func TestCase(t *testing.T) { - tme, err := time.Parse(time.RFC3339, time.RFC3339[:len(time.RFC3339)-5]) - if err != nil { - panic(err) - } - expected := Insensitive{ - TopString: "string", - TopInt: 1, - TopFloat: 1.1, - TopBool: true, - TopDate: tme, - TopArray: []string{"array"}, - MatcH: "i should be in MatcH only", - Match: "i should be in Match only", - Once: "just once", - OncE: "", - Nest: InsensitiveNest{ - Ed: InsensitiveEd{NestedString: "another string"}, - }, - } - var got Insensitive - _, err = Decode(caseToml, &got) - if err != nil { - t.Fatal(err) - } - if !reflect.DeepEqual(expected, got) { - t.Fatalf("\n%#v\n!=\n%#v\n", expected, got) - } -} - -func TestPointers(t *testing.T) { - type Object struct { - Type string - Description string - } - - type Dict struct { - NamedObject map[string]*Object - BaseObject *Object - Strptr *string - Strptrs []*string - } - s1, s2, s3 := "blah", "abc", "def" - expected := &Dict{ - Strptr: &s1, - Strptrs: []*string{&s2, &s3}, - NamedObject: map[string]*Object{ - "foo": {"FOO", "fooooo!!!"}, - "bar": {"BAR", "ba-ba-ba-ba-barrrr!!!"}, - }, - BaseObject: &Object{"BASE", "da base"}, - } - - ex1 := ` -Strptr = "blah" -Strptrs = ["abc", "def"] - -[NamedObject.foo] -Type = "FOO" -Description = "fooooo!!!" - -[NamedObject.bar] -Type = "BAR" -Description = "ba-ba-ba-ba-barrrr!!!" - -[BaseObject] -Type = "BASE" -Description = "da base" -` - dict := new(Dict) - _, err := Decode(ex1, dict) - if err != nil { - t.Errorf("Decode error: %v", err) - } - if !reflect.DeepEqual(expected, dict) { - t.Fatalf("\n%#v\n!=\n%#v\n", expected, dict) - } -} - -func ExamplePrimitiveDecode() { - var md MetaData - var err error - - var tomlBlob = ` -ranking = ["Springsteen", "J Geils"] - -[bands.Springsteen] -started = 1973 -albums = ["Greetings", "WIESS", "Born to Run", "Darkness"] - -[bands.J Geils] -started = 1970 -albums = ["The J. Geils Band", "Full House", "Blow Your Face Out"] -` - - type band struct { - Started int - Albums []string - } - - type classics struct { - Ranking []string - Bands map[string]Primitive - } - - // Do the initial decode. Reflection is delayed on Primitive values. - var music classics - if md, err = Decode(tomlBlob, &music); err != nil { - log.Fatal(err) - } - - // MetaData still includes information on Primitive values. - fmt.Printf("Is `bands.Springsteen` defined? %v\n", - md.IsDefined("bands", "Springsteen")) - - // Decode primitive data into Go values. - for _, artist := range music.Ranking { - // A band is a primitive value, so we need to decode it to get a - // real `band` value. - primValue := music.Bands[artist] - - var aBand band - if err = PrimitiveDecode(primValue, &aBand); err != nil { - log.Fatal(err) - } - fmt.Printf("%s started in %d.\n", artist, aBand.Started) - } - - // Output: - // Is `bands.Springsteen` defined? true - // Springsteen started in 1973. - // J Geils started in 1970. 
-} - -func ExampleDecode() { - var tomlBlob = ` -# Some comments. -[alpha] -ip = "10.0.0.1" - - [alpha.config] - Ports = [ 8001, 8002 ] - Location = "Toronto" - Created = 1987-07-05T05:45:00Z - -[beta] -ip = "10.0.0.2" - - [beta.config] - Ports = [ 9001, 9002 ] - Location = "New Jersey" - Created = 1887-01-05T05:55:00Z -` - - type serverConfig struct { - Ports []int - Location string - Created time.Time - } - - type server struct { - IP string `toml:"ip"` - Config serverConfig `toml:"config"` - } - - type servers map[string]server - - var config servers - if _, err := Decode(tomlBlob, &config); err != nil { - log.Fatal(err) - } - - for _, name := range []string{"alpha", "beta"} { - s := config[name] - fmt.Printf("Server: %s (ip: %s) in %s created on %s\n", - name, s.IP, s.Config.Location, - s.Config.Created.Format("2006-01-02")) - fmt.Printf("Ports: %v\n", s.Config.Ports) - } - - // Output: - // Server: alpha (ip: 10.0.0.1) in Toronto created on 1987-07-05 - // Ports: [8001 8002] - // Server: beta (ip: 10.0.0.2) in New Jersey created on 1887-01-05 - // Ports: [9001 9002] -} - -type duration struct { - time.Duration -} - -func (d *duration) UnmarshalText(text []byte) error { - var err error - d.Duration, err = time.ParseDuration(string(text)) - return err -} - -// Example Unmarshaler blah blah. -func ExampleUnmarshaler() { - blob := ` -[[song]] -name = "Thunder Road" -duration = "4m49s" - -[[song]] -name = "Stairway to Heaven" -duration = "8m03s" -` - type song struct { - Name string - Duration duration - } - type songs struct { - Song []song - } - var favorites songs - if _, err := Decode(blob, &favorites); err != nil { - log.Fatal(err) - } - - for _, s := range favorites.Song { - fmt.Printf("%s (%s)\n", s.Name, s.Duration) - } - // Output: - // Thunder Road (4m49s) - // Stairway to Heaven (8m3s) -} diff --git a/third_party/github.com/BurntSushi/toml/doc.go b/third_party/github.com/BurntSushi/toml/doc.go deleted file mode 100644 index 1c2d7dffc..000000000 --- a/third_party/github.com/BurntSushi/toml/doc.go +++ /dev/null @@ -1,10 +0,0 @@ -/* -Package toml provides facilities for decoding TOML configuration files -via reflection. - -Specification: https://github.com/mojombo/toml - -Use github.com/BurntSushi/toml/tomlv to check whether a file is valid -TOML or not, with helpful error messages. -*/ -package toml diff --git a/third_party/github.com/BurntSushi/toml/encode.go b/third_party/github.com/BurntSushi/toml/encode.go deleted file mode 100644 index b3d748743..000000000 --- a/third_party/github.com/BurntSushi/toml/encode.go +++ /dev/null @@ -1,530 +0,0 @@ -package toml - -// TODO: Build a decent encoder. -// Interestingly, this isn't as trivial as recursing down the type of the -// value given and outputting the corresponding TOML. In particular, multiple -// TOML types (especially if tuples are added) can map to a single Go type, so -// that the reverse correspondence isn't clear. -// -// One possible avenue is to choose a reasonable default (like structs map -// to hashes), but allow the user to override with struct tags. But this seems -// like a mess. -// -// The other possibility is to scrap an encoder altogether. After all, TOML -// is a configuration file format, and not a data exchange format. 
- -import ( - "bufio" - "encoding" - "errors" - "fmt" - "io" - "reflect" - "sort" - "strconv" - "strings" -) - -var ( - ErrArrayMixedElementTypes = errors.New( - "can't encode array with mixed element types") - ErrArrayNilElement = errors.New( - "can't encode array with nil element") -) - -type Encoder struct { - // A single indentation level. By default it is two spaces. - Indent string - - w *bufio.Writer - - // hasWritten is whether we have written any output to w yet. - hasWritten bool -} - -func NewEncoder(w io.Writer) *Encoder { - return &Encoder{ - w: bufio.NewWriter(w), - Indent: " ", - } -} - -func (enc *Encoder) Encode(v interface{}) error { - rv := eindirect(reflect.ValueOf(v)) - if err := enc.encode(Key([]string{}), rv); err != nil { - return err - } - return enc.w.Flush() -} - -func (enc *Encoder) encode(key Key, rv reflect.Value) error { - // Special case. If we can marshal the type to text, then we used that. - if _, ok := rv.Interface().(encoding.TextMarshaler); ok { - err := enc.eKeyEq(key) - if err != nil { - return err - } - return enc.eElement(rv) - } - - k := rv.Kind() - switch k { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, - reflect.Uint64, - reflect.Float32, reflect.Float64, - reflect.String, reflect.Bool: - err := enc.eKeyEq(key) - if err != nil { - return err - } - return enc.eElement(rv) - case reflect.Array, reflect.Slice: - return enc.eArrayOrSlice(key, rv) - case reflect.Interface: - if rv.IsNil() { - return nil - } - return enc.encode(key, rv.Elem()) - case reflect.Map: - if rv.IsNil() { - return nil - } - return enc.eTable(key, rv) - case reflect.Ptr: - if rv.IsNil() { - return nil - } - return enc.encode(key, rv.Elem()) - case reflect.Struct: - return enc.eTable(key, rv) - } - return e("Unsupported type for key '%s': %s", key, k) -} - -// eElement encodes any value that can be an array element (primitives and -// arrays). -func (enc *Encoder) eElement(rv reflect.Value) error { - ws := func(s string) error { - _, err := io.WriteString(enc.w, s) - return err - } - // By the TOML spec, all floats must have a decimal with at least one - // number on either side. - floatAddDecimal := func(fstr string) string { - if !strings.Contains(fstr, ".") { - return fstr + ".0" - } - return fstr - } - - // Special case. Use text marshaler if it's available for this value. 
- if v, ok := rv.Interface().(encoding.TextMarshaler); ok { - s, err := v.MarshalText() - if err != nil { - return err - } - return ws(string(s)) - } - - var err error - k := rv.Kind() - switch k { - case reflect.Bool: - err = ws(strconv.FormatBool(rv.Bool())) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - err = ws(strconv.FormatInt(rv.Int(), 10)) - case reflect.Uint, reflect.Uint8, reflect.Uint16, - reflect.Uint32, reflect.Uint64: - err = ws(strconv.FormatUint(rv.Uint(), 10)) - case reflect.Float32: - err = ws(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 32))) - case reflect.Float64: - err = ws(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 64))) - case reflect.Array, reflect.Slice: - return enc.eArrayOrSliceElement(rv) - case reflect.Interface: - return enc.eElement(rv.Elem()) - case reflect.String: - s := rv.String() - s = strings.NewReplacer( - "\t", "\\t", - "\n", "\\n", - "\r", "\\r", - "\"", "\\\"", - "\\", "\\\\", - ).Replace(s) - err = ws("\"" + s + "\"") - default: - return e("Unexpected primitive type: %s", k) - } - return err -} - -func (enc *Encoder) eArrayOrSlice(key Key, rv reflect.Value) error { - // Determine whether this is an array of tables or of primitives. - elemV := reflect.ValueOf(nil) - if rv.Len() > 0 { - elemV = rv.Index(0) - } - isTableType, err := isTOMLTableType(rv.Type().Elem(), elemV) - if err != nil { - return err - } - - if len(key) > 0 && isTableType { - return enc.eArrayOfTables(key, rv) - } - - err = enc.eKeyEq(key) - if err != nil { - return err - } - return enc.eArrayOrSliceElement(rv) -} - -func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) error { - if _, err := enc.w.Write([]byte{'['}); err != nil { - return err - } - - length := rv.Len() - if length > 0 { - arrayElemType, isNil := tomlTypeName(rv.Index(0)) - if isNil { - return ErrArrayNilElement - } - - for i := 0; i < length; i++ { - elem := rv.Index(i) - - // Ensure that the array's elements each have the same TOML type. 
- elemType, isNil := tomlTypeName(elem) - if isNil { - return ErrArrayNilElement - } - if elemType != arrayElemType { - return ErrArrayMixedElementTypes - } - - if err := enc.eElement(elem); err != nil { - return err - } - if i != length-1 { - if _, err := enc.w.Write([]byte(", ")); err != nil { - return err - } - } - } - } - - if _, err := enc.w.Write([]byte{']'}); err != nil { - return err - } - return nil -} - -func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) error { - if enc.hasWritten { - _, err := enc.w.Write([]byte{'\n'}) - if err != nil { - return err - } - } - - for i := 0; i < rv.Len(); i++ { - trv := rv.Index(i) - if isNil(trv) { - continue - } - - _, err := fmt.Fprintf(enc.w, "%s[[%s]]\n", - strings.Repeat(enc.Indent, len(key)-1), key.String()) - if err != nil { - return err - } - - err = enc.eMapOrStruct(key, trv) - if err != nil { - return err - } - - if i != rv.Len()-1 { - if _, err := enc.w.Write([]byte("\n\n")); err != nil { - return err - } - } - enc.hasWritten = true - } - return nil -} - -func isStructOrMap(rv reflect.Value) bool { - switch rv.Kind() { - case reflect.Interface, reflect.Ptr: - return isStructOrMap(rv.Elem()) - case reflect.Map, reflect.Struct: - return true - default: - return false - } -} - -func (enc *Encoder) eTable(key Key, rv reflect.Value) error { - if enc.hasWritten { - _, err := enc.w.Write([]byte{'\n'}) - if err != nil { - return err - } - } - if len(key) > 0 { - _, err := fmt.Fprintf(enc.w, "%s[%s]\n", - strings.Repeat(enc.Indent, len(key)-1), key.String()) - if err != nil { - return err - } - } - return enc.eMapOrStruct(key, rv) -} - -func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value) error { - switch rv.Kind() { - case reflect.Map: - return enc.eMap(key, rv) - case reflect.Struct: - return enc.eStruct(key, rv) - case reflect.Ptr, reflect.Interface: - return enc.eMapOrStruct(key, rv.Elem()) - default: - panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String()) - } -} - -func (enc *Encoder) eMap(key Key, rv reflect.Value) error { - rt := rv.Type() - if rt.Key().Kind() != reflect.String { - return errors.New("can't encode a map with non-string key type") - } - - // Sort keys so that we have deterministic output. And write keys directly - // underneath this key first, before writing sub-structs or sub-maps. - var mapKeysDirect, mapKeysSub []string - for _, mapKey := range rv.MapKeys() { - k := mapKey.String() - mrv := rv.MapIndex(mapKey) - if isStructOrMap(mrv) { - mapKeysSub = append(mapKeysSub, k) - } else { - mapKeysDirect = append(mapKeysDirect, k) - } - } - - var writeMapKeys = func(mapKeys []string) error { - sort.Strings(mapKeys) - for i, mapKey := range mapKeys { - mrv := rv.MapIndex(reflect.ValueOf(mapKey)) - if isNil(mrv) { - // Don't write anything for nil fields. - continue - } - if err := enc.encode(key.add(mapKey), mrv); err != nil { - return err - } - - if i != len(mapKeys)-1 { - if _, err := enc.w.Write([]byte{'\n'}); err != nil { - return err - } - } - enc.hasWritten = true - } - - return nil - } - - err := writeMapKeys(mapKeysDirect) - if err != nil { - return err - } - err = writeMapKeys(mapKeysSub) - if err != nil { - return err - } - return nil -} - -func (enc *Encoder) eStruct(key Key, rv reflect.Value) error { - // Write keys for fields directly under this key first, because if we write - // a field that creates a new table, then all keys under it will be in that - // table (not the one we're writing here). 
- rt := rv.Type() - var fieldsDirect, fieldsSub [][]int - var addFields func(rt reflect.Type, rv reflect.Value, start []int) - addFields = func(rt reflect.Type, rv reflect.Value, start []int) { - for i := 0; i < rt.NumField(); i++ { - f := rt.Field(i) - frv := rv.Field(i) - if f.Anonymous { - t := frv.Type() - if t.Kind() == reflect.Ptr { - t = t.Elem() - frv = frv.Elem() - } - addFields(t, frv, f.Index) - } else if isStructOrMap(frv) { - fieldsSub = append(fieldsSub, append(start, f.Index...)) - } else { - fieldsDirect = append(fieldsDirect, append(start, f.Index...)) - } - } - } - addFields(rt, rv, nil) - - var writeFields = func(fields [][]int) error { - for i, fieldIndex := range fields { - sft := rt.FieldByIndex(fieldIndex) - sf := rv.FieldByIndex(fieldIndex) - if isNil(sf) { - // Don't write anything for nil fields. - continue - } - - keyName := sft.Tag.Get("toml") - if keyName == "-" { - continue - } - if keyName == "" { - keyName = sft.Name - } - - if err := enc.encode(key.add(keyName), sf); err != nil { - return err - } - - if i != len(fields)-1 { - if _, err := enc.w.Write([]byte{'\n'}); err != nil { - return err - } - } - enc.hasWritten = true - } - return nil - } - - err := writeFields(fieldsDirect) - if err != nil { - return err - } - if len(fieldsDirect) > 0 && len(fieldsSub) > 0 { - _, err = enc.w.Write([]byte{'\n'}) - if err != nil { - return err - } - } - err = writeFields(fieldsSub) - if err != nil { - return err - } - return nil -} - -// tomlTypeName returns the TOML type name of the Go value's type. It is used to -// determine whether the types of array elements are mixed (which is forbidden). -// If the Go value is nil, then it is illegal for it to be an array element, and -// valueIsNil is returned as true. -func tomlTypeName(rv reflect.Value) (typeName string, valueIsNil bool) { - if isNil(rv) { - return "", true - } - k := rv.Kind() - switch k { - case reflect.Bool: - return "bool", false - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, - reflect.Uint64: - return "integer", false - case reflect.Float32, reflect.Float64: - return "float", false - case reflect.Array, reflect.Slice: - return "array", false - case reflect.Ptr, reflect.Interface: - return tomlTypeName(rv.Elem()) - case reflect.String: - return "string", false - case reflect.Map, reflect.Struct: - return "table", false - default: - panic("unexpected reflect.Kind: " + k.String()) - } -} - -// isTOMLTableType returns whether this type and value represents a TOML table -// type (true) or element type (false). Both rt and rv are needed to determine -// this, in case the Go type is interface{} or in case rv is nil. If there is -// some other impossible situation detected, an error is returned. -func isTOMLTableType(rt reflect.Type, rv reflect.Value) (bool, error) { - k := rt.Kind() - switch k { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, - reflect.Uint64, - reflect.Float32, reflect.Float64, - reflect.String, reflect.Bool: - return false, nil - case reflect.Array, reflect.Slice: - // Make sure that these eventually contain an underlying non-table type - // element. 
- elemV := reflect.ValueOf(nil) - if rv.Len() > 0 { - elemV = rv.Index(0) - } - hasUnderlyingTableType, err := isTOMLTableType(rt.Elem(), elemV) - if err != nil { - return false, err - } - if hasUnderlyingTableType { - return true, errors.New("TOML array element can't contain a table") - } - return false, nil - case reflect.Ptr: - return isTOMLTableType(rt.Elem(), rv.Elem()) - case reflect.Interface: - if rv.Kind() == reflect.Interface { - return false, nil - } - return isTOMLTableType(rv.Type(), rv) - case reflect.Map, reflect.Struct: - return true, nil - default: - panic("unexpected reflect.Kind: " + k.String()) - } -} - -func isNil(rv reflect.Value) bool { - switch rv.Kind() { - case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: - return rv.IsNil() - default: - return false - } -} - -func (enc *Encoder) eKeyEq(key Key) error { - _, err := io.WriteString(enc.w, strings.Repeat(enc.Indent, len(key)-1)) - if err != nil { - return err - } - _, err = io.WriteString(enc.w, key[len(key)-1]+" = ") - if err != nil { - return err - } - return nil -} - -func eindirect(v reflect.Value) reflect.Value { - if v.Kind() != reflect.Ptr { - return v - } - return eindirect(reflect.Indirect(v)) -} diff --git a/third_party/github.com/BurntSushi/toml/encode_test.go b/third_party/github.com/BurntSushi/toml/encode_test.go deleted file mode 100644 index 3e2b13b2e..000000000 --- a/third_party/github.com/BurntSushi/toml/encode_test.go +++ /dev/null @@ -1,282 +0,0 @@ -package toml - -import ( - "bytes" - "testing" -) - -// XXX(burntsushi) -// I think these tests probably should be removed. They are good, but they -// ought to be obsolete by toml-test. -func TestEncode(t *testing.T) { - tests := map[string]struct { - input interface{} - wantOutput string - wantError error - }{ - "bool field": { - input: struct { - BoolTrue bool - BoolFalse bool - }{true, false}, - wantOutput: "BoolTrue = true\nBoolFalse = false", - }, - "int fields": { - input: struct { - Int int - Int8 int8 - Int16 int16 - Int32 int32 - Int64 int64 - }{1, 2, 3, 4, 5}, - wantOutput: "Int = 1\nInt8 = 2\nInt16 = 3\nInt32 = 4\nInt64 = 5", - }, - "uint fields": { - input: struct { - Uint uint - Uint8 uint8 - Uint16 uint16 - Uint32 uint32 - Uint64 uint64 - }{1, 2, 3, 4, 5}, - wantOutput: "Uint = 1\nUint8 = 2\nUint16 = 3\nUint32 = 4" + - "\nUint64 = 5", - }, - "float fields": { - input: struct { - Float32 float32 - Float64 float64 - }{1.5, 2.5}, - wantOutput: "Float32 = 1.5\nFloat64 = 2.5", - }, - "string field": { - input: struct{ String string }{"foo"}, - wantOutput: `String = "foo"`, - }, - "array fields": { - input: struct { - IntArray0 [0]int - IntArray3 [3]int - }{[0]int{}, [3]int{1, 2, 3}}, - wantOutput: "IntArray0 = []\nIntArray3 = [1, 2, 3]", - }, - "slice fields": { - input: struct{ IntSliceNil, IntSlice0, IntSlice3 []int }{ - nil, []int{}, []int{1, 2, 3}, - }, - wantOutput: "IntSlice0 = []\nIntSlice3 = [1, 2, 3]", - }, - "nested arrays and slices": { - input: struct { - SliceOfArrays [][2]int - ArrayOfSlices [2][]int - SliceOfArraysOfSlices [][2][]int - ArrayOfSlicesOfArrays [2][][2]int - SliceOfMixedArrays [][2]interface{} - ArrayOfMixedSlices [2][]interface{} - }{ - [][2]int{[2]int{1, 2}, [2]int{3, 4}}, - [2][]int{[]int{1, 2}, []int{3, 4}}, - [][2][]int{ - [2][]int{ - []int{1, 2}, []int{3, 4}, - }, - [2][]int{ - []int{5, 6}, []int{7, 8}, - }, - }, - [2][][2]int{ - [][2]int{ - [2]int{1, 2}, [2]int{3, 4}, - }, - [][2]int{ - [2]int{5, 6}, [2]int{7, 8}, - }, - }, - [][2]interface{}{ - [2]interface{}{1, 2}, [2]interface{}{"a", "b"}, - 
}, - [2][]interface{}{ - []interface{}{1, 2}, []interface{}{"a", "b"}, - }, - }, - wantOutput: `SliceOfArrays = [[1, 2], [3, 4]] -ArrayOfSlices = [[1, 2], [3, 4]] -SliceOfArraysOfSlices = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]] -ArrayOfSlicesOfArrays = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]] -SliceOfMixedArrays = [[1, 2], ["a", "b"]] -ArrayOfMixedSlices = [[1, 2], ["a", "b"]]`, - }, - "(error) slice with element type mismatch (string and integer)": { - input: struct{ Mixed []interface{} }{[]interface{}{1, "a"}}, - wantError: ErrArrayMixedElementTypes, - }, - "(error) slice with element type mismatch (integer and float)": { - input: struct{ Mixed []interface{} }{[]interface{}{1, 2.5}}, - wantError: ErrArrayMixedElementTypes, - }, - "slice with elems of differing Go types, same TOML types": { - input: struct { - MixedInts []interface{} - MixedFloats []interface{} - }{ - []interface{}{ - int(1), int8(2), int16(3), int32(4), int64(5), - uint(1), uint8(2), uint16(3), uint32(4), uint64(5), - }, - []interface{}{float32(1.5), float64(2.5)}, - }, - wantOutput: "MixedInts = [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]\n" + - "MixedFloats = [1.5, 2.5]", - }, - "(error) slice w/ element type mismatch (one is nested array)": { - input: struct{ Mixed []interface{} }{ - []interface{}{1, []interface{}{2}}, - }, - wantError: ErrArrayMixedElementTypes, - }, - "(error) slice with 1 nil element": { - input: struct{ NilElement1 []interface{} }{[]interface{}{nil}}, - wantError: ErrArrayNilElement, - }, - "(error) slice with 1 nil element (and other non-nil elements)": { - input: struct{ NilElement []interface{} }{ - []interface{}{1, nil}, - }, - wantError: ErrArrayNilElement, - }, - "simple map": { - input: map[string]int{"a": 1, "b": 2}, - wantOutput: "a = 1\nb = 2", - }, - "map with interface{} value type": { - input: map[string]interface{}{"a": 1, "b": "c"}, - wantOutput: "a = 1\nb = \"c\"", - }, - "map with interface{} value type, some of which are structs": { - input: map[string]interface{}{ - "a": struct{ Int int }{2}, - "b": 1, - }, - wantOutput: "b = 1\n[a]\n Int = 2", - }, - "nested map": { - input: map[string]map[string]int{ - "a": map[string]int{"b": 1}, - "c": map[string]int{"d": 2}, - }, - wantOutput: "[a]\n b = 1\n\n[c]\n d = 2", - }, - "nested struct": { - input: struct{ Struct struct{ Int int } }{ - struct{ Int int }{1}, - }, - wantOutput: "[Struct]\n Int = 1", - }, - "nested struct and non-struct field": { - input: struct { - Struct struct{ Int int } - Bool bool - }{struct{ Int int }{1}, true}, - wantOutput: "Bool = true\n\n[Struct]\n Int = 1", - }, - "2 nested structs": { - input: struct{ Struct1, Struct2 struct{ Int int } }{ - struct{ Int int }{1}, struct{ Int int }{2}, - }, - wantOutput: "[Struct1]\n Int = 1\n\n[Struct2]\n Int = 2", - }, - "deeply nested structs": { - input: struct { - Struct1, Struct2 struct{ Struct3 *struct{ Int int } } - }{ - struct{ Struct3 *struct{ Int int } }{&struct{ Int int }{1}}, - struct{ Struct3 *struct{ Int int } }{nil}, - }, - wantOutput: "[Struct1]\n [Struct1.Struct3]\n Int = 1" + - "\n\n[Struct2]\n", - }, - "nested struct with nil struct elem": { - input: struct { - Struct struct{ Inner *struct{ Int int } } - }{ - struct{ Inner *struct{ Int int } }{nil}, - }, - wantOutput: "[Struct]\n", - }, - "nested struct with no fields": { - input: struct { - Struct struct{ Inner struct{} } - }{ - struct{ Inner struct{} }{struct{}{}}, - }, - wantOutput: "[Struct]\n [Struct.Inner]\n", - }, - "struct with tags": { - input: struct { - Struct struct { - Int int `toml:"_int"` - } `toml:"_struct"` 
- Bool bool `toml:"_bool"` - }{ - struct { - Int int `toml:"_int"` - }{1}, true, - }, - wantOutput: "_bool = true\n\n[_struct]\n _int = 1", - }, - "embedded struct": { - input: struct{ Embedded }{Embedded{1}}, - wantOutput: "_int = 1", - }, - "embedded *struct": { - input: struct{ *Embedded }{&Embedded{1}}, - wantOutput: "_int = 1", - }, - "nested embedded struct": { - input: struct { - Struct struct{ Embedded } `toml:"_struct"` - }{struct{ Embedded }{Embedded{1}}}, - wantOutput: "[_struct]\n _int = 1", - }, - "nested embedded *struct": { - input: struct { - Struct struct{ *Embedded } `toml:"_struct"` - }{struct{ *Embedded }{&Embedded{1}}}, - wantOutput: "[_struct]\n _int = 1", - }, - "array of tables": { - input: struct { - Structs []*struct{ Int int } `toml:"struct"` - }{ - []*struct{ Int int }{ - {1}, nil, {3}, - }, - }, - wantOutput: "[[struct]]\n Int = 1\n\n[[struct]]\n Int = 3", - }, - } - for label, test := range tests { - var buf bytes.Buffer - e := NewEncoder(&buf) - err := e.Encode(test.input) - if err != test.wantError { - if test.wantError != nil { - t.Errorf("%s: want Encode error %v, got %v", - label, test.wantError, err) - } else { - t.Errorf("%s: Encode failed: %s", label, err) - } - } - if err != nil { - continue - } - if got := buf.String(); test.wantOutput != got { - t.Errorf("%s: want %q, got %q", label, test.wantOutput, got) - } - } -} - -type Embedded struct { - Int int `toml:"_int"` -} diff --git a/third_party/github.com/BurntSushi/toml/lex.go b/third_party/github.com/BurntSushi/toml/lex.go deleted file mode 100644 index 3dcae4923..000000000 --- a/third_party/github.com/BurntSushi/toml/lex.go +++ /dev/null @@ -1,741 +0,0 @@ -package toml - -import ( - "fmt" - "unicode/utf8" -) - -type itemType int - -const ( - itemError itemType = iota - itemNIL // used in the parser to indicate no type - itemEOF - itemText - itemString - itemBool - itemInteger - itemFloat - itemDatetime - itemArray // the start of an array - itemArrayEnd - itemTableStart - itemTableEnd - itemArrayTableStart - itemArrayTableEnd - itemKeyStart - itemCommentStart -) - -const ( - eof = 0 - tableStart = '[' - tableEnd = ']' - arrayTableStart = '[' - arrayTableEnd = ']' - tableSep = '.' - keySep = '=' - arrayStart = '[' - arrayEnd = ']' - arrayValTerm = ',' - commentStart = '#' - stringStart = '"' - stringEnd = '"' -) - -type stateFn func(lx *lexer) stateFn - -type lexer struct { - input string - start int - pos int - width int - line int - state stateFn - items chan item - - // A stack of state functions used to maintain context. - // The idea is to reuse parts of the state machine in various places. - // For example, values can appear at the top level or within arbitrarily - // nested arrays. The last state on the stack is used after a value has - // been lexed. Similarly for comments. 
- stack []stateFn -} - -type item struct { - typ itemType - val string - line int -} - -func (lx *lexer) nextItem() item { - for { - select { - case item := <-lx.items: - return item - default: - lx.state = lx.state(lx) - } - } - panic("not reached") -} - -func lex(input string) *lexer { - lx := &lexer{ - input: input, - state: lexTop, - line: 1, - items: make(chan item, 10), - stack: make([]stateFn, 0, 10), - } - return lx -} - -func (lx *lexer) push(state stateFn) { - lx.stack = append(lx.stack, state) -} - -func (lx *lexer) pop() stateFn { - if len(lx.stack) == 0 { - return lx.errorf("BUG in lexer: no states to pop.") - } - last := lx.stack[len(lx.stack)-1] - lx.stack = lx.stack[0 : len(lx.stack)-1] - return last -} - -func (lx *lexer) current() string { - return lx.input[lx.start:lx.pos] -} - -func (lx *lexer) emit(typ itemType) { - lx.items <- item{typ, lx.current(), lx.line} - lx.start = lx.pos -} - -func (lx *lexer) next() (r rune) { - if lx.pos >= len(lx.input) { - lx.width = 0 - return eof - } - - if lx.input[lx.pos] == '\n' { - lx.line++ - } - r, lx.width = utf8.DecodeRuneInString(lx.input[lx.pos:]) - lx.pos += lx.width - return r -} - -// ignore skips over the pending input before this point. -func (lx *lexer) ignore() { - lx.start = lx.pos -} - -// backup steps back one rune. Can be called only once per call of next. -func (lx *lexer) backup() { - lx.pos -= lx.width - if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' { - lx.line-- - } -} - -// accept consumes the next rune if it's equal to `valid`. -func (lx *lexer) accept(valid rune) bool { - if lx.next() == valid { - return true - } - lx.backup() - return false -} - -// peek returns but does not consume the next rune in the input. -func (lx *lexer) peek() rune { - r := lx.next() - lx.backup() - return r -} - -// errorf stops all lexing by emitting an error and returning `nil`. -// Note that any value that is a character is escaped if it's a special -// character (new lines, tabs, etc.). -func (lx *lexer) errorf(format string, values ...interface{}) stateFn { - for i, value := range values { - if v, ok := value.(rune); ok { - values[i] = escapeSpecial(v) - } - } - lx.items <- item{ - itemError, - fmt.Sprintf(format, values...), - lx.line, - } - return nil -} - -// lexTop consumes elements at the top level of TOML data. -func lexTop(lx *lexer) stateFn { - r := lx.next() - if isWhitespace(r) || isNL(r) { - return lexSkip(lx, lexTop) - } - - switch r { - case commentStart: - lx.push(lexTop) - return lexCommentStart - case tableStart: - return lexTableStart - case eof: - if lx.pos > lx.start { - return lx.errorf("Unexpected EOF.") - } - lx.emit(itemEOF) - return nil - } - - // At this point, the only valid item can be a key, so we back up - // and let the key lexer do the rest. - lx.backup() - lx.push(lexTopEnd) - return lexKeyStart -} - -// lexTopEnd is entered whenever a top-level item has been consumed. (A value -// or a table.) It must see only whitespace, and will turn back to lexTop -// upon a new line. If it sees EOF, it will quit the lexer successfully. -func lexTopEnd(lx *lexer) stateFn { - r := lx.next() - switch { - case r == commentStart: - // a comment will read to a new line for us. 
- lx.push(lexTop) - return lexCommentStart - case isWhitespace(r): - return lexTopEnd - case isNL(r): - lx.ignore() - return lexTop - case r == eof: - lx.ignore() - return lexTop - } - return lx.errorf("Expected a top-level item to end with a new line, "+ - "comment or EOF, but got '%s' instead.", r) -} - -// lexTable lexes the beginning of a table. Namely, it makes sure that -// it starts with a character other than '.' and ']'. -// It assumes that '[' has already been consumed. -// It also handles the case that this is an item in an array of tables. -// e.g., '[[name]]'. -func lexTableStart(lx *lexer) stateFn { - if lx.peek() == arrayTableStart { - lx.next() - lx.emit(itemArrayTableStart) - lx.push(lexArrayTableEnd) - } else { - lx.emit(itemTableStart) - lx.push(lexTableEnd) - } - return lexTableNameStart -} - -func lexTableEnd(lx *lexer) stateFn { - lx.emit(itemTableEnd) - return lexTopEnd -} - -func lexArrayTableEnd(lx *lexer) stateFn { - if r := lx.next(); r != arrayTableEnd { - return lx.errorf("Expected end of table array name delimiter '%s', "+ - "but got '%s' instead.", arrayTableEnd, r) - } - lx.emit(itemArrayTableEnd) - return lexTopEnd -} - -func lexTableNameStart(lx *lexer) stateFn { - switch lx.next() { - case tableEnd: - return lx.errorf("Unexpected end of table. (Tables cannot " + - "be empty.)") - case tableSep: - return lx.errorf("Unexpected table separator. (Tables cannot " + - "be empty.)") - } - return lexTableName -} - -// lexTableName lexes the name of a table. It assumes that at least one -// valid character for the table has already been read. -func lexTableName(lx *lexer) stateFn { - switch lx.peek() { - case tableStart: - return lx.errorf("Table names cannot contain '%s' or '%s'.", - tableStart, tableEnd) - case tableEnd: - lx.emit(itemText) - lx.next() - return lx.pop() - case tableSep: - lx.emit(itemText) - lx.next() - lx.ignore() - return lexTableNameStart - } - lx.next() - return lexTableName -} - -// lexKeyStart consumes a key name up until the first non-whitespace character. -// lexKeyStart will ignore whitespace. -func lexKeyStart(lx *lexer) stateFn { - r := lx.peek() - switch { - case r == keySep: - return lx.errorf("Unexpected key separator '%s'.", keySep) - case isWhitespace(r) || isNL(r): - lx.next() - return lexSkip(lx, lexKeyStart) - } - - lx.ignore() - lx.emit(itemKeyStart) - lx.next() - return lexKey -} - -// lexKey consumes the text of a key. Assumes that the first character (which -// is not whitespace) has already been consumed. -func lexKey(lx *lexer) stateFn { - r := lx.peek() - - // XXX: Possible divergence from spec? - // "Keys start with the first non-whitespace character and end with the - // last non-whitespace character before the equals sign." - // Note here that whitespace is either a tab or a space. - // But we'll call it quits if we see a new line too. - if isWhitespace(r) || isNL(r) { - lx.emit(itemText) - return lexKeyEnd - } - - // Let's also call it quits if we see an equals sign. - if r == keySep { - lx.emit(itemText) - return lexKeyEnd - } - - lx.next() - return lexKey -} - -// lexKeyEnd consumes the end of a key (up to the key separator). -// Assumes that the first whitespace character after a key (or the '=' -// separator) has NOT been consumed. 
-func lexKeyEnd(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r) || isNL(r): - return lexSkip(lx, lexKeyEnd) - case r == keySep: - return lexSkip(lx, lexValue) - } - return lx.errorf("Expected key separator '%s', but got '%s' instead.", - keySep, r) -} - -// lexValue starts the consumption of a value anywhere a value is expected. -// lexValue will ignore whitespace. -// After a value is lexed, the last state on the next is popped and returned. -func lexValue(lx *lexer) stateFn { - // We allow whitespace to precede a value, but NOT new lines. - // In array syntax, the array states are responsible for ignoring new lines. - r := lx.next() - if isWhitespace(r) { - return lexSkip(lx, lexValue) - } - - switch { - case r == arrayStart: - lx.ignore() - lx.emit(itemArray) - return lexArrayValue - case r == stringStart: - lx.ignore() // ignore the '"' - return lexString - case r == 't': - return lexTrue - case r == 'f': - return lexFalse - case r == '-': - return lexNumberStart - case isDigit(r): - lx.backup() // avoid an extra state and use the same as above - return lexNumberOrDateStart - case r == '.': // special error case, be kind to users - return lx.errorf("Floats must start with a digit, not '.'.") - } - return lx.errorf("Expected value but found '%s' instead.", r) -} - -// lexArrayValue consumes one value in an array. It assumes that '[' or ',' -// have already been consumed. All whitespace and new lines are ignored. -func lexArrayValue(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r) || isNL(r): - return lexSkip(lx, lexArrayValue) - case r == commentStart: - lx.push(lexArrayValue) - return lexCommentStart - case r == arrayValTerm: - return lx.errorf("Unexpected array value terminator '%s'.", - arrayValTerm) - case r == arrayEnd: - return lexArrayEnd - } - - lx.backup() - lx.push(lexArrayValueEnd) - return lexValue -} - -// lexArrayValueEnd consumes the cruft between values of an array. Namely, -// it ignores whitespace and expects either a ',' or a ']'. -func lexArrayValueEnd(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r) || isNL(r): - return lexSkip(lx, lexArrayValueEnd) - case r == commentStart: - lx.push(lexArrayValueEnd) - return lexCommentStart - case r == arrayValTerm: - return lexArrayValue // move on to the next value - case r == arrayEnd: - return lexArrayEnd - } - return lx.errorf("Expected an array value terminator '%s' or an array "+ - "terminator '%s', but got '%s' instead.", arrayValTerm, arrayEnd, r) -} - -// lexArrayEnd finishes the lexing of an array. It assumes that a ']' has -// just been consumed. -func lexArrayEnd(lx *lexer) stateFn { - lx.ignore() - lx.emit(itemArrayEnd) - return lx.pop() -} - -// lexString consumes the inner contents of a string. It assumes that the -// beginning '"' has already been consumed and ignored. -func lexString(lx *lexer) stateFn { - r := lx.next() - switch { - case isNL(r): - return lx.errorf("Strings cannot contain new lines.") - case r == '\\': - return lexStringEscape - case r == stringEnd: - lx.backup() - lx.emit(itemString) - lx.next() - lx.ignore() - return lx.pop() - } - return lexString -} - -// lexStringEscape consumes an escaped character. It assumes that the preceding -// '\\' has already been consumed. 
-func lexStringEscape(lx *lexer) stateFn { - r := lx.next() - switch r { - case 'b': - fallthrough - case 't': - fallthrough - case 'n': - fallthrough - case 'f': - fallthrough - case 'r': - fallthrough - case '"': - fallthrough - case '/': - fallthrough - case '\\': - return lexString - case 'u': - return lexStringUnicode - } - return lx.errorf("Invalid escape character '%s'. Only the following "+ - "escape characters are allowed: "+ - "\\b, \\t, \\n, \\f, \\r, \\\", \\/, \\\\, and \\uXXXX.", r) -} - -// lexStringBinary consumes two hexadecimal digits following '\x'. It assumes -// that the '\x' has already been consumed. -func lexStringUnicode(lx *lexer) stateFn { - var r rune - - for i := 0; i < 4; i++ { - r = lx.next() - if !isHexadecimal(r) { - return lx.errorf("Expected four hexadecimal digits after '\\x', "+ - "but got '%s' instead.", lx.current()) - } - } - return lexString -} - -// lexNumberOrDateStart consumes either a (positive) integer, float or datetime. -// It assumes that NO negative sign has been consumed. -func lexNumberOrDateStart(lx *lexer) stateFn { - r := lx.next() - if !isDigit(r) { - if r == '.' { - return lx.errorf("Floats must start with a digit, not '.'.") - } else { - return lx.errorf("Expected a digit but got '%s'.", r) - } - } - return lexNumberOrDate -} - -// lexNumberOrDate consumes either a (positive) integer, float or datetime. -func lexNumberOrDate(lx *lexer) stateFn { - r := lx.next() - switch { - case r == '-': - if lx.pos-lx.start != 5 { - return lx.errorf("All ISO8601 dates must be in full Zulu form.") - } - return lexDateAfterYear - case isDigit(r): - return lexNumberOrDate - case r == '.': - return lexFloatStart - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexDateAfterYear consumes a full Zulu Datetime in ISO8601 format. -// It assumes that "YYYY-" has already been consumed. -func lexDateAfterYear(lx *lexer) stateFn { - formats := []rune{ - // digits are '0'. - // everything else is direct equality. - '0', '0', '-', '0', '0', - 'T', - '0', '0', ':', '0', '0', ':', '0', '0', - 'Z', - } - for _, f := range formats { - r := lx.next() - if f == '0' { - if !isDigit(r) { - return lx.errorf("Expected digit in ISO8601 datetime, "+ - "but found '%s' instead.", r) - } - } else if f != r { - return lx.errorf("Expected '%s' in ISO8601 datetime, "+ - "but found '%s' instead.", f, r) - } - } - lx.emit(itemDatetime) - return lx.pop() -} - -// lexNumberStart consumes either an integer or a float. It assumes that a -// negative sign has already been read, but that *no* digits have been consumed. -// lexNumberStart will move to the appropriate integer or float states. -func lexNumberStart(lx *lexer) stateFn { - // we MUST see a digit. Even floats have to start with a digit. - r := lx.next() - if !isDigit(r) { - if r == '.' { - return lx.errorf("Floats must start with a digit, not '.'.") - } else { - return lx.errorf("Expected a digit but got '%s'.", r) - } - } - return lexNumber -} - -// lexNumber consumes an integer or a float after seeing the first digit. -func lexNumber(lx *lexer) stateFn { - r := lx.next() - switch { - case isDigit(r): - return lexNumber - case r == '.': - return lexFloatStart - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexFloatStart starts the consumption of digits of a float after a '.'. -// Namely, at least one digit is required. 
-func lexFloatStart(lx *lexer) stateFn { - r := lx.next() - if !isDigit(r) { - return lx.errorf("Floats must have a digit after the '.', but got "+ - "'%s' instead.", r) - } - return lexFloat -} - -// lexFloat consumes the digits of a float after a '.'. -// Assumes that one digit has been consumed after a '.' already. -func lexFloat(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexFloat - } - - lx.backup() - lx.emit(itemFloat) - return lx.pop() -} - -// lexTrue consumes the "rue" in "true". It assumes that 't' has already -// been consumed. -func lexTrue(lx *lexer) stateFn { - if r := lx.next(); r != 'r' { - return lx.errorf("Expected 'tr', but found 't%s' instead.", r) - } - if r := lx.next(); r != 'u' { - return lx.errorf("Expected 'tru', but found 'tr%s' instead.", r) - } - if r := lx.next(); r != 'e' { - return lx.errorf("Expected 'true', but found 'tru%s' instead.", r) - } - lx.emit(itemBool) - return lx.pop() -} - -// lexFalse consumes the "alse" in "false". It assumes that 'f' has already -// been consumed. -func lexFalse(lx *lexer) stateFn { - if r := lx.next(); r != 'a' { - return lx.errorf("Expected 'fa', but found 'f%s' instead.", r) - } - if r := lx.next(); r != 'l' { - return lx.errorf("Expected 'fal', but found 'fa%s' instead.", r) - } - if r := lx.next(); r != 's' { - return lx.errorf("Expected 'fals', but found 'fal%s' instead.", r) - } - if r := lx.next(); r != 'e' { - return lx.errorf("Expected 'false', but found 'fals%s' instead.", r) - } - lx.emit(itemBool) - return lx.pop() -} - -// lexCommentStart begins the lexing of a comment. It will emit -// itemCommentStart and consume no characters, passing control to lexComment. -func lexCommentStart(lx *lexer) stateFn { - lx.ignore() - lx.emit(itemCommentStart) - return lexComment -} - -// lexComment lexes an entire comment. It assumes that '#' has been consumed. -// It will consume *up to* the first new line character, and pass control -// back to the last state on the stack. -func lexComment(lx *lexer) stateFn { - r := lx.peek() - if isNL(r) || r == eof { - lx.emit(itemText) - return lx.pop() - } - lx.next() - return lexComment -} - -// lexSkip ignores all slurped input and moves on to the next state. -func lexSkip(lx *lexer, nextState stateFn) stateFn { - return func(lx *lexer) stateFn { - lx.ignore() - return nextState - } -} - -// isWhitespace returns true if `r` is a whitespace character according -// to the spec. 
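
The lexer deleted above is built on the "state function plus stack" pattern: each state consumes some input and returns the next state, and shared states (values, comments) return to whatever context pushed them. The following stand-alone sketch illustrates only that pattern; every identifier in it is illustrative and none of it belongs to the removed package.

```go
package main

import "fmt"

// stateFn mirrors the shape used by the removed lexer: a state consumes
// input and returns the next state (nil stops the machine).
type stateFn func(*miniLexer) stateFn

type miniLexer struct {
	input string
	pos   int
	items []string
	stack []stateFn // pushed "return" states, like the real lexer's stack
}

func (lx *miniLexer) push(s stateFn) { lx.stack = append(lx.stack, s) }

func (lx *miniLexer) pop() stateFn {
	last := lx.stack[len(lx.stack)-1]
	lx.stack = lx.stack[:len(lx.stack)-1]
	return last
}

func lexStart(lx *miniLexer) stateFn {
	if lx.pos >= len(lx.input) {
		return nil // EOF: stop the machine
	}
	if lx.input[lx.pos] == '#' {
		lx.push(lexStart) // come back here once the comment is consumed
		return lexComment
	}
	lx.items = append(lx.items, string(lx.input[lx.pos]))
	lx.pos++
	return lexStart
}

func lexComment(lx *miniLexer) stateFn {
	// Consume up to, but not including, the newline, like lexComment above.
	for lx.pos < len(lx.input) && lx.input[lx.pos] != '\n' {
		lx.pos++
	}
	return lx.pop() // resume whichever state pushed us
}

func main() {
	lx := &miniLexer{input: "a#comment\nb"}
	for st := stateFn(lexStart); st != nil; {
		st = st(lx)
	}
	fmt.Printf("%q\n", lx.items) // ["a" "\n" "b"]
}
```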
-func isWhitespace(r rune) bool { - return r == '\t' || r == ' ' -} - -func isNL(r rune) bool { - return r == '\n' || r == '\r' -} - -func isDigit(r rune) bool { - return r >= '0' && r <= '9' -} - -func isHexadecimal(r rune) bool { - return (r >= '0' && r <= '9') || - (r >= 'a' && r <= 'f') || - (r >= 'A' && r <= 'F') -} - -func (itype itemType) String() string { - switch itype { - case itemError: - return "Error" - case itemNIL: - return "NIL" - case itemEOF: - return "EOF" - case itemText: - return "Text" - case itemString: - return "String" - case itemBool: - return "Bool" - case itemInteger: - return "Integer" - case itemFloat: - return "Float" - case itemDatetime: - return "DateTime" - case itemTableStart: - return "TableStart" - case itemTableEnd: - return "TableEnd" - case itemKeyStart: - return "KeyStart" - case itemArray: - return "Array" - case itemArrayEnd: - return "ArrayEnd" - case itemCommentStart: - return "CommentStart" - } - panic(fmt.Sprintf("BUG: Unknown type '%s'.", itype)) -} - -func (item item) String() string { - return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val) -} - -func escapeSpecial(c rune) string { - switch c { - case '\n': - return "\\n" - } - return string(c) -} diff --git a/third_party/github.com/BurntSushi/toml/lex_test.go b/third_party/github.com/BurntSushi/toml/lex_test.go deleted file mode 100644 index 6cfa21088..000000000 --- a/third_party/github.com/BurntSushi/toml/lex_test.go +++ /dev/null @@ -1,59 +0,0 @@ -package toml - -import ( - "log" - "testing" -) - -func init() { - log.SetFlags(0) -} - -var testSmall = ` -# This is a TOML document. Boom. - -[owner] -[owner] # Whoa there. -andrew = "gallant # poopy" # weeeee -predicate = false -num = -5192 -f = -0.5192 -zulu = 1979-05-27T07:32:00Z -whoop = "poop" -arrs = [ - 1987-07-05T05:45:00Z, - 5, - "wat?", - "hehe \n\r kewl", - [6], [], - 5.0, - # sweetness -] # more comments -# hehe -` - -var testSmaller = ` -[a.b] # Do you ignore me? -andrew = "ga# ll\"ant" # what about me? -kait = "brady" -awesomeness = true -pi = 3.14 -dob = 1987-07-05T17:45:00Z -perfection = [ - [6, 28], - [496, 8128] -] -` - -func TestLexer(t *testing.T) { - lx := lex(testSmaller) - for { - item := lx.nextItem() - if item.typ == itemEOF { - break - } else if item.typ == itemError { - t.Fatal(item.val) - } - testf("%s\n", item) - } -} diff --git a/third_party/github.com/BurntSushi/toml/out_test.go b/third_party/github.com/BurntSushi/toml/out_test.go deleted file mode 100644 index ab121e375..000000000 --- a/third_party/github.com/BurntSushi/toml/out_test.go +++ /dev/null @@ -1,19 +0,0 @@ -package toml - -import ( - "flag" - "fmt" -) - -var flagOut = false - -func init() { - flag.BoolVar(&flagOut, "out", flagOut, "Print debug output.") - flag.Parse() -} - -func testf(format string, v ...interface{}) { - if flagOut { - fmt.Printf(format, v...) - } -} diff --git a/third_party/github.com/BurntSushi/toml/parse.go b/third_party/github.com/BurntSushi/toml/parse.go deleted file mode 100644 index 2abb173f8..000000000 --- a/third_party/github.com/BurntSushi/toml/parse.go +++ /dev/null @@ -1,417 +0,0 @@ -package toml - -import ( - "fmt" - "log" - "strconv" - "strings" - "time" - "unicode/utf8" -) - -type parser struct { - mapping map[string]interface{} - types map[string]tomlType - lx *lexer - - // A list of keys in the order that they appear in the TOML data. 
- ordered []Key - - // the full key for the current hash in scope - context Key - - // the base key name for everything except hashes - currentKey string - - // rough approximation of line number - approxLine int - - // A map of 'key.group.names' to whether they were created implicitly. - implicits map[string]bool -} - -type parseError string - -func (pe parseError) Error() string { - return string(pe) -} - -func parse(data string) (p *parser, err error) { - defer func() { - if r := recover(); r != nil { - var ok bool - if err, ok = r.(parseError); ok { - return - } - panic(r) - } - }() - - p = &parser{ - mapping: make(map[string]interface{}), - types: make(map[string]tomlType), - lx: lex(data), - ordered: make([]Key, 0), - implicits: make(map[string]bool), - } - for { - item := p.next() - if item.typ == itemEOF { - break - } - p.topLevel(item) - } - - return p, nil -} - -func (p *parser) panic(format string, v ...interface{}) { - msg := fmt.Sprintf("Near line %d, key '%s': %s", - p.approxLine, p.current(), fmt.Sprintf(format, v...)) - panic(parseError(msg)) -} - -func (p *parser) next() item { - it := p.lx.nextItem() - if it.typ == itemError { - p.panic("Near line %d: %s", it.line, it.val) - } - return it -} - -func (p *parser) bug(format string, v ...interface{}) { - log.Fatalf("BUG: %s\n\n", fmt.Sprintf(format, v...)) -} - -func (p *parser) expect(typ itemType) item { - it := p.next() - p.assertEqual(typ, it.typ) - return it -} - -func (p *parser) assertEqual(expected, got itemType) { - if expected != got { - p.bug("Expected '%s' but got '%s'.", expected, got) - } -} - -func (p *parser) topLevel(item item) { - switch item.typ { - case itemCommentStart: - p.approxLine = item.line - p.expect(itemText) - case itemTableStart: - kg := p.expect(itemText) - p.approxLine = kg.line - - key := make(Key, 0) - for ; kg.typ == itemText; kg = p.next() { - key = append(key, kg.val) - } - p.assertEqual(itemTableEnd, kg.typ) - - p.establishContext(key, false) - p.setType("", tomlHash) - p.ordered = append(p.ordered, key) - case itemArrayTableStart: - kg := p.expect(itemText) - p.approxLine = kg.line - - key := make(Key, 0) - for ; kg.typ == itemText; kg = p.next() { - key = append(key, kg.val) - } - p.assertEqual(itemArrayTableEnd, kg.typ) - - p.establishContext(key, true) - p.setType("", tomlArrayHash) - p.ordered = append(p.ordered, key) - case itemKeyStart: - kname := p.expect(itemText) - p.currentKey = kname.val - p.approxLine = kname.line - - val, typ := p.value(p.next()) - p.setValue(p.currentKey, val) - p.setType(p.currentKey, typ) - p.ordered = append(p.ordered, p.context.add(p.currentKey)) - - p.currentKey = "" - default: - p.bug("Unexpected type at top level: %s", item.typ) - } -} - -// value translates an expected value from the lexer into a Go value wrapped -// as an empty interface. -func (p *parser) value(it item) (interface{}, tomlType) { - switch it.typ { - case itemString: - return p.replaceUnicode(replaceEscapes(it.val)), p.typeOfPrimitive(it) - case itemBool: - switch it.val { - case "true": - return true, p.typeOfPrimitive(it) - case "false": - return false, p.typeOfPrimitive(it) - } - p.bug("Expected boolean value, but got '%s'.", it.val) - case itemInteger: - num, err := strconv.ParseInt(it.val, 10, 64) - if err != nil { - // See comment below for floats describing why we make a - // distinction between a bug and a user error. 
- if e, ok := err.(*strconv.NumError); ok && - e.Err == strconv.ErrRange { - - p.panic("Integer '%s' is out of the range of 64-bit "+ - "signed integers.", it.val) - } else { - p.bug("Expected integer value, but got '%s'.", it.val) - } - } - return num, p.typeOfPrimitive(it) - case itemFloat: - num, err := strconv.ParseFloat(it.val, 64) - if err != nil { - // Distinguish float values. Normally, it'd be a bug if the lexer - // provides an invalid float, but it's possible that the float is - // out of range of valid values (which the lexer cannot determine). - // So mark the former as a bug but the latter as a legitimate user - // error. - // - // This is also true for integers. - if e, ok := err.(*strconv.NumError); ok && - e.Err == strconv.ErrRange { - - p.panic("Float '%s' is out of the range of 64-bit "+ - "IEEE-754 floating-point numbers.", it.val) - } else { - p.bug("Expected float value, but got '%s'.", it.val) - } - } - return num, p.typeOfPrimitive(it) - case itemDatetime: - t, err := time.Parse("2006-01-02T15:04:05Z", it.val) - if err != nil { - p.bug("Expected Zulu formatted DateTime, but got '%s'.", it.val) - } - return t, p.typeOfPrimitive(it) - case itemArray: - array := make([]interface{}, 0) - types := make([]tomlType, 0) - - for it = p.next(); it.typ != itemArrayEnd; it = p.next() { - if it.typ == itemCommentStart { - p.expect(itemText) - continue - } - - val, typ := p.value(it) - array = append(array, val) - types = append(types, typ) - } - return array, p.typeOfArray(types) - } - p.bug("Unexpected value type: %s", it.typ) - panic("unreachable") -} - -// establishContext sets the current context of the parser, -// where the context is either a hash or an array of hashes. Which one is -// set depends on the value of the `array` parameter. -// -// Establishing the context also makes sure that the key isn't a duplicate, and -// will create implicit hashes automatically. -func (p *parser) establishContext(key Key, array bool) { - var ok bool - - // Always start at the top level and drill down for our context. - hashContext := p.mapping - keyContext := make(Key, 0) - - // We only need implicit hashes for key[0:-1] - for _, k := range key[0 : len(key)-1] { - _, ok = hashContext[k] - keyContext = append(keyContext, k) - - // No key? Make an implicit hash and move on. - if !ok { - p.addImplicit(keyContext) - hashContext[k] = make(map[string]interface{}) - } - - // If the hash context is actually an array of tables, then set - // the hash context to the last element in that array. - // - // Otherwise, it better be a table, since this MUST be a key group (by - // virtue of it not being the last element in a key). - switch t := hashContext[k].(type) { - case []map[string]interface{}: - hashContext = t[len(t)-1] - case map[string]interface{}: - hashContext = t - default: - p.panic("Key '%s' was already created as a hash.", keyContext) - } - } - - p.context = keyContext - if array { - // If this is the first element for this array, then allocate a new - // list of tables for it. - k := key[len(key)-1] - if _, ok := hashContext[k]; !ok { - hashContext[k] = make([]map[string]interface{}, 0, 5) - } - - // Add a new table. But make sure the key hasn't already been used - // for something else. 
- if hash, ok := hashContext[k].([]map[string]interface{}); ok { - hashContext[k] = append(hash, make(map[string]interface{})) - } else { - p.panic("Key '%s' was already created and cannot be used as "+ - "an array.", keyContext) - } - } else { - p.setValue(key[len(key)-1], make(map[string]interface{})) - } - p.context = append(p.context, key[len(key)-1]) -} - -// setValue sets the given key to the given value in the current context. -// It will make sure that the key hasn't already been defined, account for -// implicit key groups. -func (p *parser) setValue(key string, value interface{}) { - var tmpHash interface{} - var ok bool - - hash := p.mapping - keyContext := make(Key, 0) - for _, k := range p.context { - keyContext = append(keyContext, k) - if tmpHash, ok = hash[k]; !ok { - p.bug("Context for key '%s' has not been established.", keyContext) - } - switch t := tmpHash.(type) { - case []map[string]interface{}: - // The context is a table of hashes. Pick the most recent table - // defined as the current hash. - hash = t[len(t)-1] - case map[string]interface{}: - hash = t - default: - p.bug("Expected hash to have type 'map[string]interface{}', but "+ - "it has '%T' instead.", tmpHash) - } - } - keyContext = append(keyContext, key) - - if _, ok := hash[key]; ok { - // Typically, if the given key has already been set, then we have - // to raise an error since duplicate keys are disallowed. However, - // it's possible that a key was previously defined implicitly. In this - // case, it is allowed to be redefined concretely. (See the - // `tests/valid/implicit-and-explicit-after.toml` test in `toml-test`.) - // - // But we have to make sure to stop marking it as an implicit. (So that - // another redefinition provokes an error.) - // - // Note that since it has already been defined (as a hash), we don't - // want to overwrite it. So our business is done. - if p.isImplicit(keyContext) { - p.removeImplicit(keyContext) - return - } - - // Otherwise, we have a concrete key trying to override a previous - // key, which is *always* wrong. - p.panic("Key '%s' has already been defined.", keyContext) - } - hash[key] = value -} - -// setType sets the type of a particular value at a given key. -// It should be called immediately AFTER setValue. -// -// Note that if `key` is empty, then the type given will be applied to the -// current context (which is either a table or an array of tables). -func (p *parser) setType(key string, typ tomlType) { - keyContext := make(Key, 0, len(p.context)+1) - for _, k := range p.context { - keyContext = append(keyContext, k) - } - if len(key) > 0 { // allow type setting for hashes - keyContext = append(keyContext, key) - } - p.types[keyContext.String()] = typ -} - -// addImplicit sets the given Key as having been created implicitly. -func (p *parser) addImplicit(key Key) { - p.implicits[key.String()] = true -} - -// removeImplicit stops tagging the given key as having been implicitly created. -func (p *parser) removeImplicit(key Key) { - p.implicits[key.String()] = false -} - -// isImplicit returns true if the key group pointed to by the key was created -// implicitly. -func (p *parser) isImplicit(key Key) bool { - return p.implicits[key.String()] -} - -// current returns the full key name of the current context. 
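
The implicit-table handling that establishContext and setValue implement can be seen end to end with a short decode. This is a minimal sketch, not code from the removed package, and it assumes the upstream import path `github.com/BurntSushi/toml` (this tree vendors the same code under `github.com/coreos/etcd/third_party/...`): `[a.b.c]` implicitly creates `a` and `a.b`, and a later explicit `[a]` is accepted exactly once.

```go
package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

const blob = `
[a.b.c]
answer = 42

[a]   # legal: "a" existed only implicitly until now
better = 43
`

func main() {
	var v map[string]interface{}
	if _, err := toml.Decode(blob, &v); err != nil {
		// A second explicit [a] would fail here with a duplicate-key error,
		// per the setValue logic above.
		log.Fatal(err)
	}
	fmt.Println(v["a"]) // map[b:map[c:map[answer:42]] better:43]
}
```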
-func (p *parser) current() string { - if len(p.currentKey) == 0 { - return p.context.String() - } - if len(p.context) == 0 { - return p.currentKey - } - return fmt.Sprintf("%s.%s", p.context, p.currentKey) -} - -func replaceEscapes(s string) string { - return strings.NewReplacer( - "\\b", "\u0008", - "\\t", "\u0009", - "\\n", "\u000A", - "\\f", "\u000C", - "\\r", "\u000D", - "\\\"", "\u0022", - "\\/", "\u002F", - "\\\\", "\u005C", - ).Replace(s) -} - -func (p *parser) replaceUnicode(s string) string { - indexEsc := func() int { - return strings.Index(s, "\\u") - } - for i := indexEsc(); i != -1; i = indexEsc() { - asciiBytes := s[i+2 : i+6] - s = strings.Replace(s, s[i:i+6], p.asciiEscapeToUnicode(asciiBytes), -1) - } - return s -} - -func (p *parser) asciiEscapeToUnicode(s string) string { - hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32) - if err != nil { - p.bug("Could not parse '%s' as a hexadecimal number, but the "+ - "lexer claims it's OK: %s", s, err) - } - - // BUG(burntsushi) - // I honestly don't understand how this works. I can't seem - // to find a way to make this fail. I figured this would fail on invalid - // UTF-8 characters like U+DCFF, but it doesn't. - r := string(rune(hex)) - if !utf8.ValidString(r) { - p.panic("Escaped character '\\u%s' is not valid UTF-8.", s) - } - return string(r) -} diff --git a/third_party/github.com/BurntSushi/toml/parse_test.go b/third_party/github.com/BurntSushi/toml/parse_test.go deleted file mode 100644 index d6f0d32fb..000000000 --- a/third_party/github.com/BurntSushi/toml/parse_test.go +++ /dev/null @@ -1,61 +0,0 @@ -package toml - -import ( - "strings" - "testing" -) - -var testParseSmall = ` -# This is a TOML document. Boom. - -wat = "chipper" - -[owner.andrew.gallant] -hmm = "hi" - -[owner] # Whoa there. -andreW = "gallant # poopy" # weeeee -predicate = false -num = -5192 -f = -0.5192 -zulu = 1979-05-27T07:32:00Z -whoop = "poop" -tests = [ [1, 2, 3], ["abc", "xyz"] ] -arrs = [ # hmm - # more comments are awesome. - 1987-07-05T05:45:00Z, - # say wat? - 1987-07-05T05:45:00Z, - 1987-07-05T05:45:00Z, - # sweetness -] # more comments -# hehe -` - -var testParseSmall2 = ` -[a] -better = 43 - -[a.b.c] -answer = 42 -` - -func TestParse(t *testing.T) { - m, err := parse(testParseSmall) - if err != nil { - t.Fatal(err) - } - printMap(m.mapping, 0) -} - -func printMap(m map[string]interface{}, depth int) { - for k, v := range m { - testf("%s%s\n", strings.Repeat(" ", depth), k) - switch subm := v.(type) { - case map[string]interface{}: - printMap(subm, depth+1) - default: - testf("%s%v\n", strings.Repeat(" ", depth+1), v) - } - } -} diff --git a/third_party/github.com/BurntSushi/toml/session.vim b/third_party/github.com/BurntSushi/toml/session.vim deleted file mode 100644 index 562164be0..000000000 --- a/third_party/github.com/BurntSushi/toml/session.vim +++ /dev/null @@ -1 +0,0 @@ -au BufWritePost *.go silent!make tags > /dev/null 2>&1 diff --git a/third_party/github.com/BurntSushi/toml/toml-test-encoder/COPYING b/third_party/github.com/BurntSushi/toml/toml-test-encoder/COPYING deleted file mode 100644 index 5a8e33254..000000000 --- a/third_party/github.com/BurntSushi/toml/toml-test-encoder/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. 
- - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. - diff --git a/third_party/github.com/BurntSushi/toml/toml-test-encoder/README.md b/third_party/github.com/BurntSushi/toml/toml-test-encoder/README.md deleted file mode 100644 index 45a603f29..000000000 --- a/third_party/github.com/BurntSushi/toml/toml-test-encoder/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Implements the TOML test suite interface for TOML encoders - -This is an implementation of the interface expected by -[toml-test](https://github.com/BurntSushi/toml-test) for the -[TOML encoder](https://github.com/BurntSushi/toml). -In particular, it maps JSON data on `stdin` to a TOML format on `stdout`. - - -Compatible with TOML version -[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md) - -Compatible with `toml-test` version -[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0) - diff --git a/third_party/github.com/BurntSushi/toml/toml-test-encoder/main.go b/third_party/github.com/BurntSushi/toml/toml-test-encoder/main.go deleted file mode 100644 index 7066b2f65..000000000 --- a/third_party/github.com/BurntSushi/toml/toml-test-encoder/main.go +++ /dev/null @@ -1,129 +0,0 @@ -package main - -import ( - "encoding/json" - "flag" - "log" - "os" - "path" - "strconv" - "time" - - "github.com/coreos/etcd/third_party/github.com/BurntSushi/toml" -) - -func init() { - log.SetFlags(0) - - flag.Usage = usage - flag.Parse() -} - -func usage() { - log.Printf("Usage: %s < json-file\n", path.Base(os.Args[0])) - flag.PrintDefaults() - - os.Exit(1) -} - -func main() { - if flag.NArg() != 0 { - flag.Usage() - } - - var tmp interface{} - if err := json.NewDecoder(os.Stdin).Decode(&tmp); err != nil { - log.Fatalf("Error decoding JSON: %s", err) - } - - tomlData := translate(tmp) - if err := toml.NewEncoder(os.Stdout).Encode(tomlData); err != nil { - log.Fatalf("Error encoding TOML: %s", err) - } -} - -func translate(typedJson interface{}) interface{} { - switch v := typedJson.(type) { - case map[string]interface{}: - if len(v) == 2 && in("type", v) && in("value", v) { - return untag(v) - } - m := make(map[string]interface{}, len(v)) - for k, v2 := range v { - m[k] = translate(v2) - } - return m - case []interface{}: - tabArray := make([]map[string]interface{}, len(v)) - for i := range v { - if m, ok := translate(v[i]).(map[string]interface{}); ok { - tabArray[i] = m - } else { - log.Fatalf("JSON arrays may only contain objects. 
This " + - "corresponds to only tables being allowed in " + - "TOML table arrays.") - } - } - return tabArray - } - log.Fatalf("Unrecognized JSON format '%T'.", typedJson) - panic("unreachable") -} - -func untag(typed map[string]interface{}) interface{} { - t := typed["type"].(string) - v := typed["value"] - switch t { - case "string": - return v.(string) - case "integer": - v := v.(string) - n, err := strconv.Atoi(v) - if err != nil { - log.Fatalf("Could not parse '%s' as integer: %s", v, err) - } - return n - case "float": - v := v.(string) - f, err := strconv.ParseFloat(v, 64) - if err != nil { - log.Fatalf("Could not parse '%s' as float64: %s", v, err) - } - return f - case "datetime": - v := v.(string) - t, err := time.Parse("2006-01-02T15:04:05Z", v) - if err != nil { - log.Fatalf("Could not parse '%s' as a datetime: %s", v, err) - } - return t - case "bool": - v := v.(string) - switch v { - case "true": - return true - case "false": - return false - } - log.Fatalf("Could not parse '%s' as a boolean.", v) - case "array": - v := v.([]interface{}) - array := make([]interface{}, len(v)) - for i := range v { - if m, ok := v[i].(map[string]interface{}); ok { - array[i] = untag(m) - } else { - log.Fatalf("Arrays may only contain other arrays or "+ - "primitive values, but found a '%T'.", m) - } - } - return array - } - log.Fatalf("Unrecognized tag type '%s'.", t) - panic("unreachable") -} - -func in(key string, m map[string]interface{}) bool { - _, ok := m[key] - return ok -} diff --git a/third_party/github.com/BurntSushi/toml/toml-test-go/COPYING b/third_party/github.com/BurntSushi/toml/toml-test-go/COPYING deleted file mode 100644 index 5a8e33254..000000000 --- a/third_party/github.com/BurntSushi/toml/toml-test-go/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. - diff --git a/third_party/github.com/BurntSushi/toml/toml-test-go/README.md b/third_party/github.com/BurntSushi/toml/toml-test-go/README.md deleted file mode 100644 index 24421eb70..000000000 --- a/third_party/github.com/BurntSushi/toml/toml-test-go/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Implements the TOML test suite interface - -This is an implementation of the interface expected by -[toml-test](https://github.com/BurntSushi/toml-test) for my -[toml parser written in Go](https://github.com/BurntSushi/toml). -In particular, it maps TOML data on `stdin` to a JSON format on `stdout`. 
- - -Compatible with TOML version -[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md) - -Compatible with `toml-test` version -[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0) - diff --git a/third_party/github.com/BurntSushi/toml/toml-test-go/main.go b/third_party/github.com/BurntSushi/toml/toml-test-go/main.go deleted file mode 100644 index afd60c1d1..000000000 --- a/third_party/github.com/BurntSushi/toml/toml-test-go/main.go +++ /dev/null @@ -1,88 +0,0 @@ -package main - -import ( - "encoding/json" - "flag" - "fmt" - "log" - "os" - "path" - "time" - - "github.com/coreos/etcd/third_party/github.com/BurntSushi/toml" -) - -func init() { - log.SetFlags(0) - - flag.Usage = usage - flag.Parse() -} - -func usage() { - log.Printf("Usage: %s < toml-file\n", path.Base(os.Args[0])) - flag.PrintDefaults() - - os.Exit(1) -} - -func main() { - if flag.NArg() != 0 { - flag.Usage() - } - - var tmp interface{} - if _, err := toml.DecodeReader(os.Stdin, &tmp); err != nil { - log.Fatalf("Error decoding TOML: %s", err) - } - - typedTmp := translate(tmp) - if err := json.NewEncoder(os.Stdout).Encode(typedTmp); err != nil { - log.Fatalf("Error encoding JSON: %s", err) - } -} - -func translate(tomlData interface{}) interface{} { - switch orig := tomlData.(type) { - case map[string]interface{}: - typed := make(map[string]interface{}, len(orig)) - for k, v := range orig { - typed[k] = translate(v) - } - return typed - case []map[string]interface{}: - typed := make([]map[string]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v).(map[string]interface{}) - } - return typed - case []interface{}: - typed := make([]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v) - } - - // We don't really need to tag arrays, but let's be future proof. - // (If TOML ever supports tuples, we'll need this.) - return tag("array", typed) - case time.Time: - return tag("datetime", orig.Format("2006-01-02T15:04:05Z")) - case bool: - return tag("bool", fmt.Sprintf("%v", orig)) - case int64: - return tag("integer", fmt.Sprintf("%d", orig)) - case float64: - return tag("float", fmt.Sprintf("%v", orig)) - case string: - return tag("string", orig) - } - - panic(fmt.Sprintf("Unknown type: %T", tomlData)) -} - -func tag(typeName string, data interface{}) map[string]interface{} { - return map[string]interface{}{ - "type": typeName, - "value": data, - } -} diff --git a/third_party/github.com/BurntSushi/toml/tomlv/COPYING b/third_party/github.com/BurntSushi/toml/tomlv/COPYING deleted file mode 100644 index 5a8e33254..000000000 --- a/third_party/github.com/BurntSushi/toml/tomlv/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. 
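
For reference, the translate/tag pair in toml-test-go wraps every decoded TOML primitive as a `{"type": ..., "value": ...}` object with the value rendered as a string. The sketch below builds that shape by hand to show the output format; the piped command in the comment is a hypothetical invocation (assuming the binary is installed under its directory name), and the values are made up for illustration.

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Roughly what `echo 'answer = 42' | toml-test-go` would print,
	// constructed manually here instead of running the tool.
	tagged := map[string]interface{}{
		"answer": map[string]interface{}{"type": "integer", "value": "42"},
	}
	out, _ := json.Marshal(tagged)
	fmt.Println(string(out)) // {"answer":{"type":"integer","value":"42"}}
}
```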
- diff --git a/third_party/github.com/BurntSushi/toml/tomlv/README.md b/third_party/github.com/BurntSushi/toml/tomlv/README.md deleted file mode 100644 index bcc3f47b1..000000000 --- a/third_party/github.com/BurntSushi/toml/tomlv/README.md +++ /dev/null @@ -1,22 +0,0 @@ -# TOML Validator - -If Go is installed, it's simple to try it out: - -```bash -go get github.com/BurntSushi/toml/tomlv -tomlv some-toml-file.toml -``` - -You can see the types of every key in a TOML file with: - -```bash -tomlv -types some-toml-file.toml -``` - -At the moment, only one error message is reported at a time. Error messages -include line numbers. No output means that the files given are valid TOML, or -there is a bug in `tomlv`. - -Compatible with TOML version -[v0.1.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.1.0.md) - diff --git a/third_party/github.com/BurntSushi/toml/tomlv/main.go b/third_party/github.com/BurntSushi/toml/tomlv/main.go deleted file mode 100644 index a5b22b154..000000000 --- a/third_party/github.com/BurntSushi/toml/tomlv/main.go +++ /dev/null @@ -1,60 +0,0 @@ -package main - -import ( - "flag" - "fmt" - "log" - "os" - "path" - "strings" - "text/tabwriter" - - "github.com/coreos/etcd/third_party/github.com/BurntSushi/toml" -) - -var ( - flagTypes = false -) - -func init() { - log.SetFlags(0) - - flag.BoolVar(&flagTypes, "types", flagTypes, - "When set, the types of every defined key will be shown.") - - flag.Usage = usage - flag.Parse() -} - -func usage() { - log.Printf("Usage: %s toml-file [ toml-file ... ]\n", - path.Base(os.Args[0])) - flag.PrintDefaults() - - os.Exit(1) -} - -func main() { - if flag.NArg() < 1 { - flag.Usage() - } - for _, f := range flag.Args() { - var tmp interface{} - md, err := toml.DecodeFile(f, &tmp) - if err != nil { - log.Fatalf("Error in '%s': %s", f, err) - } - if flagTypes { - printTypes(md) - } - } -} - -func printTypes(md toml.MetaData) { - tabw := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) - for _, key := range md.Keys() { - fmt.Fprintf(tabw, "%s%s\t%s\n", - strings.Repeat(" ", len(key)-1), key, md.Type(key...)) - } - tabw.Flush() -} diff --git a/third_party/github.com/BurntSushi/toml/type_check.go b/third_party/github.com/BurntSushi/toml/type_check.go deleted file mode 100644 index 22f188d42..000000000 --- a/third_party/github.com/BurntSushi/toml/type_check.go +++ /dev/null @@ -1,78 +0,0 @@ -package toml - -// tomlType represents any Go type that corresponds to a TOML type. -// While the first draft of the TOML spec has a simplistic type system that -// probably doesn't need this level of sophistication, we seem to be militating -// toward adding real composite types. -type tomlType interface { - typeString() string -} - -// typeEqual accepts any two types and returns true if they are equal. -func typeEqual(t1, t2 tomlType) bool { - return t1.typeString() == t2.typeString() -} - -type tomlBaseType string - -func (btype tomlBaseType) typeString() string { - return string(btype) -} - -func (btype tomlBaseType) String() string { - return btype.typeString() -} - -var ( - tomlInteger tomlBaseType = "Integer" - tomlFloat tomlBaseType = "Float" - tomlDatetime tomlBaseType = "Datetime" - tomlString tomlBaseType = "String" - tomlBool tomlBaseType = "Bool" - tomlArray tomlBaseType = "Array" - tomlHash tomlBaseType = "Hash" - tomlArrayHash tomlBaseType = "ArrayHash" -) - -// typeOfPrimitive returns a tomlType of any primitive value in TOML. -// Primitive values are: Integer, Float, Datetime, String and Bool. 
-// -// Passing a lexer item other than the following will cause a BUG message -// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime. -func (p *parser) typeOfPrimitive(lexItem item) tomlType { - switch lexItem.typ { - case itemInteger: - return tomlInteger - case itemFloat: - return tomlFloat - case itemDatetime: - return tomlDatetime - case itemString: - return tomlString - case itemBool: - return tomlBool - } - p.bug("Cannot infer primitive type of lex item '%s'.", lexItem) - panic("unreachable") -} - -// typeOfArray returns a tomlType for an array given a list of types of its -// values. -// -// In the current spec, if an array is homogeneous, then its type is always -// "Array". If the array is not homogeneous, an error is generated. -func (p *parser) typeOfArray(types []tomlType) tomlType { - // Empty arrays are cool. - if len(types) == 0 { - return tomlArray - } - - theType := types[0] - for _, t := range types[1:] { - if !typeEqual(theType, t) { - p.panic("Array contains values of type '%s' and '%s', but arrays "+ - "must be homogeneous.", theType, t) - } - } - return tomlArray -} diff --git a/third_party/github.com/BurntSushi/toml/type_fields.go b/third_party/github.com/BurntSushi/toml/type_fields.go deleted file mode 100644 index 138fc0037..000000000 --- a/third_party/github.com/BurntSushi/toml/type_fields.go +++ /dev/null @@ -1,241 +0,0 @@ -package toml - -// Struct field handling is adapted from code in encoding/json: -// -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -import ( - "reflect" - "sort" - "sync" -) - -// A field represents a single field found in a struct. -type field struct { - name string // the name of the field (`toml` tag included) - tag bool // whether field has a `toml` tag - index []int // represents the depth of an anonymous field - typ reflect.Type // the type of the field -} - -// byName sorts field by name, breaking ties with depth, -// then breaking ties with "name came from toml tag", then -// breaking ties with index sequence. -type byName []field - -func (x byName) Len() int { return len(x) } - -func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byName) Less(i, j int) bool { - if x[i].name != x[j].name { - return x[i].name < x[j].name - } - if len(x[i].index) != len(x[j].index) { - return len(x[i].index) < len(x[j].index) - } - if x[i].tag != x[j].tag { - return x[i].tag - } - return byIndex(x).Less(i, j) -} - -// byIndex sorts field by index sequence. -type byIndex []field - -func (x byIndex) Len() int { return len(x) } - -func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byIndex) Less(i, j int) bool { - for k, xik := range x[i].index { - if k >= len(x[j].index) { - return false - } - if xik != x[j].index[k] { - return xik < x[j].index[k] - } - } - return len(x[i].index) < len(x[j].index) -} - -// typeFields returns a list of fields that TOML should recognize for the given -// type. The algorithm is breadth-first search over the set of structs to -// include - the top struct and then any reachable anonymous structs. -func typeFields(t reflect.Type) []field { - // Anonymous fields to explore at the current level and the next. - current := []field{} - next := []field{{typ: t}} - - // Count of queued names for current level and the next. - count := map[reflect.Type]int{} - nextCount := map[reflect.Type]int{} - - // Types already visited at an earlier level. 
- visited := map[reflect.Type]bool{} - - // Fields found. - var fields []field - - for len(next) > 0 { - current, next = next, current[:0] - count, nextCount = nextCount, map[reflect.Type]int{} - - for _, f := range current { - if visited[f.typ] { - continue - } - visited[f.typ] = true - - // Scan f.typ for fields to include. - for i := 0; i < f.typ.NumField(); i++ { - sf := f.typ.Field(i) - if sf.PkgPath != "" { // unexported - continue - } - name := sf.Tag.Get("toml") - if name == "-" { - continue - } - index := make([]int, len(f.index)+1) - copy(index, f.index) - index[len(f.index)] = i - - ft := sf.Type - if ft.Name() == "" && ft.Kind() == reflect.Ptr { - // Follow pointer. - ft = ft.Elem() - } - - // Record found field and index sequence. - if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { - tagged := name != "" - if name == "" { - name = sf.Name - } - fields = append(fields, field{name, tagged, index, ft}) - if count[f.typ] > 1 { - // If there were multiple instances, add a second, - // so that the annihilation code will see a duplicate. - // It only cares about the distinction between 1 or 2, - // so don't bother generating any more copies. - fields = append(fields, fields[len(fields)-1]) - } - continue - } - - // Record new anonymous struct to explore in next round. - nextCount[ft]++ - if nextCount[ft] == 1 { - f := field{name: ft.Name(), index: index, typ: ft} - next = append(next, f) - } - } - } - } - - sort.Sort(byName(fields)) - - // Delete all fields that are hidden by the Go rules for embedded fields, - // except that fields with TOML tags are promoted. - - // The fields are sorted in primary order of name, secondary order - // of field index length. Loop over names; for each name, delete - // hidden fields by choosing the one dominant field that survives. - out := fields[:0] - for advance, i := 0, 0; i < len(fields); i += advance { - // One iteration per name. - // Find the sequence of fields with the name of this first field. - fi := fields[i] - name := fi.name - for advance = 1; i+advance < len(fields); advance++ { - fj := fields[i+advance] - if fj.name != name { - break - } - } - if advance == 1 { // Only one field with this name - out = append(out, fi) - continue - } - dominant, ok := dominantField(fields[i : i+advance]) - if ok { - out = append(out, dominant) - } - } - - fields = out - sort.Sort(byIndex(fields)) - - return fields -} - -// dominantField looks through the fields, all of which are known to -// have the same name, to find the single field that dominates the -// others using Go's embedding rules, modified by the presence of -// TOML tags. If there are multiple top-level fields, the boolean -// will be false: This condition is an error in Go and we skip all -// the fields. -func dominantField(fields []field) (field, bool) { - // The fields are sorted in increasing index-length order. The winner - // must therefore be one with the shortest index length. Drop all - // longer entries, which is easy: just truncate the slice. - length := len(fields[0].index) - tagged := -1 // Index of first tagged field. - for i, f := range fields { - if len(f.index) > length { - fields = fields[:i] - break - } - if f.tag { - if tagged >= 0 { - // Multiple tagged fields at the same level: conflict. - // Return no field. - return field{}, false - } - tagged = i - } - } - if tagged >= 0 { - return fields[tagged], true - } - // All remaining fields have the same length. 
If there's more than one, - // we have a conflict (two fields named "X" at the same level) and we - // return no field. - if len(fields) > 1 { - return field{}, false - } - return fields[0], true -} - -var fieldCache struct { - sync.RWMutex - m map[reflect.Type][]field -} - -// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. -func cachedTypeFields(t reflect.Type) []field { - fieldCache.RLock() - f := fieldCache.m[t] - fieldCache.RUnlock() - if f != nil { - return f - } - - // Compute fields without lock. - // Might duplicate effort but won't hold other computations back. - f = typeFields(t) - if f == nil { - f = []field{} - } - - fieldCache.Lock() - if fieldCache.m == nil { - fieldCache.m = map[reflect.Type][]field{} - } - fieldCache.m[t] = f - fieldCache.Unlock() - return f -}
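
To close out, here is a minimal sketch of the dominance rule that dominantField encodes, under the assumptions that the decoder resolves struct fields through the cachedTypeFields path above and that the upstream import path `github.com/BurntSushi/toml` is used (this tree vendors it under `third_party/`). Two embedded structs promote a field with the same name at the same depth; the `toml`-tagged one should win, and if neither were tagged the conflicting name would simply be dropped.

```go
package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

type Plain struct {
	ID string // untagged
}

type Tagged struct {
	ID string `toml:"ID"` // tagged with the same resolved name
}

type Config struct {
	Plain  // promotes ID at depth 1
	Tagged // also promotes ID at depth 1; the tag makes this field dominant
}

func main() {
	var c Config
	if _, err := toml.Decode(`ID = "tagged wins"`, &c); err != nil {
		log.Fatal(err)
	}
	// Expected per dominantField: the tagged duplicate receives the value
	// and the untagged one is left untouched.
	fmt.Printf("Tagged.ID=%q Plain.ID=%q\n", c.Tagged.ID, c.Plain.ID)
}
```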