Support config file

Iwasaki Yudai 2015-08-26 23:23:54 -07:00
parent 6e39085a53
commit 4b67e3059d
111 changed files with 8844 additions and 164 deletions

Godeps/Godeps.json generated
@@ -16,10 +16,26 @@
"ImportPath": "github.com/elazarl/go-bindata-assetfs",
"Rev": "d5cac425555ca5cf00694df246e04f05e6a55150"
},
{
"ImportPath": "github.com/fatih/camelcase",
"Rev": "332844f2fb0193cce955f4687646abbdcc43ceeb"
},
{
"ImportPath": "github.com/fatih/structs",
"Rev": "a9f7daa9c2729e97450c2da2feda19130a367d8f"
},
{
"ImportPath": "github.com/gorilla/websocket",
"Rev": "b6ab76f1fe9803ee1d59e7e5b2a797c1fe897ce5"
},
{
"ImportPath": "github.com/hashicorp/go-multierror",
"Rev": "56912fb08d85084aa318edcf2bba735b97cf35c5"
},
{
"ImportPath": "github.com/hashicorp/hcl",
"Rev": "54864211433d45cb780682431585b3e573b49e4a"
},
{
"ImportPath": "github.com/kr/pty",
"Comment": "release.r56-28-g5cf931e",

@@ -0,0 +1,3 @@
language: go
go: 1.4

@@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2015 Fatih Arslan
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@@ -0,0 +1,40 @@
# CamelCase [![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/fatih/camelcase) [![Build Status](http://img.shields.io/travis/fatih/camelcase.svg?style=flat-square)](https://travis-ci.org/fatih/camelcase)
CamelCase is a Golang (Go) package that splits the words of a camel case
string into a slice of words, whether the input is lower or upper camel case.
## Install
```bash
go get github.com/fatih/camelcase
```
## Usage and examples
```go
splitted := camelcase.Split("GolangPackage")
fmt.Println(splitted[0], splitted[1]) // prints: "Golang", "Package"
```
Both lower camel case and upper camel case are supported. For more info please
check: [http://en.wikipedia.org/wiki/CamelCase](http://en.wikipedia.org/wiki/CamelCase)
Below are some example cases:
```
lowercase => ["lowercase"]
Class => ["Class"]
MyClass => ["My", "Class"]
MyC => ["My", "C"]
HTML => ["HTML"]
PDFLoader => ["PDF", "Loader"]
AString => ["A", "String"]
SimpleXMLParser => ["Simple", "XML", "Parser"]
vimRPCPlugin => ["vim", "RPC", "Plugin"]
GL11Version => ["GL", "11", "Version"]
99Bottles => ["99", "Bottles"]
May5 => ["May", "5"]
BFG9000 => ["BFG", "9000"]
```
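For reference, a minimal runnable program exercising a few of the cases above might look like this (import path as vendored in this commit; the expected results follow the table above):

```go
package main

import (
	"fmt"

	"github.com/fatih/camelcase"
)

func main() {
	for _, s := range []string{"MyClass", "PDFLoader", "GL11Version", "BFG9000"} {
		// e.g. "GL11Version" => ["GL" "11" "Version"]
		fmt.Printf("%s => %q\n", s, camelcase.Split(s))
	}
}
```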

@@ -0,0 +1,91 @@
// Package camelcase is a micro package to split the words of a camelcase type
// string into a slice of words.
package camelcase
import "unicode"
// Split splits the camelcase word and returns a list of words. It also
// supports digits. Both lower camel case and upper camel case are supported.
// For more info please check: http://en.wikipedia.org/wiki/CamelCase
//
// Below are some example cases:
// lowercase => ["lowercase"]
// Class => ["Class"]
// MyClass => ["My", "Class"]
// MyC => ["My", "C"]
// HTML => ["HTML"]
// PDFLoader => ["PDF", "Loader"]
// AString => ["A", "String"]
// SimpleXMLParser => ["Simple", "XML", "Parser"]
// vimRPCPlugin => ["vim", "RPC", "Plugin"]
// GL11Version => ["GL", "11", "Version"]
// 99Bottles => ["99", "Bottles"]
// May5 => ["May", "5"]
// BFG9000 => ["BFG", "9000"]
func Split(src string) []string {
if src == "" {
return []string{}
}
splitIndex := []int{}
for i, r := range src {
// we don't care about first index
if i == 0 {
continue
}
// search till we find an upper case
if unicode.IsLower(r) {
continue
}
prevRune := rune(src[i-1])
// for cases like: GL11Version, BFG9000
if unicode.IsDigit(r) && !unicode.IsDigit(prevRune) {
splitIndex = append(splitIndex, i)
continue
}
if !unicode.IsDigit(r) && !unicode.IsUpper(prevRune) {
// for cases like: MyC
if i+1 == len(src) {
splitIndex = append(splitIndex, i)
continue
}
// for cases like: SimpleXMLParser, eclipseRCPExt
if unicode.IsUpper(rune(src[i+1])) {
splitIndex = append(splitIndex, i)
continue
}
}
// If the next char is lower case, we have found a split index
if i+1 != len(src) && unicode.IsLower(rune(src[i+1])) {
splitIndex = append(splitIndex, i)
}
}
// nothing to split, such as "hello", "Class", "HTML"
if len(splitIndex) == 0 {
return []string{src}
}
// now split the input string into pieces
splitted := make([]string, len(splitIndex)+1)
for i := 0; i < len(splitIndex)+1; i++ {
if i == 0 {
// first index
splitted[i] = src[:splitIndex[0]]
} else if i == len(splitIndex) {
// last index
splitted[i] = src[splitIndex[i-1]:]
} else {
// between first and last index
splitted[i] = src[splitIndex[i-1]:splitIndex[i]]
}
}
return splitted
}
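As a concrete trace of the index logic above: for "SimpleXMLParser" the loop records split indexes 6 (before "XML") and 9 (before "Parser"), so the final loop slices the input into "Simple", "XML", "Parser". A minimal check of that behavior:

```go
package main

import (
	"fmt"

	"github.com/fatih/camelcase"
)

func main() {
	// splitIndex for "SimpleXMLParser" ends up as [6 9],
	// which slices the input into the three words below.
	fmt.Println(camelcase.Split("SimpleXMLParser")) // [Simple XML Parser]
}
```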

@@ -0,0 +1,35 @@
package camelcase
import (
"reflect"
"testing"
)
func TestSplit(t *testing.T) {
var testCases = []struct {
input string
output []string
}{
{input: "", output: []string{}},
{input: "lowercase", output: []string{"lowercase"}},
{input: "Class", output: []string{"Class"}},
{input: "MyClass", output: []string{"My", "Class"}},
{input: "MyC", output: []string{"My", "C"}},
{input: "HTML", output: []string{"HTML"}},
{input: "PDFLoader", output: []string{"PDF", "Loader"}},
{input: "AString", output: []string{"A", "String"}},
{input: "SimpleXMLParser", output: []string{"Simple", "XML", "Parser"}},
{input: "vimRPCPlugin", output: []string{"vim", "RPC", "Plugin"}},
{input: "GL11Version", output: []string{"GL", "11", "Version"}},
{input: "99Bottles", output: []string{"99", "Bottles"}},
{input: "May5", output: []string{"May", "5"}},
{input: "BFG9000", output: []string{"BFG", "9000"}},
}
for _, c := range testCases {
res := Split(c.input)
if !reflect.DeepEqual(res, c.output) {
t.Errorf("input: '%s'\n\twant: %v\n\tgot : %v\n", c.input, c.output, res)
}
}
}

@@ -0,0 +1,23 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe
*.test

@@ -0,0 +1,11 @@
language: go
go: 1.3
before_install:
- go get github.com/axw/gocov/gocov
- go get github.com/mattn/goveralls
- go get code.google.com/p/go.tools/cmd/cover
script:
- $HOME/gopath/bin/goveralls -repotoken $COVERALLS_TOKEN
env:
global:
- secure: hkc+92KPmMFqIH9n4yWdnH1JpZjahmOyDJwpTh8Yl0JieJNG0XEXpOqNao27eA0cLF+UHdyjFeGcPUJKNmgE46AoQjtovt+ICjCXKR2yF6S2kKJcUOz/Vd6boZF7qHV06jjxyxOebpID5iSoW6UfFr001bFxpd3jaSLFTzSHWRQ=

Godeps/_workspace/src/github.com/fatih/structs/LICENSE generated vendored Normal file
@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Fatih Arslan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -0,0 +1,164 @@
# Structs [![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/fatih/structs) [![Build Status](http://img.shields.io/travis/fatih/structs.svg?style=flat-square)](https://travis-ci.org/fatih/structs) [![Coverage Status](http://img.shields.io/coveralls/fatih/structs.svg?style=flat-square)](https://coveralls.io/r/fatih/structs)
Structs contains various utilities to work with Go (Golang) structs. It was
initially used by me to convert a struct into a `map[string]interface{}`. With
time I've added other utilities for structs. It's basically a high level
package based on primitives from the reflect package. Feel free to add new
functions or improve the existing code.
## Install
```bash
go get github.com/fatih/structs
```
## Usage and Examples
Just like the standard lib `strings`, `bytes` and co packages, `structs` has
many global functions to manipulate or organize your struct data. Let's define
and declare a struct:
```go
type Server struct {
Name string `json:"name,omitempty"`
ID int
Enabled bool
users []string // not exported
http.Server // embedded
}
server := &Server{
Name: "gopher",
ID: 123456,
Enabled: true,
}
```
```go
// Convert a struct to a map[string]interface{}
// => {"Name":"gopher", "ID":123456, "Enabled":true}
m := structs.Map(server)
// Convert the values of a struct to a []interface{}
// => ["gopher", 123456, true]
v := structs.Values(server)
// Convert the names of a struct to a []string
// (see "Names methods" for more info about fields)
n := structs.Names(server)
// Convert the values of a struct to a []*Field
// (see "Field methods" for more info about fields)
f := structs.Fields(server)
// Return the struct name => "Server"
n := structs.Name(server)
// Check if any field of the struct holds a zero value (is uninitialized)
h := structs.HasZero(server)
// Check if all fields of the struct hold zero values (nothing is initialized)
z := structs.IsZero(server)
// Check if server is a struct or a pointer to struct
i := structs.IsStruct(server)
```
### Struct methods
The structs functions can also be used as methods by creating a new
`*structs.Struct`. This is handy if you want to have more control over the
structs (such as retrieving a single Field).
```go
// Create a new struct type:
s := structs.New(server)
m := s.Map() // Get a map[string]interface{}
v := s.Values() // Get a []interface{}
f := s.Fields() // Get a []*Field
n := s.Names() // Get a []string
f := s.Field(name) // Get a *Field based on the given field name
f, ok := s.FieldOk(name) // Get a *Field based on the given field name
n := s.Name() // Get the struct name
h := s.HasZero() // Check if any field holds a zero value
z := s.IsZero() // Check if all fields hold zero values
```
### Field methods
We can easily examine a single Field for more detail. Below you can see how we
get and interact with various field methods:
```go
s := structs.New(server)
// Get the Field struct for the "Name" field
name := s.Field("Name")
// Get the underlying value, value => "gopher"
value := name.Value().(string)
// Set the field's value
name.Set("another gopher")
// Get the field's kind, kind => "string"
name.Kind()
// Check if the field is exported or not
if name.IsExported() {
fmt.Println("Name field is exported")
}
// Check if the value is a zero value, such as "" for string, 0 for int
if !name.IsZero() {
fmt.Println("Name is initialized")
}
// Check if the field is an anonymous (embedded) field
if !name.IsEmbedded() {
fmt.Println("Name is not an embedded field")
}
// Get the Field's tag value for tag name "json", tag value => "name,omitempty"
tagValue := name.Tag("json")
```
Nested structs are supported too:
```go
addrField := s.Field("Server").Field("Addr")
// Get the value for addr
a := addrField.Value().(string)
// Or get all fields
httpServer := s.Field("Server").Fields()
```
We can also get a slice of Fields from the Struct type to iterate over all
fields. This is handy if you wish to examine all fields:
```go
// Convert the fields of a struct to a []*Field
fields := s.Fields()
for _, f := range fields {
fmt.Printf("field name: %+v\n", f.Name())
if f.IsExported() {
fmt.Printf("value : %+v\n", f.Value())
fmt.Printf("is zero : %+v\n", f.IsZero())
}
}
```
## Credits
* [Fatih Arslan](https://github.com/fatih)
* [Cihangir Savas](https://github.com/cihangir)
## License
The MIT License (MIT) - see LICENSE.md for more details
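The README examples above rely on the default field names; the `structs` tag (documented in structs.go below) can rename map keys and skip empty fields. A minimal sketch, using a made-up Config struct rather than anything from this commit:

```go
package main

import (
	"fmt"

	"github.com/fatih/structs"
)

// Config is a hypothetical struct used only to illustrate the tag options.
type Config struct {
	Address string `structs:"address"`
	Port    int    `structs:"port,omitempty"`
}

func main() {
	c := Config{Address: "localhost"} // Port is left at its zero value

	// Port is skipped because of omitempty; Address appears under its tag name.
	fmt.Println(structs.Map(c)) // map[address:localhost]
}
```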

Godeps/_workspace/src/github.com/fatih/structs/field.go generated vendored Normal file
@@ -0,0 +1,126 @@
package structs
import (
"errors"
"fmt"
"reflect"
)
var (
errNotExported = errors.New("field is not exported")
errNotSettable = errors.New("field is not settable")
)
// Field represents a single struct field that encapsulates high level
// functions around the field.
type Field struct {
value reflect.Value
field reflect.StructField
defaultTag string
}
// Tag returns the value associated with key in the tag string. If there is no
// such key in the tag, Tag returns the empty string.
func (f *Field) Tag(key string) string {
return f.field.Tag.Get(key)
}
// Value returns the underlying value of the field. It panics if the field
// is not exported.
func (f *Field) Value() interface{} {
return f.value.Interface()
}
// IsEmbedded returns true if the given field is an anonymous field (embedded)
func (f *Field) IsEmbedded() bool {
return f.field.Anonymous
}
// IsExported returns true if the given field is exported.
func (f *Field) IsExported() bool {
return f.field.PkgPath == ""
}
// IsZero returns true if the given field is not initialized (has a zero value).
// It panics if the field is not exported.
func (f *Field) IsZero() bool {
zero := reflect.Zero(f.value.Type()).Interface()
current := f.Value()
return reflect.DeepEqual(current, zero)
}
// Name returns the name of the given field
func (f *Field) Name() string {
return f.field.Name
}
// Kind returns the field's kind, such as "string", "map", "bool", etc.
func (f *Field) Kind() reflect.Kind {
return f.value.Kind()
}
// Set sets the field to the given value. It returns an error if the field is not
// settable (not addressable or not exported) or if the given value's type
// doesn't match the field's type.
func (f *Field) Set(val interface{}) error {
// we can't set unexported fields, so be sure this field is exported
if !f.IsExported() {
return errNotExported
}
// do we get here? not sure...
if !f.value.CanSet() {
return errNotSettable
}
given := reflect.ValueOf(val)
if f.value.Kind() != given.Kind() {
return fmt.Errorf("wrong kind. got: %s want: %s", given.Kind(), f.value.Kind())
}
f.value.Set(given)
return nil
}
// Fields returns a slice of Fields. This is particularly handy to get the fields
// of a nested struct. A struct tag with the content of "-" ignores the
// checking of that particular field. Example:
//
// // Field is ignored by this package.
// Field *http.Request `structs:"-"`
//
// It panics if field is not exported or if field's kind is not struct
func (f *Field) Fields() []*Field {
return getFields(f.value, f.defaultTag)
}
// Field returns the field from a nested struct. It panics if the nested struct
// is not exported or if the field was not found.
func (f *Field) Field(name string) *Field {
field, ok := f.FieldOk(name)
if !ok {
panic("field not found")
}
return field
}
// FieldOk returns the field from a nested struct. The boolean reports whether
// the field was found. It panics if the nested struct is not exported.
func (f *Field) FieldOk(name string) (*Field, bool) {
v := strctVal(f.value.Interface())
t := v.Type()
field, ok := t.FieldByName(name)
if !ok {
return nil, false
}
return &Field{
field: field,
value: v.FieldByName(name),
}, true
}
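To illustrate the Set error paths documented above, a small sketch (the server struct and values are made up for this example):

```go
package main

import (
	"fmt"

	"github.com/fatih/structs"
)

type server struct {
	Name string
	id   int // unexported on purpose
}

func main() {
	s := structs.New(&server{Name: "gopher"})

	// Setting with a matching kind succeeds.
	fmt.Println(s.Field("Name").Set("another gopher")) // <nil>

	// A kind mismatch returns an error instead of panicking.
	fmt.Println(s.Field("Name").Set(42)) // wrong kind. got: int want: string

	// Unexported fields cannot be set.
	fmt.Println(s.Field("id").Set(1)) // field is not exported
}
```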

@@ -0,0 +1,324 @@
package structs
import (
"reflect"
"testing"
)
// A test struct that defines all cases
type Foo struct {
A string
B int `structs:"y"`
C bool `json:"c"`
d string // not exported
E *Baz
x string `xml:"x"` // not exported, with tag
Y []string
Z map[string]interface{}
*Bar // embedded
}
type Baz struct {
A string
B int
}
type Bar struct {
E string
F int
g []string
}
func newStruct() *Struct {
b := &Bar{
E: "example",
F: 2,
g: []string{"zeynep", "fatih"},
}
// B and x is not initialized for testing
f := &Foo{
A: "gopher",
C: true,
d: "small",
E: nil,
Y: []string{"example"},
Z: nil,
}
f.Bar = b
return New(f)
}
func TestField_Set(t *testing.T) {
s := newStruct()
f := s.Field("A")
err := f.Set("fatih")
if err != nil {
t.Error(err)
}
if f.Value().(string) != "fatih" {
t.Errorf("Setted value is wrong: %s want: %s", f.Value().(string), "fatih")
}
f = s.Field("Y")
err = f.Set([]string{"override", "with", "this"})
if err != nil {
t.Error(err)
}
sliceLen := len(f.Value().([]string))
if sliceLen != 3 {
t.Errorf("Setted values slice length is wrong: %d, want: %d", sliceLen, 3)
}
f = s.Field("C")
err = f.Set(false)
if err != nil {
t.Error(err)
}
if f.Value().(bool) {
t.Errorf("Setted value is wrong: %s want: %s", f.Value().(bool), false)
}
// let's pass a different type
f = s.Field("A")
err = f.Set(123) // Field A is of type string, but we are going to pass an integer
if err == nil {
t.Error("Setting a field's value with a different type than the field's type should return an error")
}
// old value should be still there :)
if f.Value().(string) != "fatih" {
t.Errorf("Setted value is wrong: %s want: %s", f.Value().(string), "fatih")
}
// let's access an unexported field, which should give an error
f = s.Field("d")
err = f.Set("large")
if err != errNotExported {
t.Error(err)
}
// let's set a pointer to struct
b := &Bar{
E: "gopher",
F: 2,
}
f = s.Field("Bar")
err = f.Set(b)
if err != nil {
t.Error(err)
}
baz := &Baz{
A: "helloWorld",
B: 42,
}
f = s.Field("E")
err = f.Set(baz)
if err != nil {
t.Error(err)
}
ba := s.Field("E").Value().(*Baz)
if ba.A != "helloWorld" {
t.Errorf("could not set baz. Got: %s Want: helloWorld", ba.A)
}
}
func TestField(t *testing.T) {
s := newStruct()
defer func() {
err := recover()
if err == nil {
t.Error("Retrieveing a non existing field from the struct should panic")
}
}()
_ = s.Field("no-field")
}
func TestField_Kind(t *testing.T) {
s := newStruct()
f := s.Field("A")
if f.Kind() != reflect.String {
t.Errorf("Field A has wrong kind: %s want: %s", f.Kind(), reflect.String)
}
f = s.Field("B")
if f.Kind() != reflect.Int {
t.Errorf("Field B has wrong kind: %s want: %s", f.Kind(), reflect.Int)
}
// unexported
f = s.Field("d")
if f.Kind() != reflect.String {
t.Errorf("Field d has wrong kind: %s want: %s", f.Kind(), reflect.String)
}
}
func TestField_Tag(t *testing.T) {
s := newStruct()
v := s.Field("B").Tag("json")
if v != "" {
t.Errorf("Field's tag value of a non existing tag should return empty, got: %s", v)
}
v = s.Field("C").Tag("json")
if v != "c" {
t.Errorf("Field's tag value of the existing field C should return 'c', got: %s", v)
}
v = s.Field("d").Tag("json")
if v != "" {
t.Errorf("Field's tag value of a non exported field should return empty, got: %s", v)
}
v = s.Field("x").Tag("xml")
if v != "x" {
t.Errorf("Field's tag value of a non exported field with a tag should return 'x', got: %s", v)
}
v = s.Field("A").Tag("json")
if v != "" {
t.Errorf("Field's tag value of a existing field without a tag should return empty, got: %s", v)
}
}
func TestField_Value(t *testing.T) {
s := newStruct()
v := s.Field("A").Value()
val, ok := v.(string)
if !ok {
t.Errorf("Field's value of a A should be string")
}
if val != "gopher" {
t.Errorf("Field's value of a existing tag should return 'gopher', got: %s", val)
}
defer func() {
err := recover()
if err == nil {
t.Error("Value of a non exported field from the field should panic")
}
}()
// should panic
_ = s.Field("d").Value()
}
func TestField_IsEmbedded(t *testing.T) {
s := newStruct()
if !s.Field("Bar").IsEmbedded() {
t.Errorf("Fields 'Bar' field is an embedded field")
}
if s.Field("d").IsEmbedded() {
t.Errorf("Fields 'd' field is not an embedded field")
}
}
func TestField_IsExported(t *testing.T) {
s := newStruct()
if !s.Field("Bar").IsExported() {
t.Errorf("Fields 'Bar' field is an exported field")
}
if !s.Field("A").IsExported() {
t.Errorf("Fields 'A' field is an exported field")
}
if s.Field("d").IsExported() {
t.Errorf("Fields 'd' field is not an exported field")
}
}
func TestField_IsZero(t *testing.T) {
s := newStruct()
if s.Field("A").IsZero() {
t.Errorf("Fields 'A' field is an initialized field")
}
if !s.Field("B").IsZero() {
t.Errorf("Fields 'B' field is not an initialized field")
}
}
func TestField_Name(t *testing.T) {
s := newStruct()
if s.Field("A").Name() != "A" {
t.Errorf("Fields 'A' field should have the name 'A'")
}
}
func TestField_Field(t *testing.T) {
s := newStruct()
e := s.Field("Bar").Field("E")
val, ok := e.Value().(string)
if !ok {
t.Error("The value of the field 'e' inside 'Bar' struct should be string")
}
if val != "example" {
t.Errorf("The value of 'e' should be 'example, got: %s", val)
}
defer func() {
err := recover()
if err == nil {
t.Error("Field of a non existing nested struct should panic")
}
}()
_ = s.Field("Bar").Field("e")
}
func TestField_Fields(t *testing.T) {
s := newStruct()
fields := s.Field("Bar").Fields()
if len(fields) != 3 {
t.Errorf("We expect 3 fields in embedded struct, was: %d", len(fields))
}
}
func TestField_FieldOk(t *testing.T) {
s := newStruct()
b, ok := s.FieldOk("Bar")
if !ok {
t.Error("The field 'Bar' should exists.")
}
e, ok := b.FieldOk("E")
if !ok {
t.Error("The field 'E' should exists.")
}
val, ok := e.Value().(string)
if !ok {
t.Error("The value of the field 'e' inside 'Bar' struct should be string")
}
if val != "example" {
t.Errorf("The value of 'e' should be 'example, got: %s", val)
}
}

@@ -0,0 +1,449 @@
// Package structs contains various utilities functions to work with structs.
package structs
import "reflect"
var (
// DefaultTagName is the default tag name for struct fields which provides
// a more granular way to tweak certain structs. Look up the necessary functions
// for more info.
DefaultTagName = "structs" // struct's field default tag name
)
// Struct encapsulates a struct type to provide several high level functions
// around the struct.
type Struct struct {
raw interface{}
value reflect.Value
TagName string
}
// New returns a new *Struct with the struct s. It panics if the s's kind is
// not struct.
func New(s interface{}) *Struct {
return &Struct{
raw: s,
value: strctVal(s),
TagName: DefaultTagName,
}
}
// Map converts the given struct to a map[string]interface{}, where the keys
// of the map are the field names and the values of the map are the associated
// values of the fields. The default key string is the struct field name but
// can be changed in the struct field's tag value. The "structs" key in the
// struct's field tag value is the key name. Example:
//
// // Field appears in map as key "myName".
// Name string `structs:"myName"`
//
// A tag value with the content of "-" ignores that particular field. Example:
//
// // Field is ignored by this package.
// Field bool `structs:"-"`
//
// A tag value with the option of "omitnested" stops iterating further if the type
// is a struct. Example:
//
// // Field is not processed further by this package.
// Field time.Time `structs:"myName,omitnested"`
// Field *http.Request `structs:",omitnested"`
//
// A tag value with the option of "omitempty" ignores that particular field if
// the field value is empty. Example:
//
// // Field appears in map as key "myName", but the field is
// // skipped if empty.
// Field string `structs:"myName,omitempty"`
//
// // Field appears in map as key "Field" (the default), but
// // the field is skipped if empty.
// Field string `structs:",omitempty"`
//
// Note that only exported fields of a struct can be accessed, non exported
// fields will be neglected.
func (s *Struct) Map() map[string]interface{} {
out := make(map[string]interface{})
fields := s.structFields()
for _, field := range fields {
name := field.Name
val := s.value.FieldByName(name)
var finalVal interface{}
tagName, tagOpts := parseTag(field.Tag.Get(s.TagName))
if tagName != "" {
name = tagName
}
// if the value is a zero value and the field is marked as omitempty do
// not include
if tagOpts.Has("omitempty") {
zero := reflect.Zero(val.Type()).Interface()
current := val.Interface()
if reflect.DeepEqual(current, zero) {
continue
}
}
if IsStruct(val.Interface()) && !tagOpts.Has("omitnested") {
// look out for embedded structs, and convert them to a
// map[string]interface{} too
n := New(val.Interface())
n.TagName = s.TagName
finalVal = n.Map()
} else {
finalVal = val.Interface()
}
out[name] = finalVal
}
return out
}
// Values converts the given s struct's field values to a []interface{}. A
// struct tag with the content of "-" ignores that particular field.
// Example:
//
// // Field is ignored by this package.
// Field int `structs:"-"`
//
// A value with the option of "omitnested" stops iterating further if the type
// is a struct. Example:
//
// // Field is not processed further by this package.
// Field time.Time `structs:",omitnested"`
// Field *http.Request `structs:",omitnested"`
//
// A tag value with the option of "omitempty" ignores that particular field and
// is not added to the values if the field value is empty. Example:
//
// // Field is skipped if empty
// Field string `structs:",omitempty"`
//
// Note that only exported fields of a struct can be accessed, non exported
// fields will be neglected.
func (s *Struct) Values() []interface{} {
fields := s.structFields()
var t []interface{}
for _, field := range fields {
val := s.value.FieldByName(field.Name)
_, tagOpts := parseTag(field.Tag.Get(s.TagName))
// if the value is a zero value and the field is marked as omitempty do
// not include
if tagOpts.Has("omitempty") {
zero := reflect.Zero(val.Type()).Interface()
current := val.Interface()
if reflect.DeepEqual(current, zero) {
continue
}
}
if IsStruct(val.Interface()) && !tagOpts.Has("omitnested") {
// look out for embedded structs, and convert them to a
// []interface{} to be added to the final values slice
for _, embeddedVal := range Values(val.Interface()) {
t = append(t, embeddedVal)
}
} else {
t = append(t, val.Interface())
}
}
return t
}
// Fields returns a slice of Fields. A struct tag with the content of "-"
// ignores the checking of that particular field. Example:
//
// // Field is ignored by this package.
// Field bool `structs:"-"`
//
// It panics if s's kind is not struct.
func (s *Struct) Fields() []*Field {
return getFields(s.value, s.TagName)
}
// Names returns a slice of field names. A struct tag with the content of "-"
// ignores the checking of that particular field. Example:
//
// // Field is ignored by this package.
// Field bool `structs:"-"`
//
// It panics if s's kind is not struct.
func (s *Struct) Names() []string {
fields := getFields(s.value, s.TagName)
names := make([]string, len(fields))
for i, field := range fields {
names[i] = field.Name()
}
return names
}
func getFields(v reflect.Value, tagName string) []*Field {
if v.Kind() == reflect.Ptr {
v = v.Elem()
}
t := v.Type()
var fields []*Field
for i := 0; i < t.NumField(); i++ {
field := t.Field(i)
if tag := field.Tag.Get(tagName); tag == "-" {
continue
}
f := &Field{
field: field,
value: v.FieldByName(field.Name),
}
fields = append(fields, f)
}
return fields
}
// Field returns a new Field struct that provides several high level functions
// around a single struct field entity. It panics if the field is not found.
func (s *Struct) Field(name string) *Field {
f, ok := s.FieldOk(name)
if !ok {
panic("field not found")
}
return f
}
// FieldOk returns a new Field struct that provides several high level functions
// around a single struct field entity. The boolean reports whether the field
// was found.
func (s *Struct) FieldOk(name string) (*Field, bool) {
t := s.value.Type()
field, ok := t.FieldByName(name)
if !ok {
return nil, false
}
return &Field{
field: field,
value: s.value.FieldByName(name),
defaultTag: s.TagName,
}, true
}
// IsZero returns true if all fields in a struct are zero values (not
// initialized). A struct tag with the content of "-" ignores the checking of
// that particular field. Example:
//
// // Field is ignored by this package.
// Field bool `structs:"-"`
//
// A value with the option of "omitnested" stops iterating further if the type
// is a struct. Example:
//
// // Field is not processed further by this package.
// Field time.Time `structs:"myName,omitnested"`
// Field *http.Request `structs:",omitnested"`
//
// Note that only exported fields of a struct can be accessed, non exported
// fields will be neglected. It panics if s's kind is not struct.
func (s *Struct) IsZero() bool {
fields := s.structFields()
for _, field := range fields {
val := s.value.FieldByName(field.Name)
_, tagOpts := parseTag(field.Tag.Get(s.TagName))
if IsStruct(val.Interface()) && !tagOpts.Has("omitnested") {
ok := IsZero(val.Interface())
if !ok {
return false
}
continue
}
// zero value of the given field, such as "" for string, 0 for int
zero := reflect.Zero(val.Type()).Interface()
// current value of the given field
current := val.Interface()
if !reflect.DeepEqual(current, zero) {
return false
}
}
return true
}
// HasZero returns true if a field in a struct is not initialized (zero value).
// A struct tag with the content of "-" ignores the checking of that particular
// field. Example:
//
// // Field is ignored by this package.
// Field bool `structs:"-"`
//
// A value with the option of "omitnested" stops iterating further if the type
// is a struct. Example:
//
// // Field is not processed further by this package.
// Field time.Time `structs:"myName,omitnested"`
// Field *http.Request `structs:",omitnested"`
//
// Note that only exported fields of a struct can be accessed, non exported
// fields will be neglected. It panics if s's kind is not struct.
func (s *Struct) HasZero() bool {
fields := s.structFields()
for _, field := range fields {
val := s.value.FieldByName(field.Name)
_, tagOpts := parseTag(field.Tag.Get(s.TagName))
if IsStruct(val.Interface()) && !tagOpts.Has("omitnested") {
ok := HasZero(val.Interface())
if ok {
return true
}
continue
}
// zero value of the given field, such as "" for string, 0 for int
zero := reflect.Zero(val.Type()).Interface()
// current value of the given field
current := val.Interface()
if reflect.DeepEqual(current, zero) {
return true
}
}
return false
}
// Name returns the struct's type name within its package. For more info refer
// to Name() function.
func (s *Struct) Name() string {
return s.value.Type().Name()
}
// structFields returns the exported struct fields for a given s struct. This
// is a convenient helper method to avoid duplicate code in some of the
// functions.
func (s *Struct) structFields() []reflect.StructField {
t := s.value.Type()
var f []reflect.StructField
for i := 0; i < t.NumField(); i++ {
field := t.Field(i)
// we can't access the value of unexported fields
if field.PkgPath != "" {
continue
}
// don't check if it's omitted
if tag := field.Tag.Get(s.TagName); tag == "-" {
continue
}
f = append(f, field)
}
return f
}
func strctVal(s interface{}) reflect.Value {
v := reflect.ValueOf(s)
// if pointer, get the underlying element
if v.Kind() == reflect.Ptr {
v = v.Elem()
}
if v.Kind() != reflect.Struct {
panic("not struct")
}
return v
}
// Map converts the given struct to a map[string]interface{}. For more info
// refer to Struct types Map() method. It panics if s's kind is not struct.
func Map(s interface{}) map[string]interface{} {
return New(s).Map()
}
// Values converts the given struct to a []interface{}. For more info refer to
// Struct types Values() method. It panics if s's kind is not struct.
func Values(s interface{}) []interface{} {
return New(s).Values()
}
// Fields returns a slice of *Field. For more info refer to Struct types
// Fields() method. It panics if s's kind is not struct.
func Fields(s interface{}) []*Field {
return New(s).Fields()
}
// Names returns a slice of field names. For more info refer to Struct types
// Names() method. It panics if s's kind is not struct.
func Names(s interface{}) []string {
return New(s).Names()
}
// IsZero returns true if all fields are equal to a zero value. For more info
// refer to Struct types IsZero() method. It panics if s's kind is not struct.
func IsZero(s interface{}) bool {
return New(s).IsZero()
}
// HasZero returns true if any field is equal to a zero value. For more info
// refer to Struct types HasZero() method. It panics if s's kind is not struct.
func HasZero(s interface{}) bool {
return New(s).HasZero()
}
// IsStruct returns true if the given variable is a struct or a pointer to
// struct.
func IsStruct(s interface{}) bool {
v := reflect.ValueOf(s)
if v.Kind() == reflect.Ptr {
v = v.Elem()
}
// uninitialized zero value of a struct
if v.Kind() == reflect.Invalid {
return false
}
return v.Kind() == reflect.Struct
}
// Name returns the struct's type name within its package. It returns an
// empty string for unnamed types. It panics if s's kind is not struct.
func Name(s interface{}) string {
return New(s).Name()
}

@@ -0,0 +1,351 @@
package structs
import (
"fmt"
"time"
)
func ExampleNew() {
type Server struct {
Name string
ID int32
Enabled bool
}
server := &Server{
Name: "Arslan",
ID: 123456,
Enabled: true,
}
s := New(server)
fmt.Printf("Name : %v\n", s.Name())
fmt.Printf("Values : %v\n", s.Values())
fmt.Printf("Value of ID : %v\n", s.Field("ID").Value())
// Output:
// Name : Server
// Values : [Arslan 123456 true]
// Value of ID : 123456
}
func ExampleMap() {
type Server struct {
Name string
ID int32
Enabled bool
}
s := &Server{
Name: "Arslan",
ID: 123456,
Enabled: true,
}
m := Map(s)
fmt.Printf("%#v\n", m["Name"])
fmt.Printf("%#v\n", m["ID"])
fmt.Printf("%#v\n", m["Enabled"])
// Output:
// "Arslan"
// 123456
// true
}
func ExampleMap_tags() {
// Custom tags can change the map keys instead of using the fields name
type Server struct {
Name string `structs:"server_name"`
ID int32 `structs:"server_id"`
Enabled bool `structs:"enabled"`
}
s := &Server{
Name: "Zeynep",
ID: 789012,
}
m := Map(s)
// access them by the custom tags defined above
fmt.Printf("%#v\n", m["server_name"])
fmt.Printf("%#v\n", m["server_id"])
fmt.Printf("%#v\n", m["enabled"])
// Output:
// "Zeynep"
// 789012
// false
}
func ExampleMap_nested() {
// By default, fields with struct types are processed too. We can stop
// processing them via the "omitnested" tag option.
type Server struct {
Name string `structs:"server_name"`
ID int32 `structs:"server_id"`
Time time.Time `structs:"time,omitnested"` // do not convert to map[string]interface{}
}
const shortForm = "2006-Jan-02"
t, _ := time.Parse("2006-Jan-02", "2013-Feb-03")
s := &Server{
Name: "Zeynep",
ID: 789012,
Time: t,
}
m := Map(s)
// access them by the custom tags defined above
fmt.Printf("%v\n", m["server_name"])
fmt.Printf("%v\n", m["server_id"])
fmt.Printf("%v\n", m["time"].(time.Time))
// Output:
// Zeynep
// 789012
// 2013-02-03 00:00:00 +0000 UTC
}
func ExampleMap_omitEmpty() {
// By default, fields with zero values are processed too. We can stop
// processing them via the "omitempty" tag option.
type Server struct {
Name string `structs:",omitempty"`
ID int32 `structs:"server_id,omitempty"`
Location string
}
// Only add location
s := &Server{
Location: "Tokyo",
}
m := Map(s)
// map contains only the Location field
fmt.Printf("%v\n", m)
// Output:
// map[Location:Tokyo]
}
func ExampleValues() {
type Server struct {
Name string
ID int32
Enabled bool
}
s := &Server{
Name: "Fatih",
ID: 135790,
Enabled: false,
}
m := Values(s)
fmt.Printf("Values: %+v\n", m)
// Output:
// Values: [Fatih 135790 false]
}
func ExampleValues_omitEmpty() {
// By default, fields with zero values are processed too. We can stop
// processing them via the "omitempty" tag option.
type Server struct {
Name string `structs:",omitempty"`
ID int32 `structs:"server_id,omitempty"`
Location string
}
// Only add location
s := &Server{
Location: "Ankara",
}
m := Values(s)
// values contains only the Location field
fmt.Printf("Values: %+v\n", m)
// Output:
// Values: [Ankara]
}
func ExampleValues_tags() {
type Location struct {
City string
Country string
}
type Server struct {
Name string
ID int32
Enabled bool
Location Location `structs:"-"` // values from location are not included anymore
}
s := &Server{
Name: "Fatih",
ID: 135790,
Enabled: false,
Location: Location{City: "Ankara", Country: "Turkey"},
}
// Let's get all values from the struct s. Note that we don't include values
// from the Location field
m := Values(s)
fmt.Printf("Values: %+v\n", m)
// Output:
// Values: [Fatih 135790 false]
}
func ExampleFields() {
type Access struct {
Name string
LastAccessed time.Time
Number int
}
s := &Access{
Name: "Fatih",
LastAccessed: time.Now(),
Number: 1234567,
}
fields := Fields(s)
for i, field := range fields {
fmt.Printf("[%d] %+v\n", i, field.Name())
}
// Output:
// [0] Name
// [1] LastAccessed
// [2] Number
}
func ExampleFields_nested() {
type Person struct {
Name string
Number int
}
type Access struct {
Person Person
HasPermission bool
LastAccessed time.Time
}
s := &Access{
Person: Person{Name: "fatih", Number: 1234567},
LastAccessed: time.Now(),
HasPermission: true,
}
// Let's get all fields from the struct s.
fields := Fields(s)
for _, field := range fields {
if field.Name() == "Person" {
fmt.Printf("Access.Person.Name: %+v\n", field.Field("Name").Value())
}
}
// Output:
// Access.Person.Name: fatih
}
func ExampleField() {
type Person struct {
Name string
Number int
}
type Access struct {
Person Person
HasPermission bool
LastAccessed time.Time
}
access := &Access{
Person: Person{Name: "fatih", Number: 1234567},
LastAccessed: time.Now(),
HasPermission: true,
}
// Create a new Struct type
s := New(access)
// Get the Field type for "Person" field
p := s.Field("Person")
// Get the underlying "Name field" and print the value of it
name := p.Field("Name")
fmt.Printf("Value of Person.Access.Name: %+v\n", name.Value())
// Output:
// Value of Person.Access.Name: fatih
}
func ExampleIsZero() {
type Server struct {
Name string
ID int32
Enabled bool
}
// Nothing is initialized
a := &Server{}
isZeroA := IsZero(a)
// Name and Enabled are initialized, but not ID
b := &Server{
Name: "Golang",
Enabled: true,
}
isZeroB := IsZero(b)
fmt.Printf("%#v\n", isZeroA)
fmt.Printf("%#v\n", isZeroB)
// Output:
// true
// false
}
func ExampleHasZero() {
// Let's define an Access struct. Note that the "Enabled" field is not
// going to be checked because we added the "structs" tag to the field.
type Access struct {
Name string
LastAccessed time.Time
Number int
Enabled bool `structs:"-"`
}
// Name and Number are not initialized.
a := &Access{
LastAccessed: time.Now(),
}
hasZeroA := HasZero(a)
// Name and Number are initialized.
b := &Access{
Name: "Fatih",
LastAccessed: time.Now(),
Number: 12345,
}
hasZeroB := HasZero(b)
fmt.Printf("%#v\n", hasZeroA)
fmt.Printf("%#v\n", hasZeroB)
// Output:
// true
// false
}

@@ -0,0 +1,898 @@
package structs
import (
"fmt"
"reflect"
"testing"
"time"
)
func TestMapNonStruct(t *testing.T) {
foo := []string{"foo"}
defer func() {
err := recover()
if err == nil {
t.Error("Passing a non struct into Map should panic")
}
}()
// this should panic. We are going to recover and test it
_ = Map(foo)
}
func TestStructIndexes(t *testing.T) {
type C struct {
something int
Props map[string]interface{}
}
defer func() {
err := recover()
if err != nil {
fmt.Printf("err %+v\n", err)
t.Error("Using mixed indexes should not panic")
}
}()
// They should not panic
_ = Map(&C{})
_ = Fields(&C{})
_ = Values(&C{})
_ = IsZero(&C{})
_ = HasZero(&C{})
}
func TestMap(t *testing.T) {
var T = struct {
A string
B int
C bool
}{
A: "a-value",
B: 2,
C: true,
}
a := Map(T)
if typ := reflect.TypeOf(a).Kind(); typ != reflect.Map {
t.Errorf("Map should return a map type, got: %v", typ)
}
// we have three fields
if len(a) != 3 {
t.Errorf("Map should return a map of len 3, got: %d", len(a))
}
inMap := func(val interface{}) bool {
for _, v := range a {
if reflect.DeepEqual(v, val) {
return true
}
}
return false
}
for _, val := range []interface{}{"a-value", 2, true} {
if !inMap(val) {
t.Errorf("Map should have the value %v", val)
}
}
}
func TestMap_Tag(t *testing.T) {
var T = struct {
A string `structs:"x"`
B int `structs:"y"`
C bool `structs:"z"`
}{
A: "a-value",
B: 2,
C: true,
}
a := Map(T)
inMap := func(key interface{}) bool {
for k := range a {
if reflect.DeepEqual(k, key) {
return true
}
}
return false
}
for _, key := range []string{"x", "y", "z"} {
if !inMap(key) {
t.Errorf("Map should have the key %v", key)
}
}
}
func TestMap_CustomTag(t *testing.T) {
var T = struct {
A string `json:"x"`
B int `json:"y"`
C bool `json:"z"`
D struct {
E string `json:"jkl"`
} `json:"nested"`
}{
A: "a-value",
B: 2,
C: true,
}
T.D.E = "e-value"
s := New(T)
s.TagName = "json"
a := s.Map()
inMap := func(key interface{}) bool {
for k := range a {
if reflect.DeepEqual(k, key) {
return true
}
}
return false
}
for _, key := range []string{"x", "y", "z"} {
if !inMap(key) {
t.Errorf("Map should have the key %v", key)
}
}
nested, ok := a["nested"].(map[string]interface{})
if !ok {
t.Fatalf("Map should contain the D field that is tagged as 'nested'")
}
e, ok := nested["jkl"].(string)
if !ok {
t.Fatalf("Map should contain the D.E field that is tagged as 'jkl'")
}
if e != "e-value" {
t.Errorf("D.E field should be equal to 'e-value', got: '%v'", e)
}
}
func TestMap_MultipleCustomTag(t *testing.T) {
var A = struct {
X string `aa:"ax"`
}{"a_value"}
aStruct := New(A)
aStruct.TagName = "aa"
var B = struct {
X string `bb:"bx"`
}{"b_value"}
bStruct := New(B)
bStruct.TagName = "bb"
a, b := aStruct.Map(), bStruct.Map()
if !reflect.DeepEqual(a, map[string]interface{}{"ax": "a_value"}) {
t.Error("Map should have field ax with value a_value")
}
if !reflect.DeepEqual(b, map[string]interface{}{"bx": "b_value"}) {
t.Error("Map should have field bx with value b_value")
}
}
func TestMap_OmitEmpty(t *testing.T) {
type A struct {
Name string
Value string `structs:",omitempty"`
Time time.Time `structs:",omitempty"`
}
a := A{}
m := Map(a)
_, ok := m["Value"].(map[string]interface{})
if ok {
t.Error("Map should not contain the Value field that is tagged as omitempty")
}
_, ok = m["Time"].(map[string]interface{})
if ok {
t.Error("Map should not contain the Time field that is tagged as omitempty")
}
}
func TestMap_OmitNested(t *testing.T) {
type A struct {
Name string
Value string
Time time.Time `structs:",omitnested"`
}
a := A{Time: time.Now()}
type B struct {
Desc string
A A
}
b := &B{A: a}
m := Map(b)
in, ok := m["A"].(map[string]interface{})
if !ok {
t.Error("Map nested structs is not available in the map")
}
// should not happen
if _, ok := in["Time"].(map[string]interface{}); ok {
t.Error("Map nested struct should omit recursiving parsing of Time")
}
if _, ok := in["Time"].(time.Time); !ok {
t.Error("Map nested struct should stop parsing of Time at is current value")
}
}
func TestMap_Nested(t *testing.T) {
type A struct {
Name string
}
a := &A{Name: "example"}
type B struct {
A *A
}
b := &B{A: a}
m := Map(b)
if typ := reflect.TypeOf(m).Kind(); typ != reflect.Map {
t.Errorf("Map should return a map type, got: %v", typ)
}
in, ok := m["A"].(map[string]interface{})
if !ok {
t.Error("Map nested structs is not available in the map")
}
if name := in["Name"].(string); name != "example" {
t.Errorf("Map nested struct's name field should give example, got: %s", name)
}
}
func TestMap_Anonymous(t *testing.T) {
type A struct {
Name string
}
a := &A{Name: "example"}
type B struct {
*A
}
b := &B{}
b.A = a
m := Map(b)
if typ := reflect.TypeOf(m).Kind(); typ != reflect.Map {
t.Errorf("Map should return a map type, got: %v", typ)
}
in, ok := m["A"].(map[string]interface{})
if !ok {
t.Error("Embedded structs is not available in the map")
}
if name := in["Name"].(string); name != "example" {
t.Errorf("Embedded A struct's Name field should give example, got: %s", name)
}
}
func TestStruct(t *testing.T) {
var T = struct{}{}
if !IsStruct(T) {
t.Errorf("T should be a struct, got: %T", T)
}
if !IsStruct(&T) {
t.Errorf("T should be a struct, got: %T", T)
}
}
func TestValues(t *testing.T) {
var T = struct {
A string
B int
C bool
}{
A: "a-value",
B: 2,
C: true,
}
s := Values(T)
if typ := reflect.TypeOf(s).Kind(); typ != reflect.Slice {
t.Errorf("Values should return a slice type, got: %v", typ)
}
inSlice := func(val interface{}) bool {
for _, v := range s {
if reflect.DeepEqual(v, val) {
return true
}
}
return false
}
for _, val := range []interface{}{"a-value", 2, true} {
if !inSlice(val) {
t.Errorf("Values should have the value %v", val)
}
}
}
func TestValues_OmitEmpty(t *testing.T) {
type A struct {
Name string
Value int `structs:",omitempty"`
}
a := A{Name: "example"}
s := Values(a)
if len(s) != 1 {
t.Errorf("Values of omitted empty fields should be not counted")
}
if s[0].(string) != "example" {
t.Errorf("Values of omitted empty fields should left the value example")
}
}
func TestValues_OmitNested(t *testing.T) {
type A struct {
Name string
Value int
}
a := A{
Name: "example",
Value: 123,
}
type B struct {
A A `structs:",omitnested"`
C int
}
b := &B{A: a, C: 123}
s := Values(b)
if len(s) != 2 {
t.Errorf("Values of omitted nested struct should be not counted")
}
inSlice := func(val interface{}) bool {
for _, v := range s {
if reflect.DeepEqual(v, val) {
return true
}
}
return false
}
for _, val := range []interface{}{123, a} {
if !inSlice(val) {
t.Errorf("Values should have the value %v", val)
}
}
}
func TestValues_Nested(t *testing.T) {
type A struct {
Name string
}
a := A{Name: "example"}
type B struct {
A A
C int
}
b := &B{A: a, C: 123}
s := Values(b)
inSlice := func(val interface{}) bool {
for _, v := range s {
if reflect.DeepEqual(v, val) {
return true
}
}
return false
}
for _, val := range []interface{}{"example", 123} {
if !inSlice(val) {
t.Errorf("Values should have the value %v", val)
}
}
}
func TestValues_Anonymous(t *testing.T) {
type A struct {
Name string
}
a := A{Name: "example"}
type B struct {
A
C int
}
b := &B{C: 123}
b.A = a
s := Values(b)
inSlice := func(val interface{}) bool {
for _, v := range s {
if reflect.DeepEqual(v, val) {
return true
}
}
return false
}
for _, val := range []interface{}{"example", 123} {
if !inSlice(val) {
t.Errorf("Values should have the value %v", val)
}
}
}
func TestNames(t *testing.T) {
var T = struct {
A string
B int
C bool
}{
A: "a-value",
B: 2,
C: true,
}
s := Names(T)
if len(s) != 3 {
t.Errorf("Names should return a slice of len 3, got: %d", len(s))
}
inSlice := func(val string) bool {
for _, v := range s {
if reflect.DeepEqual(v, val) {
return true
}
}
return false
}
for _, val := range []string{"A", "B", "C"} {
if !inSlice(val) {
t.Errorf("Names should have the value %v", val)
}
}
}
func TestFields(t *testing.T) {
var T = struct {
A string
B int
C bool
}{
A: "a-value",
B: 2,
C: true,
}
s := Fields(T)
if len(s) != 3 {
t.Errorf("Fields should return a slice of len 3, got: %d", len(s))
}
inSlice := func(val string) bool {
for _, v := range s {
if reflect.DeepEqual(v.Name(), val) {
return true
}
}
return false
}
for _, val := range []string{"A", "B", "C"} {
if !inSlice(val) {
t.Errorf("Fields should have the value %v", val)
}
}
}
func TestFields_OmitNested(t *testing.T) {
type A struct {
Name string
Enabled bool
}
a := A{Name: "example"}
type B struct {
A A
C int
Value string `structs:"-"`
Number int
}
b := &B{A: a, C: 123}
s := Fields(b)
if len(s) != 3 {
t.Errorf("Fields should omit nested struct. Expecting 2 got: %d", len(s))
}
inSlice := func(val interface{}) bool {
for _, v := range s {
if reflect.DeepEqual(v.Name(), val) {
return true
}
}
return false
}
for _, val := range []interface{}{"A", "C"} {
if !inSlice(val) {
t.Errorf("Fields should have the value %v", val)
}
}
}
func TestFields_Anonymous(t *testing.T) {
type A struct {
Name string
}
a := A{Name: "example"}
type B struct {
A
C int
}
b := &B{C: 123}
b.A = a
s := Fields(b)
inSlice := func(val interface{}) bool {
for _, v := range s {
if reflect.DeepEqual(v.Name(), val) {
return true
}
}
return false
}
for _, val := range []interface{}{"A", "C"} {
if !inSlice(val) {
t.Errorf("Fields should have the value %v", val)
}
}
}
func TestIsZero(t *testing.T) {
var T = struct {
A string
B int
C bool `structs:"-"`
D []string
}{}
ok := IsZero(T)
if !ok {
t.Error("IsZero should return true because none of the fields are initialized.")
}
var X = struct {
A string
F *bool
}{
A: "a-value",
}
ok = IsZero(X)
if ok {
t.Error("IsZero should return false because A is initialized")
}
var Y = struct {
A string
B int
}{
A: "a-value",
B: 123,
}
ok = IsZero(Y)
if ok {
t.Error("IsZero should return false because A and B is initialized")
}
}
func TestIsZero_OmitNested(t *testing.T) {
type A struct {
Name string
D string
}
a := A{Name: "example"}
type B struct {
A A `structs:",omitnested"`
C int
}
b := &B{A: a, C: 123}
ok := IsZero(b)
if ok {
t.Error("IsZero should return false because A, B and C are initialized")
}
aZero := A{}
bZero := &B{A: aZero}
ok = IsZero(bZero)
if !ok {
t.Error("IsZero should return true because neither A nor B is initialized")
}
}
func TestIsZero_Nested(t *testing.T) {
type A struct {
Name string
D string
}
a := A{Name: "example"}
type B struct {
A A
C int
}
b := &B{A: a, C: 123}
ok := IsZero(b)
if ok {
t.Error("IsZero should return false because A, B and C are initialized")
}
aZero := A{}
bZero := &B{A: aZero}
ok = IsZero(bZero)
if !ok {
t.Error("IsZero should return true because neither A nor B is initialized")
}
}
func TestIsZero_Anonymous(t *testing.T) {
type A struct {
Name string
D string
}
a := A{Name: "example"}
type B struct {
A
C int
}
b := &B{C: 123}
b.A = a
ok := IsZero(b)
if ok {
t.Error("IsZero should return false because A, B and C are initialized")
}
aZero := A{}
bZero := &B{}
bZero.A = aZero
ok = IsZero(bZero)
if !ok {
t.Error("IsZero should return true because neither A nor B is initialized")
}
}
func TestHasZero(t *testing.T) {
var T = struct {
A string
B int
C bool `structs:"-"`
D []string
}{
A: "a-value",
B: 2,
}
ok := HasZero(T)
if !ok {
t.Error("HasZero should return true because A and B are initialized.")
}
var X = struct {
A string
F *bool
}{
A: "a-value",
}
ok = HasZero(X)
if !ok {
t.Error("HasZero should return true because A is initialized")
}
var Y = struct {
A string
B int
}{
A: "a-value",
B: 123,
}
ok = HasZero(Y)
if ok {
t.Error("HasZero should return false because A and B is initialized")
}
}
func TestHasZero_OmitNested(t *testing.T) {
type A struct {
Name string
D string
}
a := A{Name: "example"}
type B struct {
A A `structs:",omitnested"`
C int
}
b := &B{A: a, C: 123}
// Because the field A inside B is tagged omitnested, HasZero stops iterating
// deeper and does not look up D, so it should return false
ok := HasZero(b)
if ok {
t.Error("HasZero should return false because A and C are initialized")
}
}
func TestHasZero_Nested(t *testing.T) {
type A struct {
Name string
D string
}
a := A{Name: "example"}
type B struct {
A A
C int
}
b := &B{A: a, C: 123}
ok := HasZero(b)
if !ok {
t.Error("HasZero should return true because D is not initialized")
}
}
func TestHasZero_Anonymous(t *testing.T) {
type A struct {
Name string
D string
}
a := A{Name: "example"}
type B struct {
A
C int
}
b := &B{C: 123}
b.A = a
ok := HasZero(b)
if !ok {
t.Error("HasZero should return false because D is not initialized")
}
}
func TestName(t *testing.T) {
type Foo struct {
A string
B bool
}
f := &Foo{}
n := Name(f)
if n != "Foo" {
t.Errorf("Name should return Foo, got: %s", n)
}
unnamed := struct{ Name string }{Name: "Cihangir"}
m := Name(unnamed)
if m != "" {
t.Errorf("Name should return empty string for unnamed struct, got: %s", n)
}
defer func() {
err := recover()
if err == nil {
t.Error("Name should panic if a non struct is passed")
}
}()
Name([]string{})
}
func TestNestedNilPointer(t *testing.T) {
type Collar struct {
Engraving string
}
type Dog struct {
Name string
Collar *Collar
}
type Person struct {
Name string
Dog *Dog
}
person := &Person{
Name: "John",
}
personWithDog := &Person{
Name: "Ron",
Dog: &Dog{
Name: "Rover",
},
}
personWithDogWithCollar := &Person{
Name: "Kon",
Dog: &Dog{
Name: "Ruffles",
Collar: &Collar{
Engraving: "If lost, call Kon",
},
},
}
defer func() {
err := recover()
if err != nil {
fmt.Printf("err %+v\n", err)
t.Error("Internal nil pointer should not panic")
}
}()
_ = Map(person) // Panics
_ = Map(personWithDog) // Panics
_ = Map(personWithDogWithCollar) // Doesn't panic
}

Godeps/_workspace/src/github.com/fatih/structs/tags.go generated vendored Normal file
@@ -0,0 +1,32 @@
package structs
import "strings"
// tagOptions contains a slice of tag options
type tagOptions []string
// Has returns true if the given option is available in tagOptions
func (t tagOptions) Has(opt string) bool {
for _, tagOpt := range t {
if tagOpt == opt {
return true
}
}
return false
}
// parseTag splits a struct field's tag into its name and a list of options
// which come after the name. A tag is in the form of: "name,option1,option2".
// The name can be neglected.
func parseTag(tag string) (string, tagOptions) {
// tag is one of followings:
// ""
// "name"
// "name,opt"
// "name,opt,opt2"
// ",opt"
res := strings.Split(tag, ",")
return res[0], res[1:]
}
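Since parseTag and tagOptions are unexported, a sketch exercising them would have to live in package structs itself (for example in a _test.go file); the tag string below is made up for illustration:

```go
package structs

import "testing"

// TestParseTagSketch is an illustrative sketch, not part of the upstream
// package: it shows how a tag string splits into a name and options.
func TestParseTagSketch(t *testing.T) {
	name, opts := parseTag("myName,omitempty,omitnested")
	if name != "myName" {
		t.Errorf("name = %q, want %q", name, "myName")
	}
	if !opts.Has("omitempty") || !opts.Has("omitnested") {
		t.Error("expected the omitempty and omitnested options to be present")
	}
	if opts.Has("flatten") {
		t.Error("flatten was not in the tag, so Has should return false")
	}
}
```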

@@ -0,0 +1,46 @@
package structs
import "testing"
func TestParseTag_Name(t *testing.T) {
tags := []struct {
tag string
has bool
}{
{"", false},
{"name", true},
{"name,opt", true},
{"name , opt, opt2", false}, // has a single whitespace
{", opt, opt2", false},
}
for _, tag := range tags {
name, _ := parseTag(tag.tag)
if (name != "name") && tag.has {
t.Errorf("Parse tag should return name: %#v", tag)
}
}
}
func TestParseTag_Opts(t *testing.T) {
tags := []struct {
opts string
has bool
}{
{"name", false},
{"name,opt", true},
{"name , opt, opt2", false}, // has a single whitespace
{",opt, opt2", true},
{", opt3, opt4", false},
}
// search for "opt"
for _, tag := range tags {
_, opts := parseTag(tag.opts)
if opts.Has("opt") != tag.has {
t.Errorf("Tag opts should have opt: %#v", tag)
}
}
}

@@ -0,0 +1,353 @@
Mozilla Public License, version 2.0
1. Definitions
1.1. “Contributor”
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
1.2. “Contributor Version”
means the combination of the Contributions of others (if any) used by a
Contributor and that particular Contributor's Contribution.
1.3. “Contribution”
means Covered Software of a particular Contributor.
1.4. “Covered Software”
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form, and
Modifications of such Source Code Form, in each case including portions
thereof.
1.5. “Incompatible With Secondary Licenses”
means
a. that the initial Contributor has attached the notice described in
Exhibit B to the Covered Software; or
b. that the Covered Software was made available under the terms of version
1.1 or earlier of the License, but not also under the terms of a
Secondary License.
1.6. “Executable Form”
means any form of the work other than Source Code Form.
1.7. “Larger Work”
means a work that combines Covered Software with other material, in a separate
file or files, that is not Covered Software.
1.8. “License”
means this document.
1.9. “Licensable”
means having the right to grant, to the maximum extent possible, whether at the
time of the initial grant or subsequently, any and all of the rights conveyed by
this License.
1.10. “Modifications”
means any of the following:
a. any file in Source Code Form that results from an addition to, deletion
from, or modification of the contents of Covered Software; or
b. any new file in Source Code Form that contains any Covered Software.
1.11. “Patent Claims” of a Contributor
means any patent claim(s), including without limitation, method, process,
and apparatus claims, in any patent Licensable by such Contributor that
would be infringed, but for the grant of the License, by the making,
using, selling, offering for sale, having made, import, or transfer of
either its Contributions or its Contributor Version.
1.12. “Secondary License”
means either the GNU General Public License, Version 2.0, the GNU Lesser
General Public License, Version 2.1, the GNU Affero General Public
License, Version 3.0, or any later versions of those licenses.
1.13. “Source Code Form”
means the form of the work preferred for making modifications.
1.14. “You” (or “Your”)
means an individual or a legal entity exercising rights under this
License. For legal entities, “You” includes any entity that controls, is
controlled by, or is under common control with You. For purposes of this
definition, “control” means (a) the power, direct or indirect, to cause
the direction or management of such entity, whether by contract or
otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
2. License Grants and Conditions
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
a. under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or as
part of a Larger Work; and
b. under Patent Claims of such Contributor to make, use, sell, offer for
sale, have made, import, and otherwise transfer either its Contributions
or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution become
effective for each Contribution on the date the Contributor first distributes
such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under this
License. No additional rights or licenses will be implied from the distribution
or licensing of Covered Software under this License. Notwithstanding Section
2.1(b) above, no patent license is granted by a Contributor:
a. for any code that a Contributor has removed from Covered Software; or
b. for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
c. under Patent Claims infringed by Covered Software in the absence of its
Contributions.
This License does not grant any rights in the trademarks, service marks, or
logos of any Contributor (except as may be necessary to comply with the
notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this License
(see Section 10.2) or under the terms of a Secondary License (if permitted
under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its Contributions
are its original creation(s) or it has sufficient rights to grant the
rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under applicable
copyright doctrines of fair use, fair dealing, or other equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
Section 2.1.
3. Responsibilities
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under the
terms of this License. You must inform recipients that the Source Code Form
of the Covered Software is governed by the terms of this License, and how
they can obtain a copy of this License. You may not attempt to alter or
restrict the recipients' rights in the Source Code Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
a. such Covered Software must also be made available in Source Code Form,
as described in Section 3.1, and You must inform recipients of the
Executable Form how they can obtain a copy of such Source Code Form by
reasonable means in a timely manner, at a charge no more than the cost
of distribution to the recipient; and
b. You may distribute such Executable Form under the terms of this License,
or sublicense it under different terms, provided that the license for
the Executable Form does not attempt to limit or alter the recipients'
rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for the
Covered Software. If the Larger Work is a combination of Covered Software
with a work governed by one or more Secondary Licenses, and the Covered
Software is not Incompatible With Secondary Licenses, this License permits
You to additionally distribute such Covered Software under the terms of
such Secondary License(s), so that the recipient of the Larger Work may, at
their option, further distribute the Covered Software under the terms of
either this License or such Secondary License(s).
3.4. Notices
You may not remove or alter the substance of any license notices (including
copyright notices, patent notices, disclaimers of warranty, or limitations
of liability) contained within the Source Code Form of the Covered
Software, except that You may alter any license notices to the extent
required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on behalf
of any Contributor. You must make it absolutely clear that any such
warranty, support, indemnity, or liability obligation is offered by You
alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
If it is impossible for You to comply with any of the terms of this License
with respect to some or all of the Covered Software due to statute, judicial
order, or regulation then You must: (a) comply with the terms of this License
to the maximum extent possible; and (b) describe the limitations and the code
they affect. Such description must be placed in a text file included with all
distributions of the Covered Software under this License. Except to the
extent prohibited by statute or regulation, such description must be
sufficiently detailed for a recipient of ordinary skill to be able to
understand it.
5. Termination
5.1. The rights granted under this License will terminate automatically if You
fail to comply with any of its terms. However, if You become compliant,
then the rights granted under this License from a particular Contributor
are reinstated (a) provisionally, unless and until such Contributor
explicitly and finally terminates Your grants, and (b) on an ongoing basis,
if such Contributor fails to notify You of the non-compliance by some
reasonable means prior to 60 days after You have come back into compliance.
Moreover, Your grants from a particular Contributor are reinstated on an
ongoing basis if such Contributor notifies You of the non-compliance by
some reasonable means, this is the first time You have received notice of
non-compliance with this License from such Contributor, and You become
compliant prior to 30 days after Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions, counter-claims,
and cross-claims) alleging that a Contributor Version directly or
indirectly infringes any patent, then the rights granted to You by any and
all Contributors for the Covered Software under Section 2.1 of this License
shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
license agreements (excluding distributors and resellers) which have been
validly granted by You or Your distributors under this License prior to
termination shall survive termination.
6. Disclaimer of Warranty
Covered Software is provided under this License on an “as is” basis, without
warranty of any kind, either expressed, implied, or statutory, including,
without limitation, warranties that the Covered Software is free of defects,
merchantable, fit for a particular purpose or non-infringing. The entire
risk as to the quality and performance of the Covered Software is with You.
Should any Covered Software prove defective in any respect, You (not any
Contributor) assume the cost of any necessary servicing, repair, or
correction. This disclaimer of warranty constitutes an essential part of this
License. No use of any Covered Software is authorized under this License
except under this disclaimer.
7. Limitation of Liability
Under no circumstances and under no legal theory, whether tort (including
negligence), contract, or otherwise, shall any Contributor, or anyone who
distributes Covered Software as permitted above, be liable to You for any
direct, indirect, special, incidental, or consequential damages of any
character including, without limitation, damages for lost profits, loss of
goodwill, work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from such
party's negligence to the extent applicable law prohibits such limitation.
Some jurisdictions do not allow the exclusion or limitation of incidental or
consequential damages, so this exclusion and limitation may not apply to You.
8. Litigation
Any litigation relating to this License may be brought only in the courts of
a jurisdiction where the defendant maintains its principal place of business
and such litigation shall be governed by laws of that jurisdiction, without
reference to its conflict-of-law provisions. Nothing in this Section shall
prevent a party's ability to bring cross-claims or counter-claims.
9. Miscellaneous
This License represents the complete agreement concerning the subject matter
hereof. If any provision of this License is held to be unenforceable, such
provision shall be reformed only to the extent necessary to make it
enforceable. Any law or regulation which provides that the language of a
contract shall be construed against the drafter shall not be used to construe
this License against a Contributor.
10. Versions of the License
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version of
the License under which You originally received the Covered Software, or
under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a modified
version of this License if you rename the license and remove any
references to the name of the license steward (except to note that such
modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
This Source Code Form is subject to the
terms of the Mozilla Public License, v.
2.0. If a copy of the MPL was not
distributed with this file, You can
obtain one at
http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular file, then
You may include the notice in a location (such as a LICENSE file in a relevant
directory) where a recipient would be likely to look for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - “Incompatible With Secondary Licenses” Notice
This Source Code Form is “Incompatible
With Secondary Licenses”, as defined by
the Mozilla Public License, v. 2.0.

View File

@ -0,0 +1,91 @@
# go-multierror
`go-multierror` is a package for Go that provides a mechanism for
representing a list of `error` values as a single `error`.
This allows a function in Go to return an `error` that might actually
be a list of errors. If the caller knows this, they can unwrap the
list and access the errors. If the caller doesn't know, the error
formats to a nice human-readable format.
`go-multierror` implements the
[errwrap](https://github.com/hashicorp/errwrap) interface so that it can
be used with that library, as well.
## Installation and Docs
Install using `go get github.com/hashicorp/go-multierror`.
Full documentation is available at
http://godoc.org/github.com/hashicorp/go-multierror
## Usage
go-multierror is easy to use and purposely built to be unobtrusive in
existing Go applications/libraries that may not be aware of it.
**Building a list of errors**
The `Append` function is used to create a list of errors. This function
behaves a lot like the Go built-in `append` function: it doesn't matter
if the first argument is nil, a `multierror.Error`, or any other `error`,
the function behaves as you would expect.
```go
var result error
if err := step1(); err != nil {
result = multierror.Append(result, err)
}
if err := step2(); err != nil {
result = multierror.Append(result, err)
}
return result
```
**Customizing the formatting of the errors**
By specifying a custom `ErrorFormat`, you can customize the format
of the `Error() string` function:
```go
var result *multierror.Error
// ... accumulate errors here, maybe using Append
if result != nil {
result.ErrorFormat = func([]error) string {
return "errors!"
}
}
```
**Accessing the list of errors**
`multierror.Error` implements `error` so if the caller doesn't know about
multierror, it will work just fine. But if you're aware a multierror might
be returned, you can use a type assertion to access the list of errors:
```go
if err := something(); err != nil {
if merr, ok := err.(*multierror.Error); ok {
// Use merr.Errors
}
}
```
**Returning a multierror only if there are errors**
If you build a `multierror.Error`, you can use the `ErrorOrNil` function
to return an `error` implementation only if there are errors to return:
```go
var result *multierror.Error
// ... accumulate errors here
// Return the `error` only if errors were added to the multierror, otherwise
// return nil since there are no errors.
return result.ErrorOrNil()
```
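
Putting the pieces above together, a minimal, self-contained sketch might look like this; the `validate` helper and its messages are invented for illustration, and the printed format comes from the default `ListFormatFunc`.

```go
package main

import (
	"errors"
	"fmt"

	"github.com/hashicorp/go-multierror"
)

// validate is a hypothetical helper that accumulates every problem it finds
// instead of stopping at the first one.
func validate(name string, port int) error {
	var result *multierror.Error
	if name == "" {
		result = multierror.Append(result, errors.New("name must not be empty"))
	}
	if port <= 0 {
		result = multierror.Append(result, errors.New("port must be a positive integer"))
	}
	// ErrorOrNil keeps the zero-error case indistinguishable from a plain nil error.
	return result.ErrorOrNil()
}

func main() {
	if err := validate("", 0); err != nil {
		fmt.Println(err)
		// 2 error(s) occurred:
		//
		// * name must not be empty
		// * port must be a positive integer
	}
}
```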

View File

@ -0,0 +1,30 @@
package multierror
// Append is a helper function that will append more errors
// onto an Error in order to create a larger multi-error.
//
// If err is not a multierror.Error, then it will be turned into
// one. If any of the errs are multierror.Error, they will be flattened
// one level into err.
func Append(err error, errs ...error) *Error {
switch err := err.(type) {
case *Error:
// Typed nils can reach here, so initialize if we are nil
if err == nil {
err = new(Error)
}
err.Errors = append(err.Errors, errs...)
return err
default:
newErrs := make([]error, 0, len(errs)+1)
if err != nil {
newErrs = append(newErrs, err)
}
newErrs = append(newErrs, errs...)
return &Error{
Errors: newErrs,
}
}
}

View File

@ -0,0 +1,45 @@
package multierror
import (
"errors"
"testing"
)
func TestAppend_Error(t *testing.T) {
original := &Error{
Errors: []error{errors.New("foo")},
}
result := Append(original, errors.New("bar"))
if len(result.Errors) != 2 {
t.Fatalf("wrong len: %d", len(result.Errors))
}
original = &Error{}
result = Append(original, errors.New("bar"))
if len(result.Errors) != 1 {
t.Fatalf("wrong len: %d", len(result.Errors))
}
// Test when a typed nil is passed
var e *Error
result = Append(e, errors.New("baz"))
if len(result.Errors) != 1 {
t.Fatalf("wrong len: %d", len(result.Errors))
}
}
func TestAppend_NilError(t *testing.T) {
var err error
result := Append(err, errors.New("bar"))
if len(result.Errors) != 1 {
t.Fatalf("wrong len: %d", len(result.Errors))
}
}
func TestAppend_NonError(t *testing.T) {
original := errors.New("foo")
result := Append(original, errors.New("bar"))
if len(result.Errors) != 2 {
t.Fatalf("wrong len: %d", len(result.Errors))
}
}

View File

@ -0,0 +1,26 @@
package multierror
// Flatten flattens the given error, merging any *Errors together into
// a single *Error.
func Flatten(err error) error {
// If it isn't an *Error, just return the error as-is
if _, ok := err.(*Error); !ok {
return err
}
// Otherwise, make the result and flatten away!
flatErr := new(Error)
flatten(err, flatErr)
return flatErr
}
func flatten(err error, flatErr *Error) {
switch err := err.(type) {
case *Error:
for _, e := range err.Errors {
flatten(e, flatErr)
}
default:
flatErr.Errors = append(flatErr.Errors, err)
}
}
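
A minimal sketch of what `Flatten` does with nested `*Error` values, based on the code above; the error messages are illustrative.

```go
package main

import (
	"errors"
	"fmt"

	"github.com/hashicorp/go-multierror"
)

func main() {
	// Build a multierror that itself contains another multierror.
	inner := multierror.Append(errors.New("inner one"), errors.New("inner two"))
	outer := &multierror.Error{Errors: []error{errors.New("outer"), inner}}

	// Flatten merges the nested *Error into a single flat list.
	flat := multierror.Flatten(outer).(*multierror.Error)
	fmt.Println(len(flat.Errors)) // 3
	fmt.Println(flat)             // "3 error(s) occurred:" followed by outer, inner one, inner two
}
```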

View File

@ -0,0 +1,48 @@
package multierror
import (
"errors"
"fmt"
"reflect"
"strings"
"testing"
)
func TestFlatten(t *testing.T) {
original := &Error{
Errors: []error{
errors.New("one"),
&Error{
Errors: []error{
errors.New("two"),
&Error{
Errors: []error{
errors.New("three"),
},
},
},
},
},
}
expected := strings.TrimSpace(`
3 error(s) occurred:
* one
* two
* three
`)
actual := fmt.Sprintf("%s", Flatten(original))
if expected != actual {
t.Fatalf("expected: %s, got: %s", expected, actual)
}
}
func TestFlatten_nonError(t *testing.T) {
err := errors.New("foo")
actual := Flatten(err)
if !reflect.DeepEqual(actual, err) {
t.Fatalf("bad: %#v", actual)
}
}

View File

@ -0,0 +1,23 @@
package multierror
import (
"fmt"
"strings"
)
// ErrorFormatFunc is a function callback that is called by Error to
// turn the list of errors into a string.
type ErrorFormatFunc func([]error) string
// ListFormatFunc is a basic formatter that outputs the number of errors
// that occurred along with a bullet point list of the errors.
func ListFormatFunc(es []error) string {
points := make([]string, len(es))
for i, err := range es {
points[i] = fmt.Sprintf("* %s", err)
}
return fmt.Sprintf(
"%d error(s) occurred:\n\n%s",
len(es), strings.Join(points, "\n"))
}
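
A short sketch contrasting the default `ListFormatFunc` output with a custom `ErrorFormatFunc`; the semicolon-joined format is just an example of a formatter you might supply.

```go
package main

import (
	"errors"
	"fmt"
	"strings"

	"github.com/hashicorp/go-multierror"
)

func main() {
	multi := &multierror.Error{Errors: []error{
		errors.New("foo"),
		errors.New("bar"),
	}}
	// Default formatting via ListFormatFunc: "2 error(s) occurred:" plus a bullet list.
	fmt.Println(multi)

	// Swap in a one-line formatter.
	multi.ErrorFormat = func(es []error) string {
		msgs := make([]string, len(es))
		for i, err := range es {
			msgs[i] = err.Error()
		}
		return strings.Join(msgs, "; ")
	}
	fmt.Println(multi) // "foo; bar"
}
```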

View File

@ -0,0 +1,23 @@
package multierror
import (
"errors"
"testing"
)
func TestListFormatFunc(t *testing.T) {
expected := `2 error(s) occurred:
* foo
* bar`
errors := []error{
errors.New("foo"),
errors.New("bar"),
}
actual := ListFormatFunc(errors)
if actual != expected {
t.Fatalf("bad: %#v", actual)
}
}

View File

@ -0,0 +1,51 @@
package multierror
import (
"fmt"
)
// Error is an error type to track multiple errors. This is used to
// accumulate errors from several operations and return them as a single "error".
type Error struct {
Errors []error
ErrorFormat ErrorFormatFunc
}
func (e *Error) Error() string {
fn := e.ErrorFormat
if fn == nil {
fn = ListFormatFunc
}
return fn(e.Errors)
}
// ErrorOrNil returns an error interface if this Error represents
// a list of errors, or returns nil if the list of errors is empty. This
// function is useful at the end of accumulation to make sure that the value
// returned represents the existence of errors.
func (e *Error) ErrorOrNil() error {
if e == nil {
return nil
}
if len(e.Errors) == 0 {
return nil
}
return e
}
func (e *Error) GoString() string {
return fmt.Sprintf("*%#v", *e)
}
// WrappedErrors returns the list of errors that this Error is wrapping.
// It is an implementation of the errwrap.Wrapper interface so that
// multierror.Error can be used with that library.
//
// This method is not safe to be called concurrently and is no different
// from accessing the Errors field directly. It is implemented only to
// satisfy the errwrap.Wrapper interface.
func (e *Error) WrappedErrors() []error {
return e.Errors
}

View File

@ -0,0 +1,70 @@
package multierror
import (
"errors"
"reflect"
"testing"
)
func TestError_Impl(t *testing.T) {
var _ error = new(Error)
}
func TestErrorError_custom(t *testing.T) {
errors := []error{
errors.New("foo"),
errors.New("bar"),
}
fn := func(es []error) string {
return "foo"
}
multi := &Error{Errors: errors, ErrorFormat: fn}
if multi.Error() != "foo" {
t.Fatalf("bad: %s", multi.Error())
}
}
func TestErrorError_default(t *testing.T) {
expected := `2 error(s) occurred:
* foo
* bar`
errors := []error{
errors.New("foo"),
errors.New("bar"),
}
multi := &Error{Errors: errors}
if multi.Error() != expected {
t.Fatalf("bad: %s", multi.Error())
}
}
func TestErrorErrorOrNil(t *testing.T) {
err := new(Error)
if err.ErrorOrNil() != nil {
t.Fatalf("bad: %#v", err.ErrorOrNil())
}
err.Errors = []error{errors.New("foo")}
if v := err.ErrorOrNil(); v == nil {
t.Fatal("should not be nil")
} else if !reflect.DeepEqual(v, err) {
t.Fatalf("bad: %#v", v)
}
}
func TestErrorWrappedErrors(t *testing.T) {
errors := []error{
errors.New("foo"),
errors.New("bar"),
}
multi := &Error{Errors: errors}
if !reflect.DeepEqual(multi.Errors, multi.WrappedErrors()) {
t.Fatalf("bad: %s", multi.WrappedErrors())
}
}

View File

@ -0,0 +1 @@
y.output

354
Godeps/_workspace/src/github.com/hashicorp/hcl/LICENSE generated vendored Normal file
View File

@ -0,0 +1,354 @@
Mozilla Public License, version 2.0
1. Definitions
1.1. “Contributor”
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
1.2. “Contributor Version”
means the combination of the Contributions of others (if any) used by a
Contributor and that particular Contributor's Contribution.
1.3. “Contribution”
means Covered Software of a particular Contributor.
1.4. “Covered Software”
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form, and
Modifications of such Source Code Form, in each case including portions
thereof.
1.5. “Incompatible With Secondary Licenses”
means
a. that the initial Contributor has attached the notice described in
Exhibit B to the Covered Software; or
b. that the Covered Software was made available under the terms of version
1.1 or earlier of the License, but not also under the terms of a
Secondary License.
1.6. “Executable Form”
means any form of the work other than Source Code Form.
1.7. “Larger Work”
means a work that combines Covered Software with other material, in a separate
file or files, that is not Covered Software.
1.8. “License”
means this document.
1.9. “Licensable”
means having the right to grant, to the maximum extent possible, whether at the
time of the initial grant or subsequently, any and all of the rights conveyed by
this License.
1.10. “Modifications”
means any of the following:
a. any file in Source Code Form that results from an addition to, deletion
from, or modification of the contents of Covered Software; or
b. any new file in Source Code Form that contains any Covered Software.
1.11. “Patent Claims” of a Contributor
means any patent claim(s), including without limitation, method, process,
and apparatus claims, in any patent Licensable by such Contributor that
would be infringed, but for the grant of the License, by the making,
using, selling, offering for sale, having made, import, or transfer of
either its Contributions or its Contributor Version.
1.12. “Secondary License”
means either the GNU General Public License, Version 2.0, the GNU Lesser
General Public License, Version 2.1, the GNU Affero General Public
License, Version 3.0, or any later versions of those licenses.
1.13. “Source Code Form”
means the form of the work preferred for making modifications.
1.14. “You” (or “Your”)
means an individual or a legal entity exercising rights under this
License. For legal entities, “You” includes any entity that controls, is
controlled by, or is under common control with You. For purposes of this
definition, “control” means (a) the power, direct or indirect, to cause
the direction or management of such entity, whether by contract or
otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
2. License Grants and Conditions
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
a. under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or as
part of a Larger Work; and
b. under Patent Claims of such Contributor to make, use, sell, offer for
sale, have made, import, and otherwise transfer either its Contributions
or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution become
effective for each Contribution on the date the Contributor first distributes
such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under this
License. No additional rights or licenses will be implied from the distribution
or licensing of Covered Software under this License. Notwithstanding Section
2.1(b) above, no patent license is granted by a Contributor:
a. for any code that a Contributor has removed from Covered Software; or
b. for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
c. under Patent Claims infringed by Covered Software in the absence of its
Contributions.
This License does not grant any rights in the trademarks, service marks, or
logos of any Contributor (except as may be necessary to comply with the
notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this License
(see Section 10.2) or under the terms of a Secondary License (if permitted
under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its Contributions
are its original creation(s) or it has sufficient rights to grant the
rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under applicable
copyright doctrines of fair use, fair dealing, or other equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
Section 2.1.
3. Responsibilities
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under the
terms of this License. You must inform recipients that the Source Code Form
of the Covered Software is governed by the terms of this License, and how
they can obtain a copy of this License. You may not attempt to alter or
restrict the recipients' rights in the Source Code Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
a. such Covered Software must also be made available in Source Code Form,
as described in Section 3.1, and You must inform recipients of the
Executable Form how they can obtain a copy of such Source Code Form by
reasonable means in a timely manner, at a charge no more than the cost
of distribution to the recipient; and
b. You may distribute such Executable Form under the terms of this License,
or sublicense it under different terms, provided that the license for
the Executable Form does not attempt to limit or alter the recipients'
rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for the
Covered Software. If the Larger Work is a combination of Covered Software
with a work governed by one or more Secondary Licenses, and the Covered
Software is not Incompatible With Secondary Licenses, this License permits
You to additionally distribute such Covered Software under the terms of
such Secondary License(s), so that the recipient of the Larger Work may, at
their option, further distribute the Covered Software under the terms of
either this License or such Secondary License(s).
3.4. Notices
You may not remove or alter the substance of any license notices (including
copyright notices, patent notices, disclaimers of warranty, or limitations
of liability) contained within the Source Code Form of the Covered
Software, except that You may alter any license notices to the extent
required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on behalf
of any Contributor. You must make it absolutely clear that any such
warranty, support, indemnity, or liability obligation is offered by You
alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
If it is impossible for You to comply with any of the terms of this License
with respect to some or all of the Covered Software due to statute, judicial
order, or regulation then You must: (a) comply with the terms of this License
to the maximum extent possible; and (b) describe the limitations and the code
they affect. Such description must be placed in a text file included with all
distributions of the Covered Software under this License. Except to the
extent prohibited by statute or regulation, such description must be
sufficiently detailed for a recipient of ordinary skill to be able to
understand it.
5. Termination
5.1. The rights granted under this License will terminate automatically if You
fail to comply with any of its terms. However, if You become compliant,
then the rights granted under this License from a particular Contributor
are reinstated (a) provisionally, unless and until such Contributor
explicitly and finally terminates Your grants, and (b) on an ongoing basis,
if such Contributor fails to notify You of the non-compliance by some
reasonable means prior to 60 days after You have come back into compliance.
Moreover, Your grants from a particular Contributor are reinstated on an
ongoing basis if such Contributor notifies You of the non-compliance by
some reasonable means, this is the first time You have received notice of
non-compliance with this License from such Contributor, and You become
compliant prior to 30 days after Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions, counter-claims,
and cross-claims) alleging that a Contributor Version directly or
indirectly infringes any patent, then the rights granted to You by any and
all Contributors for the Covered Software under Section 2.1 of this License
shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
license agreements (excluding distributors and resellers) which have been
validly granted by You or Your distributors under this License prior to
termination shall survive termination.
6. Disclaimer of Warranty
Covered Software is provided under this License on an “as is” basis, without
warranty of any kind, either expressed, implied, or statutory, including,
without limitation, warranties that the Covered Software is free of defects,
merchantable, fit for a particular purpose or non-infringing. The entire
risk as to the quality and performance of the Covered Software is with You.
Should any Covered Software prove defective in any respect, You (not any
Contributor) assume the cost of any necessary servicing, repair, or
correction. This disclaimer of warranty constitutes an essential part of this
License. No use of any Covered Software is authorized under this License
except under this disclaimer.
7. Limitation of Liability
Under no circumstances and under no legal theory, whether tort (including
negligence), contract, or otherwise, shall any Contributor, or anyone who
distributes Covered Software as permitted above, be liable to You for any
direct, indirect, special, incidental, or consequential damages of any
character including, without limitation, damages for lost profits, loss of
goodwill, work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from such
party's negligence to the extent applicable law prohibits such limitation.
Some jurisdictions do not allow the exclusion or limitation of incidental or
consequential damages, so this exclusion and limitation may not apply to You.
8. Litigation
Any litigation relating to this License may be brought only in the courts of
a jurisdiction where the defendant maintains its principal place of business
and such litigation shall be governed by laws of that jurisdiction, without
reference to its conflict-of-law provisions. Nothing in this Section shall
prevent a party's ability to bring cross-claims or counter-claims.
9. Miscellaneous
This License represents the complete agreement concerning the subject matter
hereof. If any provision of this License is held to be unenforceable, such
provision shall be reformed only to the extent necessary to make it
enforceable. Any law or regulation which provides that the language of a
contract shall be construed against the drafter shall not be used to construe
this License against a Contributor.
10. Versions of the License
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version of
the License under which You originally received the Covered Software, or
under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a modified
version of this License if you rename the license and remove any
references to the name of the license steward (except to note that such
modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
This Source Code Form is subject to the
terms of the Mozilla Public License, v.
2.0. If a copy of the MPL was not
distributed with this file, You can
obtain one at
http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular file, then
You may include the notice in a location (such as a LICENSE file in a relevant
directory) where a recipient would be likely to look for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - “Incompatible With Secondary Licenses” Notice
This Source Code Form is “Incompatible
With Secondary Licenses”, as defined by
the Mozilla Public License, v. 2.0.

View File

@ -0,0 +1,17 @@
TEST?=./...
default: test
fmt: generate
go fmt ./...
test: generate
go test $(TEST) $(TESTARGS)
generate:
go generate ./...
updatedeps:
go get -u golang.org/x/tools/cmd/stringer
.PHONY: default generate test updatedeps

View File

@ -0,0 +1,84 @@
# HCL
HCL (HashiCorp Configuration Language) is a configuration language built
by HashiCorp. The goal of HCL is to build a structured configuration language
that is both human and machine friendly for use with command-line tools, but
specifically targeted towards DevOps tools, servers, etc.
HCL is also fully JSON compatible. That is, JSON can be used as completely
valid input to a system expecting HCL. This helps make systems
interoperable with other systems.
HCL is heavily inspired by
[libucl](https://github.com/vstakhov/libucl),
nginx configuration, and other similar configuration languages.
## Why?
A common question when viewing HCL is to ask the question: why not
JSON, YAML, etc.?
Prior to HCL, the tools we built at [HashiCorp](http://www.hashicorp.com)
used a variety of configuration languages from full programming languages
such as Ruby to complete data structure languages such as JSON. What we
learned is that some people wanted human-friendly configuration languages
and some people wanted machine-friendly languages.
JSON fits a nice balance in this, but is fairly verbose and most
importantly doesn't support comments. With YAML, we found that beginners
had a really hard time determining what the actual structure was, and
more often than not ended up guessing whether a hyphen, colon, etc. was
needed to represent some configuration key.
Full programming languages such as Ruby enable complex behavior
a configuration language shouldn't usually allow, and also force
people to learn at least some Ruby.
Because of this, we decided to create our own configuration language
that is JSON-compatible. Our configuration language (HCL) is designed
to be written and modified by humans. The API for HCL allows JSON
as an input so that it is also machine-friendly (machines can generate
JSON instead of trying to generate HCL).
Our goal with HCL is not to alienate other configuration languages.
It is instead to provide HCL as a specialized language for our tools,
and JSON as the interoperability layer.
## Syntax
The complete grammar
[can be found here](https://github.com/hashicorp/hcl/blob/master/hcl/parse.y),
if you're more comfortable reading specifics, but a high-level overview
of the syntax and grammar is given here.
* Single line comments start with `#` or `//`
* Multi-line comments are wrapped in `/*` and `*/`. Nested block comments
are not allowed. A multi-line comment (also known as a block comment)
terminates at the first `*/` found.
* Values are assigned with the syntax `key = value` (whitespace doesn't
matter). The value can be any primitive: a string, number, boolean,
object, or list.
* Strings are double-quoted and can contain any UTF-8 characters.
Example: `"Hello, World"`
* Numbers are assumed to be base 10. If you prefix a number with 0x,
it is treated as a hexadecimal. If it is prefixed with 0, it is
treated as an octal. Numbers can be in scientific notation: "1e10".
* Boolean values: `true`, `false`
* Arrays can be made by wrapping values in `[]`. Example:
`["foo", "bar", 42]`. Arrays can contain primitives
and other arrays, but cannot contain objects. Objects must
use the block syntax shown below.
Objects and nested objects are created using the structure shown below:
```
variable "ami" {
description = "the AMI to use"
}
```
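
As a rough sketch of how this syntax is consumed from Go, the following uses the `Decode` signature from `decoder.go` in this commit; the `Config` struct and the keys in the input are invented for illustration, and field matching relies on the untagged-field behavior exercised by the decoder tests below.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl"
)

// Config is a hypothetical structure matching the keys in the input below.
type Config struct {
	Port int
	Name string
}

const input = `
# comments are allowed
port = 8080
name = "example"
`

func main() {
	var c Config
	if err := hcl.Decode(&c, input); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", c) // {Port:8080 Name:example}
}
```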

View File

@ -0,0 +1,483 @@
package hcl
import (
"errors"
"fmt"
"reflect"
"sort"
"strconv"
"strings"
"github.com/hashicorp/hcl/hcl"
)
// tagName is the struct field tag inspected for HCL decoding settings
const tagName = "hcl"
// Decode reads the given input and decodes it into the structure
// given by `out`.
func Decode(out interface{}, in string) error {
obj, err := Parse(in)
if err != nil {
return err
}
return DecodeObject(out, obj)
}
// DecodeObject is a lower-level version of Decode. It decodes a
// raw Object into the given output.
func DecodeObject(out interface{}, n *hcl.Object) error {
val := reflect.ValueOf(out)
if val.Kind() != reflect.Ptr {
return errors.New("result must be a pointer")
}
var d decoder
return d.decode("root", n, val.Elem())
}
type decoder struct {
stack []reflect.Kind
}
func (d *decoder) decode(name string, o *hcl.Object, result reflect.Value) error {
k := result
// If we have an interface with a valid value, we use that
// for the check.
if result.Kind() == reflect.Interface {
elem := result.Elem()
if elem.IsValid() {
k = elem
}
}
// Push current onto stack unless it is an interface.
if k.Kind() != reflect.Interface {
d.stack = append(d.stack, k.Kind())
// Schedule a pop
defer func() {
d.stack = d.stack[:len(d.stack)-1]
}()
}
switch k.Kind() {
case reflect.Bool:
return d.decodeBool(name, o, result)
case reflect.Float64:
return d.decodeFloat(name, o, result)
case reflect.Int:
return d.decodeInt(name, o, result)
case reflect.Interface:
// When we see an interface, we make our own thing
return d.decodeInterface(name, o, result)
case reflect.Map:
return d.decodeMap(name, o, result)
case reflect.Ptr:
return d.decodePtr(name, o, result)
case reflect.Slice:
return d.decodeSlice(name, o, result)
case reflect.String:
return d.decodeString(name, o, result)
case reflect.Struct:
return d.decodeStruct(name, o, result)
default:
return fmt.Errorf(
"%s: unknown kind to decode into: %s", name, k.Kind())
}
return nil
}
func (d *decoder) decodeBool(name string, o *hcl.Object, result reflect.Value) error {
switch o.Type {
case hcl.ValueTypeBool:
result.Set(reflect.ValueOf(o.Value.(bool)))
default:
return fmt.Errorf("%s: unknown type %v", name, o.Type)
}
return nil
}
func (d *decoder) decodeFloat(name string, o *hcl.Object, result reflect.Value) error {
switch o.Type {
case hcl.ValueTypeFloat:
result.Set(reflect.ValueOf(o.Value.(float64)))
default:
return fmt.Errorf("%s: unknown type %v", name, o.Type)
}
return nil
}
func (d *decoder) decodeInt(name string, o *hcl.Object, result reflect.Value) error {
switch o.Type {
case hcl.ValueTypeInt:
result.Set(reflect.ValueOf(o.Value.(int)))
case hcl.ValueTypeString:
v, err := strconv.ParseInt(o.Value.(string), 0, 0)
if err != nil {
return err
}
result.SetInt(int64(v))
default:
return fmt.Errorf("%s: unknown type %v", name, o.Type)
}
return nil
}
func (d *decoder) decodeInterface(name string, o *hcl.Object, result reflect.Value) error {
var set reflect.Value
redecode := true
switch o.Type {
case hcl.ValueTypeObject:
// If we're at the root or we're directly within a slice, then we
// decode objects into map[string]interface{}, otherwise we decode
// them into lists.
if len(d.stack) == 0 || d.stack[len(d.stack)-1] == reflect.Slice {
var temp map[string]interface{}
tempVal := reflect.ValueOf(temp)
result := reflect.MakeMap(
reflect.MapOf(
reflect.TypeOf(""),
tempVal.Type().Elem()))
set = result
} else {
var temp []map[string]interface{}
tempVal := reflect.ValueOf(temp)
result := reflect.MakeSlice(
reflect.SliceOf(tempVal.Type().Elem()), 0, int(o.Len()))
set = result
}
case hcl.ValueTypeList:
var temp []interface{}
tempVal := reflect.ValueOf(temp)
result := reflect.MakeSlice(
reflect.SliceOf(tempVal.Type().Elem()), 0, 0)
set = result
case hcl.ValueTypeBool:
var result bool
set = reflect.Indirect(reflect.New(reflect.TypeOf(result)))
case hcl.ValueTypeFloat:
var result float64
set = reflect.Indirect(reflect.New(reflect.TypeOf(result)))
case hcl.ValueTypeInt:
var result int
set = reflect.Indirect(reflect.New(reflect.TypeOf(result)))
case hcl.ValueTypeString:
set = reflect.Indirect(reflect.New(reflect.TypeOf("")))
default:
return fmt.Errorf(
"%s: cannot decode into interface: %T",
name, o)
}
// Set the result to what it's supposed to be, then reset
// result so we don't reflect into this method anymore.
result.Set(set)
if redecode {
// Revisit the node so that we can use the newly instantiated
// thing and populate it.
if err := d.decode(name, o, result); err != nil {
return err
}
}
return nil
}
func (d *decoder) decodeMap(name string, o *hcl.Object, result reflect.Value) error {
if o.Type != hcl.ValueTypeObject {
return fmt.Errorf("%s: not an object type for map (%v)", name, o.Type)
}
// If we have an interface, then we can address the interface,
// but not the map itself, so get the element but set the interface
set := result
if result.Kind() == reflect.Interface {
result = result.Elem()
}
resultType := result.Type()
resultElemType := resultType.Elem()
resultKeyType := resultType.Key()
if resultKeyType.Kind() != reflect.String {
return fmt.Errorf(
"%s: map must have string keys", name)
}
// Make a map if it is nil
resultMap := result
if result.IsNil() {
resultMap = reflect.MakeMap(
reflect.MapOf(resultKeyType, resultElemType))
}
// Go through each element and decode it.
for _, o := range o.Elem(false) {
if o.Value == nil {
continue
}
for _, o := range o.Elem(true) {
// Make the field name
fieldName := fmt.Sprintf("%s.%s", name, o.Key)
// Get the key/value as reflection values
key := reflect.ValueOf(o.Key)
val := reflect.Indirect(reflect.New(resultElemType))
// If we have a pre-existing value in the map, use that
oldVal := resultMap.MapIndex(key)
if oldVal.IsValid() {
val.Set(oldVal)
}
// Decode!
if err := d.decode(fieldName, o, val); err != nil {
return err
}
// Set the value on the map
resultMap.SetMapIndex(key, val)
}
}
// Set the final map if we can
set.Set(resultMap)
return nil
}
func (d *decoder) decodePtr(name string, o *hcl.Object, result reflect.Value) error {
// Create an element of the concrete (non pointer) type and decode
// into that. Then set the value of the pointer to this type.
resultType := result.Type()
resultElemType := resultType.Elem()
val := reflect.New(resultElemType)
if err := d.decode(name, o, reflect.Indirect(val)); err != nil {
return err
}
result.Set(val)
return nil
}
func (d *decoder) decodeSlice(name string, o *hcl.Object, result reflect.Value) error {
// If we have an interface, then we can address the interface,
// but not the slice itself, so get the element but set the interface
set := result
if result.Kind() == reflect.Interface {
result = result.Elem()
}
// Create the slice if it is nil
resultType := result.Type()
resultElemType := resultType.Elem()
if result.IsNil() {
resultSliceType := reflect.SliceOf(resultElemType)
result = reflect.MakeSlice(
resultSliceType, 0, 0)
}
// Determine how we're doing this
expand := true
switch o.Type {
case hcl.ValueTypeObject:
expand = false
default:
// Array or anything else: we expand values and take it all
}
i := 0
for _, o := range o.Elem(expand) {
fieldName := fmt.Sprintf("%s[%d]", name, i)
// Decode
val := reflect.Indirect(reflect.New(resultElemType))
if err := d.decode(fieldName, o, val); err != nil {
return err
}
// Append it onto the slice
result = reflect.Append(result, val)
i += 1
}
set.Set(result)
return nil
}
func (d *decoder) decodeString(name string, o *hcl.Object, result reflect.Value) error {
switch o.Type {
case hcl.ValueTypeInt:
result.Set(reflect.ValueOf(
strconv.FormatInt(int64(o.Value.(int)), 10)).Convert(result.Type()))
case hcl.ValueTypeString:
result.Set(reflect.ValueOf(o.Value.(string)).Convert(result.Type()))
default:
return fmt.Errorf("%s: unknown type to string: %v", name, o.Type)
}
return nil
}
func (d *decoder) decodeStruct(name string, o *hcl.Object, result reflect.Value) error {
if o.Type != hcl.ValueTypeObject {
return fmt.Errorf("%s: not an object type for struct (%v)", name, o.Type)
}
// This slice will keep track of all the structs we'll be decoding.
// There can be more than one struct if there are embedded structs
// that are squashed.
structs := make([]reflect.Value, 1, 5)
structs[0] = result
// Compile the list of all the fields that we're going to be decoding
// from all the structs.
fields := make(map[*reflect.StructField]reflect.Value)
for len(structs) > 0 {
structVal := structs[0]
structs = structs[1:]
structType := structVal.Type()
for i := 0; i < structType.NumField(); i++ {
fieldType := structType.Field(i)
if fieldType.Anonymous {
fieldKind := fieldType.Type.Kind()
if fieldKind != reflect.Struct {
return fmt.Errorf(
"%s: unsupported type to struct: %s",
fieldType.Name, fieldKind)
}
// We have an embedded field. We "squash" the fields down
// if specified in the tag.
squash := false
tagParts := strings.Split(fieldType.Tag.Get(tagName), ",")
for _, tag := range tagParts[1:] {
if tag == "squash" {
squash = true
break
}
}
if squash {
structs = append(
structs, result.FieldByName(fieldType.Name))
continue
}
}
// Normal struct field, store it away
fields[&fieldType] = structVal.Field(i)
}
}
usedKeys := make(map[string]struct{})
decodedFields := make([]string, 0, len(fields))
decodedFieldsVal := make([]reflect.Value, 0)
unusedKeysVal := make([]reflect.Value, 0)
for fieldType, field := range fields {
if !field.IsValid() {
// This should never happen
panic("field is not valid")
}
// If we can't set the field, then it is unexported or something,
// and we just continue onwards.
if !field.CanSet() {
continue
}
fieldName := fieldType.Name
// This is whether or not we expand the object into its children
// later.
expand := false
tagValue := fieldType.Tag.Get(tagName)
tagParts := strings.SplitN(tagValue, ",", 2)
if len(tagParts) >= 2 {
switch tagParts[1] {
case "expand":
expand = true
case "decodedFields":
decodedFieldsVal = append(decodedFieldsVal, field)
continue
case "key":
field.SetString(o.Key)
continue
case "unusedKeys":
unusedKeysVal = append(unusedKeysVal, field)
continue
}
}
if tagParts[0] != "" {
fieldName = tagParts[0]
}
// Find the element matching this name
obj := o.Get(fieldName, true)
if obj == nil {
continue
}
// Track the used key
usedKeys[fieldName] = struct{}{}
// Create the field name and decode. We range over the elements
// because we actually want the value.
fieldName = fmt.Sprintf("%s.%s", name, fieldName)
for _, obj := range obj.Elem(expand) {
if err := d.decode(fieldName, obj, field); err != nil {
return err
}
}
decodedFields = append(decodedFields, fieldType.Name)
}
if len(decodedFieldsVal) > 0 {
// Sort it so that it is deterministic
sort.Strings(decodedFields)
for _, v := range decodedFieldsVal {
v.Set(reflect.ValueOf(decodedFields))
}
}
// If we want to know what keys are unused, compile that
if len(unusedKeysVal) > 0 {
/*
unusedKeys := make([]string, 0, int(obj.Len())-len(usedKeys))
for _, elem := range obj.Elem {
k := elem.Key()
if _, ok := usedKeys[k]; !ok {
unusedKeys = append(unusedKeys, k)
}
}
if len(unusedKeys) == 0 {
unusedKeys = nil
}
for _, v := range unusedKeysVal {
v.Set(reflect.ValueOf(unusedKeys))
}
*/
}
return nil
}

View File

@ -0,0 +1,481 @@
package hcl
import (
"io/ioutil"
"path/filepath"
"reflect"
"testing"
)
func TestDecode_interface(t *testing.T) {
cases := []struct {
File string
Err bool
Out interface{}
}{
{
"basic.hcl",
false,
map[string]interface{}{
"foo": "bar",
"bar": "${file(\"bing/bong.txt\")}",
},
},
{
"basic_squish.hcl",
false,
map[string]interface{}{
"foo": "bar",
"bar": "${file(\"bing/bong.txt\")}",
"foo-bar": "baz",
},
},
{
"empty.hcl",
false,
map[string]interface{}{
"resource": []map[string]interface{}{
map[string]interface{}{
"foo": []map[string]interface{}{
map[string]interface{}{},
},
},
},
},
},
{
"escape.hcl",
false,
map[string]interface{}{
"foo": "bar\"baz\\n",
},
},
{
"float.hcl",
false,
map[string]interface{}{
"a": 1.02,
},
},
{
"multiline_bad.hcl",
false,
map[string]interface{}{"foo": "bar\nbaz\n"},
},
{
"multiline.json",
false,
map[string]interface{}{"foo": "bar\nbaz"},
},
{
"scientific.json",
false,
map[string]interface{}{
"a": 1e-10,
"b": 1e+10,
"c": 1e10,
"d": 1.2e-10,
"e": 1.2e+10,
"f": 1.2e10,
},
},
{
"scientific.hcl",
false,
map[string]interface{}{
"a": 1e-10,
"b": 1e+10,
"c": 1e10,
"d": 1.2e-10,
"e": 1.2e+10,
"f": 1.2e10,
},
},
{
"terraform_heroku.hcl",
false,
map[string]interface{}{
"name": "terraform-test-app",
"config_vars": []map[string]interface{}{
map[string]interface{}{
"FOO": "bar",
},
},
},
},
{
"structure_multi.hcl",
false,
map[string]interface{}{
"foo": []map[string]interface{}{
map[string]interface{}{
"baz": []map[string]interface{}{
map[string]interface{}{"key": 7},
},
},
map[string]interface{}{
"bar": []map[string]interface{}{
map[string]interface{}{"key": 12},
},
},
},
},
},
{
"structure_multi.json",
false,
map[string]interface{}{
"foo": []map[string]interface{}{
map[string]interface{}{
"baz": []map[string]interface{}{
map[string]interface{}{"key": 7},
},
"bar": []map[string]interface{}{
map[string]interface{}{"key": 12},
},
},
},
},
},
{
"structure_list.hcl",
false,
map[string]interface{}{
"foo": []map[string]interface{}{
map[string]interface{}{
"key": 7,
},
map[string]interface{}{
"key": 12,
},
},
},
},
{
"structure_list.json",
false,
map[string]interface{}{
"foo": []interface{}{
map[string]interface{}{
"key": 7,
},
map[string]interface{}{
"key": 12,
},
},
},
},
{
"structure_list_deep.json",
false,
map[string]interface{}{
"bar": []map[string]interface{}{
map[string]interface{}{
"foo": []map[string]interface{}{
map[string]interface{}{
"name": "terraform_example",
"ingress": []interface{}{
map[string]interface{}{
"from_port": 22,
},
map[string]interface{}{
"from_port": 80,
},
},
},
},
},
},
},
},
{
"nested_block_comment.hcl",
false,
map[string]interface{}{
"bar": "value",
},
},
{
"unterminated_block_comment.hcl",
true,
nil,
},
}
for _, tc := range cases {
d, err := ioutil.ReadFile(filepath.Join(fixtureDir, tc.File))
if err != nil {
t.Fatalf("err: %s", err)
}
var out interface{}
err = Decode(&out, string(d))
if (err != nil) != tc.Err {
t.Fatalf("Input: %s\n\nError: %s", tc.File, err)
}
if !reflect.DeepEqual(out, tc.Out) {
t.Fatalf("Input: %s\n\nActual: %#v\n\nExpected: %#v", tc.File, out, tc.Out)
}
}
}
func TestDecode_equal(t *testing.T) {
cases := []struct {
One, Two string
}{
{
"basic.hcl",
"basic.json",
},
{
"float.hcl",
"float.json",
},
/*
{
"structure.hcl",
"structure.json",
},
*/
{
"structure.hcl",
"structure_flat.json",
},
{
"terraform_heroku.hcl",
"terraform_heroku.json",
},
}
for _, tc := range cases {
p1 := filepath.Join(fixtureDir, tc.One)
p2 := filepath.Join(fixtureDir, tc.Two)
d1, err := ioutil.ReadFile(p1)
if err != nil {
t.Fatalf("err: %s", err)
}
d2, err := ioutil.ReadFile(p2)
if err != nil {
t.Fatalf("err: %s", err)
}
var i1, i2 interface{}
err = Decode(&i1, string(d1))
if err != nil {
t.Fatalf("err: %s", err)
}
err = Decode(&i2, string(d2))
if err != nil {
t.Fatalf("err: %s", err)
}
if !reflect.DeepEqual(i1, i2) {
t.Fatalf(
"%s != %s\n\n%#v\n\n%#v",
tc.One, tc.Two,
i1, i2)
}
}
}
func TestDecode_flatMap(t *testing.T) {
var val map[string]map[string]string
err := Decode(&val, testReadFile(t, "structure_flatmap.hcl"))
if err != nil {
t.Fatalf("err: %s", err)
}
expected := map[string]map[string]string{
"foo": map[string]string{
"foo": "bar",
"key": "7",
},
}
if !reflect.DeepEqual(val, expected) {
t.Fatalf("Actual: %#v\n\nExpected: %#v", val, expected)
}
}
func TestDecode_structure(t *testing.T) {
type V struct {
Key int
Foo string
}
var actual V
err := Decode(&actual, testReadFile(t, "flat.hcl"))
if err != nil {
t.Fatalf("err: %s", err)
}
expected := V{
Key: 7,
Foo: "bar",
}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("Actual: %#v\n\nExpected: %#v", actual, expected)
}
}
func TestDecode_structurePtr(t *testing.T) {
type V struct {
Key int
Foo string
}
var actual *V
err := Decode(&actual, testReadFile(t, "flat.hcl"))
if err != nil {
t.Fatalf("err: %s", err)
}
expected := &V{
Key: 7,
Foo: "bar",
}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("Actual: %#v\n\nExpected: %#v", actual, expected)
}
}
func TestDecode_structureArray(t *testing.T) {
// This test is extracted from a failure in Consul (consul.io),
// hence the interesting structure naming.
type KeyPolicyType string
type KeyPolicy struct {
Prefix string `hcl:",key"`
Policy KeyPolicyType
}
type Policy struct {
Keys []KeyPolicy `hcl:"key,expand"`
}
expected := Policy{
Keys: []KeyPolicy{
KeyPolicy{
Prefix: "",
Policy: "read",
},
KeyPolicy{
Prefix: "foo/",
Policy: "write",
},
KeyPolicy{
Prefix: "foo/bar/",
Policy: "read",
},
KeyPolicy{
Prefix: "foo/bar/baz",
Policy: "deny",
},
},
}
files := []string{
"decode_policy.hcl",
"decode_policy.json",
}
for _, f := range files {
var actual Policy
err := Decode(&actual, testReadFile(t, f))
if err != nil {
t.Fatalf("err: %s", err)
}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("Input: %s\n\nActual: %#v\n\nExpected: %#v", f, actual, expected)
}
}
}
func TestDecode_structureMap(t *testing.T) {
// This test is extracted from a failure in Terraform (terraform.io),
// hence the interesting structure naming.
type hclVariable struct {
Default interface{}
Description string
Fields []string `hcl:",decodedFields"`
}
type rawConfig struct {
Variable map[string]hclVariable
}
expected := rawConfig{
Variable: map[string]hclVariable{
"foo": hclVariable{
Default: "bar",
Description: "bar",
Fields: []string{"Default", "Description"},
},
"amis": hclVariable{
Default: []map[string]interface{}{
map[string]interface{}{
"east": "foo",
},
},
Fields: []string{"Default"},
},
},
}
files := []string{
"decode_tf_variable.hcl",
"decode_tf_variable.json",
}
for _, f := range files {
var actual rawConfig
err := Decode(&actual, testReadFile(t, f))
if err != nil {
t.Fatalf("Input: %s\n\nerr: %s", f, err)
}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("Input: %s\n\nActual: %#v\n\nExpected: %#v", f, actual, expected)
}
}
}
func TestDecode_interfaceNonPointer(t *testing.T) {
var value interface{}
err := Decode(value, testReadFile(t, "basic_int_string.hcl"))
if err == nil {
t.Fatal("should error")
}
}
func TestDecode_intString(t *testing.T) {
var value struct {
Count int
}
err := Decode(&value, testReadFile(t, "basic_int_string.hcl"))
if err != nil {
t.Fatalf("err: %s", err)
}
if value.Count != 3 {
t.Fatalf("bad: %#v", value.Count)
}
}

11
Godeps/_workspace/src/github.com/hashicorp/hcl/hcl.go generated vendored Normal file

@ -0,0 +1,11 @@
// hcl is a package for decoding HCL into usable Go structures.
//
// hcl input can come in either pure HCL format or JSON format.
// It can be parsed into an AST, and then decoded into a structure,
// or it can be decoded directly from a string into a structure.
//
// If you choose to parse HCL into a raw AST, the benefit is that you
// can write custom visitor implementations to perform your own
// semantic checks. By default, HCL does not perform any semantic
// checks.
package hcl
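
A minimal sketch of the direct string-to-struct decoding described above, modeled on TestDecode_structure earlier in this diff; the Config type, the inline input, and the main wrapper are illustrative only, and the import path assumes the vendored location github.com/hashicorp/hcl:

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl"
)

type Config struct {
	Foo string
	Key int
}

func main() {
	input := `foo = "bar"
key = 7`

	// Decode works directly from a string into a structure,
	// without going through the AST explicitly.
	var c Config
	if err := hcl.Decode(&c, input); err != nil {
		log.Fatal(err)
	}
	fmt.Println(c.Foo, c.Key) // bar 7
}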


@ -0,0 +1,4 @@
package hcl
// This is the directory where our test fixtures are.
const fixtureDir = "./test-fixtures"


@ -0,0 +1,466 @@
package hcl
import (
"bytes"
"fmt"
"strconv"
"unicode"
"unicode/utf8"
)
//go:generate go tool yacc -p "hcl" parse.y
// The parser expects the lexer to return 0 on EOF.
const lexEOF = 0
// The parser uses the type <prefix>Lex as a lexer. It must provide
// the methods Lex(*<prefix>SymType) int and Error(string).
type hclLex struct {
Input string
lastNumber bool
pos int
width int
col, line int
lastCol, lastLine int
err error
}
// The parser calls this method to get each new token.
func (x *hclLex) Lex(yylval *hclSymType) int {
for {
c := x.next()
if c == lexEOF {
return lexEOF
}
// Ignore all whitespace except a newline which we handle
// specially later.
if unicode.IsSpace(c) {
x.lastNumber = false
continue
}
// Consume all comments
switch c {
case '#':
fallthrough
case '/':
// Starting comment
if !x.consumeComment(c) {
return lexEOF
}
continue
}
// If it is a number, lex the number
if c >= '0' && c <= '9' {
x.lastNumber = true
x.backup()
return x.lexNumber(yylval)
}
// This is a hacky way to find 'e' and lex it, but it works.
if x.lastNumber {
switch c {
case 'e':
fallthrough
case 'E':
switch x.next() {
case '+':
return EPLUS
case '-':
return EMINUS
default:
x.backup()
return EPLUS
}
}
}
x.lastNumber = false
switch c {
case '.':
return PERIOD
case '-':
return MINUS
case ',':
return x.lexComma()
case '=':
return EQUAL
case '[':
return LEFTBRACKET
case ']':
return RIGHTBRACKET
case '{':
return LEFTBRACE
case '}':
return RIGHTBRACE
case '"':
return x.lexString(yylval)
case '<':
return x.lexHeredoc(yylval)
default:
x.backup()
return x.lexId(yylval)
}
}
}
func (x *hclLex) consumeComment(c rune) bool {
single := c == '#'
if !single {
c = x.next()
if c != '/' && c != '*' {
x.backup()
x.createErr(fmt.Sprintf("comment expected, got '%c'", c))
return false
}
single = c == '/'
}
nested := 1
for {
c = x.next()
if c == lexEOF {
x.backup()
if single {
// Single line comments can end with an EOF
return true
}
// Multi-line comments must end with a */
x.createErr(fmt.Sprintf("end of multi-line comment expected, got EOF"))
return false
}
// Single line comments continue until a '\n'
if single {
if c == '\n' {
return true
}
continue
}
// Multi-line comments continue until a '*/'
switch c {
case '/':
c = x.next()
if c == '*' {
nested++
} else {
x.backup()
}
case '*':
c = x.next()
if c == '/' {
return true
} else {
x.backup()
}
default:
// Continue
}
}
}
// lexComma reads the comma
func (x *hclLex) lexComma() int {
for {
c := x.peek()
// Consume space
if unicode.IsSpace(c) {
x.next()
continue
}
if c == ']' {
return COMMAEND
}
break
}
return COMMA
}
// lexId lexes an identifier
func (x *hclLex) lexId(yylval *hclSymType) int {
var b bytes.Buffer
first := true
for {
c := x.next()
if c == lexEOF {
break
}
if !unicode.IsDigit(c) && !unicode.IsLetter(c) &&
c != '_' && c != '-' && c != '.' {
x.backup()
if first {
x.createErr("Invalid identifier")
return lexEOF
}
break
}
first = false
if _, err := b.WriteRune(c); err != nil {
return lexEOF
}
}
yylval.str = b.String()
switch yylval.str {
case "true":
yylval.b = true
return BOOL
case "false":
yylval.b = false
return BOOL
}
return IDENTIFIER
}
// lexHeredoc extracts a string from the input in heredoc format
func (x *hclLex) lexHeredoc(yylval *hclSymType) int {
if x.next() != '<' {
x.createErr("Heredoc must start with <<")
return lexEOF
}
// Now determine the marker
var buf bytes.Buffer
for {
c := x.next()
if c == lexEOF {
return lexEOF
}
// Newline signals the end of the marker
if c == '\n' {
break
}
if _, err := buf.WriteRune(c); err != nil {
return lexEOF
}
}
marker := buf.String()
if marker == "" {
x.createErr("Heredoc must have a marker, e.g. <<FOO")
return lexEOF
}
check := true
buf.Reset()
for {
c := x.next()
// If we're checking, then check to see if we see the marker
if check {
check = false
var cs []rune
for _, r := range marker {
if r != c {
break
}
cs = append(cs, c)
c = x.next()
}
if len(cs) == len(marker) {
break
}
if len(cs) > 0 {
for _, c := range cs {
if _, err := buf.WriteRune(c); err != nil {
return lexEOF
}
}
}
}
if c == lexEOF {
return lexEOF
}
// If we hit a newline, then reset to check
if c == '\n' {
check = true
}
if _, err := buf.WriteRune(c); err != nil {
return lexEOF
}
}
yylval.str = buf.String()
return STRING
}
// lexNumber lexes out a number
func (x *hclLex) lexNumber(yylval *hclSymType) int {
var b bytes.Buffer
gotPeriod := false
for {
c := x.next()
if c == lexEOF {
break
}
if c == '.' {
if gotPeriod {
x.backup()
break
}
gotPeriod = true
} else if c < '0' || c > '9' {
x.backup()
break
}
if _, err := b.WriteRune(c); err != nil {
x.createErr(fmt.Sprintf("Internal error: %s", err))
return lexEOF
}
}
if !gotPeriod {
v, err := strconv.ParseInt(b.String(), 0, 0)
if err != nil {
x.createErr(fmt.Sprintf("Expected number: %s", err))
return lexEOF
}
yylval.num = int(v)
return NUMBER
}
f, err := strconv.ParseFloat(b.String(), 64)
if err != nil {
x.createErr(fmt.Sprintf("Expected float: %s", err))
return lexEOF
}
yylval.f = float64(f)
return FLOAT
}
// lexString extracts a string from the input
func (x *hclLex) lexString(yylval *hclSymType) int {
braces := 0
var b bytes.Buffer
for {
c := x.next()
if c == lexEOF {
break
}
// String end
if c == '"' && braces == 0 {
break
}
// If we hit a newline, then it's an error
if c == '\n' {
x.createErr("Newline before string closed")
return lexEOF
}
// If we're escaping a quote, then escape the quote
if c == '\\' {
n := x.next()
switch n {
case '"':
c = n
case 'n':
c = '\n'
case '\\':
c = n
default:
x.backup()
}
}
// If we're starting into a variable, mark it
if braces == 0 && c == '$' && x.peek() == '{' {
braces += 1
if _, err := b.WriteRune(c); err != nil {
return lexEOF
}
c = x.next()
} else if braces > 0 && c == '{' {
braces += 1
}
if braces > 0 && c == '}' {
braces -= 1
}
if _, err := b.WriteRune(c); err != nil {
return lexEOF
}
}
yylval.str = b.String()
return STRING
}
// Return the next rune for the lexer.
func (x *hclLex) next() rune {
if int(x.pos) >= len(x.Input) {
x.width = 0
return lexEOF
}
r, w := utf8.DecodeRuneInString(x.Input[x.pos:])
x.width = w
x.pos += x.width
x.col += 1
if x.line == 0 {
x.line = 1
}
if r == '\n' {
x.line += 1
x.col = 0
}
return r
}
// peek returns but does not consume the next rune in the input
func (x *hclLex) peek() rune {
r := x.next()
x.backup()
return r
}
// backup steps back one rune. Can only be called once per next.
func (x *hclLex) backup() {
x.col -= 1
x.pos -= x.width
}
// createErr records the given error
func (x *hclLex) createErr(msg string) {
x.err = fmt.Errorf("Line %d, column %d: %s", x.line, x.col, msg)
}
// The parser calls this method on a parse error.
func (x *hclLex) Error(s string) {
x.createErr(s)
}


@ -0,0 +1,95 @@
package hcl
import (
"io/ioutil"
"path/filepath"
"reflect"
"testing"
)
func TestLex(t *testing.T) {
cases := []struct {
Input string
Output []int
}{
{
"comment.hcl",
[]int{IDENTIFIER, EQUAL, STRING, lexEOF},
},
{
"comment_single.hcl",
[]int{lexEOF},
},
{
"complex_key.hcl",
[]int{IDENTIFIER, EQUAL, STRING, lexEOF},
},
{
"multiple.hcl",
[]int{
IDENTIFIER, EQUAL, STRING,
IDENTIFIER, EQUAL, NUMBER,
lexEOF,
},
},
{
"list.hcl",
[]int{
IDENTIFIER, EQUAL, LEFTBRACKET,
NUMBER, COMMA, NUMBER, COMMA, STRING,
RIGHTBRACKET, lexEOF,
},
},
{
"old.hcl",
[]int{IDENTIFIER, EQUAL, LEFTBRACE, STRING, lexEOF},
},
{
"structure_basic.hcl",
[]int{
IDENTIFIER, LEFTBRACE,
IDENTIFIER, EQUAL, NUMBER,
STRING, EQUAL, NUMBER,
STRING, EQUAL, NUMBER,
RIGHTBRACE, lexEOF,
},
},
{
"structure.hcl",
[]int{
IDENTIFIER, IDENTIFIER, STRING, LEFTBRACE,
IDENTIFIER, EQUAL, NUMBER,
IDENTIFIER, EQUAL, STRING,
RIGHTBRACE, lexEOF,
},
},
}
for _, tc := range cases {
d, err := ioutil.ReadFile(filepath.Join(fixtureDir, tc.Input))
if err != nil {
t.Fatalf("err: %s", err)
}
l := &hclLex{Input: string(d)}
var actual []int
for {
token := l.Lex(new(hclSymType))
actual = append(actual, token)
if token == lexEOF {
break
}
if len(actual) > 500 {
t.Fatalf("Input:%s\n\nExausted.", tc.Input)
}
}
if !reflect.DeepEqual(actual, tc.Output) {
t.Fatalf(
"Input: %s\n\nBad: %#v\n\nExpected: %#v",
tc.Input, actual, tc.Output)
}
}
}


@ -0,0 +1,128 @@
package hcl
import (
"fmt"
"strings"
)
//go:generate stringer -type=ValueType
// ValueType is an enum representing the type of a value in
// an Object.
type ValueType byte
const (
ValueTypeUnknown ValueType = iota
ValueTypeFloat
ValueTypeInt
ValueTypeString
ValueTypeBool
ValueTypeNil
ValueTypeList
ValueTypeObject
)
// Object represents any element of HCL: an object itself, a list,
// a literal, etc.
type Object struct {
Key string
Type ValueType
Value interface{}
Next *Object
}
// GoString is an implementation of the GoStringer interface.
func (o *Object) GoString() string {
return fmt.Sprintf("*%#v", *o)
}
// Get gets all the objects that match the given key.
//
// It returns the resulting objects as a single Object structure with
// the linked list populated.
func (o *Object) Get(k string, insensitive bool) *Object {
if o.Type != ValueTypeObject {
return nil
}
for _, o := range o.Elem(true) {
if o.Key != k {
if !insensitive || !strings.EqualFold(o.Key, k) {
continue
}
}
return o
}
return nil
}
// Elem returns all the elements that are part of this object.
func (o *Object) Elem(expand bool) []*Object {
if !expand {
result := make([]*Object, 0, 1)
current := o
for current != nil {
obj := *current
obj.Next = nil
result = append(result, &obj)
current = current.Next
}
return result
}
if o.Value == nil {
return nil
}
switch o.Type {
case ValueTypeList:
return o.Value.([]*Object)
case ValueTypeObject:
result := make([]*Object, 0, 5)
for _, obj := range o.Elem(false) {
result = append(result, obj.Value.([]*Object)...)
}
return result
default:
return []*Object{o}
}
}
// Len returns the number of objects in this object structure.
func (o *Object) Len() (i int) {
current := o
for current != nil {
i += 1
current = current.Next
}
return
}
// ObjectList is a list of objects.
type ObjectList []*Object
// Flat returns a flattened list structure of the objects.
func (l ObjectList) Flat() []*Object {
m := make(map[string]*Object)
result := make([]*Object, 0, len(l))
for _, obj := range l {
prev, ok := m[obj.Key]
if !ok {
m[obj.Key] = obj
result = append(result, obj)
continue
}
for prev.Next != nil {
prev = prev.Next
}
prev.Next = obj
}
return result
}
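
To illustrate how Parse, Get, and Elem from this package fit together, here is a brief sketch; the input string is illustrative, and the import path assumes the vendored location github.com/hashicorp/hcl/hcl:

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl/hcl"
)

func main() {
	obj, err := hcl.Parse(`foo { key = 7 }`)
	if err != nil {
		log.Fatal(err)
	}

	// Get returns the objects keyed "foo" as a linked list (nil if absent);
	// Elem(true) expands into the nested elements of that object.
	if foo := obj.Get("foo", false); foo != nil {
		for _, o := range foo.Elem(true) {
			fmt.Println(o.Key, o.Type, o.Value) // key ValueTypeInt 7
		}
	}
}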


@ -0,0 +1,39 @@
package hcl
import (
"sync"
"github.com/hashicorp/go-multierror"
)
// hclErrors are the errors built up from parsing. These should not
// be accessed directly.
var hclErrors []error
var hclLock sync.Mutex
var hclResult *Object
// Parse parses the given string and returns the result.
func Parse(v string) (*Object, error) {
hclLock.Lock()
defer hclLock.Unlock()
hclErrors = nil
hclResult = nil
// Parse
lex := &hclLex{Input: v}
hclParse(lex)
// If we have an error in the lexer itself, return it
if lex.err != nil {
return nil, lex.err
}
// Build up the errors
var err error
if len(hclErrors) > 0 {
err = &multierror.Error{Errors: hclErrors}
hclResult = nil
}
return hclResult, err
}


@ -0,0 +1,259 @@
// This is the yacc input for creating the parser for HCL.
%{
package hcl
import (
"fmt"
"strconv"
)
%}
%union {
b bool
f float64
num int
str string
obj *Object
objlist []*Object
}
%type <f> float
%type <num> int
%type <objlist> list listitems objectlist
%type <obj> block number object objectitem
%type <obj> listitem
%type <str> blockId exp objectkey
%token <b> BOOL
%token <f> FLOAT
%token <num> NUMBER
%token <str> COMMA COMMAEND IDENTIFIER EQUAL NEWLINE STRING MINUS
%token <str> LEFTBRACE RIGHTBRACE LEFTBRACKET RIGHTBRACKET PERIOD
%token <str> EPLUS EMINUS
%%
top:
{
hclResult = &Object{Type: ValueTypeObject}
}
| objectlist
{
hclResult = &Object{
Type: ValueTypeObject,
Value: ObjectList($1).Flat(),
}
}
objectlist:
objectitem
{
$$ = []*Object{$1}
}
| objectlist objectitem
{
$$ = append($1, $2)
}
object:
LEFTBRACE objectlist RIGHTBRACE
{
$$ = &Object{
Type: ValueTypeObject,
Value: ObjectList($2).Flat(),
}
}
| LEFTBRACE RIGHTBRACE
{
$$ = &Object{
Type: ValueTypeObject,
}
}
objectkey:
IDENTIFIER
{
$$ = $1
}
| STRING
{
$$ = $1
}
objectitem:
objectkey EQUAL number
{
$$ = $3
$$.Key = $1
}
| objectkey EQUAL BOOL
{
$$ = &Object{
Key: $1,
Type: ValueTypeBool,
Value: $3,
}
}
| objectkey EQUAL STRING
{
$$ = &Object{
Key: $1,
Type: ValueTypeString,
Value: $3,
}
}
| objectkey EQUAL object
{
$3.Key = $1
$$ = $3
}
| objectkey EQUAL list
{
$$ = &Object{
Key: $1,
Type: ValueTypeList,
Value: $3,
}
}
| block
{
$$ = $1
}
block:
blockId object
{
$2.Key = $1
$$ = $2
}
| blockId block
{
$$ = &Object{
Key: $1,
Type: ValueTypeObject,
Value: []*Object{$2},
}
}
blockId:
IDENTIFIER
{
$$ = $1
}
| STRING
{
$$ = $1
}
list:
LEFTBRACKET listitems RIGHTBRACKET
{
$$ = $2
}
| LEFTBRACKET RIGHTBRACKET
{
$$ = nil
}
listitems:
listitem
{
$$ = []*Object{$1}
}
| listitems COMMA listitem
{
$$ = append($1, $3)
}
| listitems COMMAEND
{
$$ = $1
}
listitem:
number
{
$$ = $1
}
| STRING
{
$$ = &Object{
Type: ValueTypeString,
Value: $1,
}
}
number:
int
{
$$ = &Object{
Type: ValueTypeInt,
Value: $1,
}
}
| float
{
$$ = &Object{
Type: ValueTypeFloat,
Value: $1,
}
}
| int exp
{
fs := fmt.Sprintf("%d%s", $1, $2)
f, err := strconv.ParseFloat(fs, 64)
if err != nil {
panic(err)
}
$$ = &Object{
Type: ValueTypeFloat,
Value: f,
}
}
| float exp
{
fs := fmt.Sprintf("%f%s", $1, $2)
f, err := strconv.ParseFloat(fs, 64)
if err != nil {
panic(err)
}
$$ = &Object{
Type: ValueTypeFloat,
Value: f,
}
}
int:
MINUS int
{
$$ = $2 * -1
}
| NUMBER
{
$$ = $1
}
float:
MINUS float
{
$$ = $2 * -1
}
| FLOAT
{
$$ = $1
}
exp:
EPLUS NUMBER
{
$$ = "e" + strconv.FormatInt(int64($2), 10)
}
| EMINUS NUMBER
{
$$ = "e-" + strconv.FormatInt(int64($2), 10)
}
%%


@ -0,0 +1,75 @@
package hcl
import (
"io/ioutil"
"path/filepath"
"testing"
)
func TestParse(t *testing.T) {
cases := []struct {
Name string
Err bool
}{
{
"assign_colon.hcl",
true,
},
{
"comment.hcl",
false,
},
{
"comment_single.hcl",
false,
},
{
"empty.hcl",
false,
},
{
"list_comma.hcl",
false,
},
{
"multiple.hcl",
false,
},
{
"structure.hcl",
false,
},
{
"structure_basic.hcl",
false,
},
{
"structure_empty.hcl",
false,
},
{
"complex.hcl",
false,
},
{
"assign_deep.hcl",
true,
},
{
"types.hcl",
false,
},
}
for _, tc := range cases {
d, err := ioutil.ReadFile(filepath.Join(fixtureDir, tc.Name))
if err != nil {
t.Fatalf("err: %s", err)
}
_, err = Parse(string(d))
if (err != nil) != tc.Err {
t.Fatalf("Input: %s\n\nError: %s", tc.Name, err)
}
}
}


@ -0,0 +1,6 @@
resource = [{
"foo": {
"bar": {},
"baz": [1, 2, "foo"],
}
}]


@ -0,0 +1,5 @@
resource = [{
foo = [{
bar = {}
}]
}]


@ -0,0 +1,15 @@
// Foo
/* Bar */
/*
/*
Baz
*/
# Another
# Multiple
# Lines
foo = "bar"


@ -0,0 +1 @@
# Hello


@ -0,0 +1,42 @@
// This comes from Terraform, as a test
variable "foo" {
default = "bar"
description = "bar"
}
provider "aws" {
access_key = "foo"
secret_key = "bar"
}
provider "do" {
api_key = "${var.foo}"
}
resource "aws_security_group" "firewall" {
count = 5
}
resource aws_instance "web" {
ami = "${var.foo}"
security_groups = [
"foo",
"${aws_security_group.firewall.foo}"
]
network_interface {
device_index = 0
description = "Main network interface"
}
}
resource "aws_instance" "db" {
security_groups = "${aws_security_group.firewall.*.id}"
VPC = "foo"
depends_on = ["aws_instance.web"]
}
output "web_ip" {
value = "${aws_instance.web.private_ip}"
}


@ -0,0 +1 @@
foo.bar = "baz"


@ -0,0 +1 @@
foo = [1, 2, "foo"]


@ -0,0 +1 @@
foo = [1, 2, "foo",]


@ -0,0 +1,2 @@
foo = "bar"
key = 7


@ -0,0 +1,3 @@
default = {
"eu-west-1": "ami-b1cf19c6",
}


@ -0,0 +1,5 @@
// This is a test structure for the lexer
foo bar "baz" {
key = 7
foo = "bar"
}


@ -0,0 +1,5 @@
foo {
value = 7
"value" = 8
"complex::value" = 9
}


@ -0,0 +1 @@
resource "foo" "bar" {}


@ -0,0 +1,7 @@
foo = "bar"
bar = 7
baz = [1,2,3]
foo = -12
bar = 3.14159
foo = true
bar = false


@ -0,0 +1,16 @@
// generated by stringer -type=ValueType; DO NOT EDIT
package hcl
import "fmt"
const _ValueType_name = "ValueTypeUnknownValueTypeFloatValueTypeIntValueTypeStringValueTypeBoolValueTypeNilValueTypeListValueTypeObject"
var _ValueType_index = [...]uint8{0, 16, 30, 42, 57, 70, 82, 95, 110}
func (i ValueType) String() string {
if i >= ValueType(len(_ValueType_index)-1) {
return fmt.Sprintf("ValueType(%d)", i)
}
return _ValueType_name[_ValueType_index[i]:_ValueType_index[i+1]]
}

611
Godeps/_workspace/src/github.com/hashicorp/hcl/hcl/y.go generated vendored Normal file

@ -0,0 +1,611 @@
//line parse.y:4
package hcl
import __yyfmt__ "fmt"
//line parse.y:4
import (
"fmt"
"strconv"
)
//line parse.y:13
type hclSymType struct {
yys int
b bool
f float64
num int
str string
obj *Object
objlist []*Object
}
const BOOL = 57346
const FLOAT = 57347
const NUMBER = 57348
const COMMA = 57349
const COMMAEND = 57350
const IDENTIFIER = 57351
const EQUAL = 57352
const NEWLINE = 57353
const STRING = 57354
const MINUS = 57355
const LEFTBRACE = 57356
const RIGHTBRACE = 57357
const LEFTBRACKET = 57358
const RIGHTBRACKET = 57359
const PERIOD = 57360
const EPLUS = 57361
const EMINUS = 57362
var hclToknames = []string{
"BOOL",
"FLOAT",
"NUMBER",
"COMMA",
"COMMAEND",
"IDENTIFIER",
"EQUAL",
"NEWLINE",
"STRING",
"MINUS",
"LEFTBRACE",
"RIGHTBRACE",
"LEFTBRACKET",
"RIGHTBRACKET",
"PERIOD",
"EPLUS",
"EMINUS",
}
var hclStatenames = []string{}
const hclEofCode = 1
const hclErrCode = 2
const hclMaxDepth = 200
//line parse.y:259
//line yacctab:1
var hclExca = []int{
-1, 1,
1, -1,
-2, 0,
-1, 6,
10, 7,
-2, 17,
-1, 7,
10, 8,
-2, 18,
}
const hclNprod = 36
const hclPrivate = 57344
var hclTokenNames []string
var hclStates []string
const hclLast = 62
var hclAct = []int{
35, 3, 21, 22, 9, 30, 31, 29, 17, 26,
25, 26, 25, 10, 26, 25, 18, 24, 13, 24,
23, 37, 24, 44, 45, 42, 34, 38, 39, 9,
32, 6, 6, 43, 7, 7, 2, 40, 28, 26,
25, 6, 41, 11, 7, 46, 37, 24, 14, 36,
27, 15, 5, 13, 19, 1, 4, 8, 33, 20,
16, 12,
}
var hclPact = []int{
32, -1000, 32, -1000, 3, -1000, -1000, -1000, 39, -1000,
4, -1000, -1000, 23, -1000, -1000, -1000, -1000, -1000, -1000,
-1000, -14, -14, 9, 6, -1000, -1000, 22, -1000, -1000,
36, 19, -1000, 16, -1000, -1000, -1000, -1000, -1000, -1000,
-1000, -1000, -1000, -1000, 34, -1000, -1000,
}
var hclPgo = []int{
0, 3, 2, 59, 58, 36, 52, 49, 43, 1,
0, 57, 7, 56, 55,
}
var hclR1 = []int{
0, 14, 14, 5, 5, 8, 8, 13, 13, 9,
9, 9, 9, 9, 9, 6, 6, 11, 11, 3,
3, 4, 4, 4, 10, 10, 7, 7, 7, 7,
2, 2, 1, 1, 12, 12,
}
var hclR2 = []int{
0, 0, 1, 1, 2, 3, 2, 1, 1, 3,
3, 3, 3, 3, 1, 2, 2, 1, 1, 3,
2, 1, 3, 2, 1, 1, 1, 1, 2, 2,
2, 1, 2, 1, 2, 2,
}
var hclChk = []int{
-1000, -14, -5, -9, -13, -6, 9, 12, -11, -9,
10, -8, -6, 14, 9, 12, -7, 4, 12, -8,
-3, -2, -1, 16, 13, 6, 5, -5, 15, -12,
19, 20, -12, -4, 17, -10, -7, 12, -2, -1,
15, 6, 6, 17, 7, 8, -10,
}
var hclDef = []int{
1, -2, 2, 3, 0, 14, -2, -2, 0, 4,
0, 15, 16, 0, 17, 18, 9, 10, 11, 12,
13, 26, 27, 0, 0, 31, 33, 0, 6, 28,
0, 0, 29, 0, 20, 21, 24, 25, 30, 32,
5, 34, 35, 19, 0, 23, 22,
}
var hclTok1 = []int{
1,
}
var hclTok2 = []int{
2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
12, 13, 14, 15, 16, 17, 18, 19, 20,
}
var hclTok3 = []int{
0,
}
//line yaccpar:1
/* parser for yacc output */
var hclDebug = 0
type hclLexer interface {
Lex(lval *hclSymType) int
Error(s string)
}
const hclFlag = -1000
func hclTokname(c int) string {
// 4 is TOKSTART above
if c >= 4 && c-4 < len(hclToknames) {
if hclToknames[c-4] != "" {
return hclToknames[c-4]
}
}
return __yyfmt__.Sprintf("tok-%v", c)
}
func hclStatname(s int) string {
if s >= 0 && s < len(hclStatenames) {
if hclStatenames[s] != "" {
return hclStatenames[s]
}
}
return __yyfmt__.Sprintf("state-%v", s)
}
func hcllex1(lex hclLexer, lval *hclSymType) int {
c := 0
char := lex.Lex(lval)
if char <= 0 {
c = hclTok1[0]
goto out
}
if char < len(hclTok1) {
c = hclTok1[char]
goto out
}
if char >= hclPrivate {
if char < hclPrivate+len(hclTok2) {
c = hclTok2[char-hclPrivate]
goto out
}
}
for i := 0; i < len(hclTok3); i += 2 {
c = hclTok3[i+0]
if c == char {
c = hclTok3[i+1]
goto out
}
}
out:
if c == 0 {
c = hclTok2[1] /* unknown char */
}
if hclDebug >= 3 {
__yyfmt__.Printf("lex %s(%d)\n", hclTokname(c), uint(char))
}
return c
}
func hclParse(hcllex hclLexer) int {
var hcln int
var hcllval hclSymType
var hclVAL hclSymType
hclS := make([]hclSymType, hclMaxDepth)
Nerrs := 0 /* number of errors */
Errflag := 0 /* error recovery flag */
hclstate := 0
hclchar := -1
hclp := -1
goto hclstack
ret0:
return 0
ret1:
return 1
hclstack:
/* put a state and value onto the stack */
if hclDebug >= 4 {
__yyfmt__.Printf("char %v in %v\n", hclTokname(hclchar), hclStatname(hclstate))
}
hclp++
if hclp >= len(hclS) {
nyys := make([]hclSymType, len(hclS)*2)
copy(nyys, hclS)
hclS = nyys
}
hclS[hclp] = hclVAL
hclS[hclp].yys = hclstate
hclnewstate:
hcln = hclPact[hclstate]
if hcln <= hclFlag {
goto hcldefault /* simple state */
}
if hclchar < 0 {
hclchar = hcllex1(hcllex, &hcllval)
}
hcln += hclchar
if hcln < 0 || hcln >= hclLast {
goto hcldefault
}
hcln = hclAct[hcln]
if hclChk[hcln] == hclchar { /* valid shift */
hclchar = -1
hclVAL = hcllval
hclstate = hcln
if Errflag > 0 {
Errflag--
}
goto hclstack
}
hcldefault:
/* default state action */
hcln = hclDef[hclstate]
if hcln == -2 {
if hclchar < 0 {
hclchar = hcllex1(hcllex, &hcllval)
}
/* look through exception table */
xi := 0
for {
if hclExca[xi+0] == -1 && hclExca[xi+1] == hclstate {
break
}
xi += 2
}
for xi += 2; ; xi += 2 {
hcln = hclExca[xi+0]
if hcln < 0 || hcln == hclchar {
break
}
}
hcln = hclExca[xi+1]
if hcln < 0 {
goto ret0
}
}
if hcln == 0 {
/* error ... attempt to resume parsing */
switch Errflag {
case 0: /* brand new error */
hcllex.Error("syntax error")
Nerrs++
if hclDebug >= 1 {
__yyfmt__.Printf("%s", hclStatname(hclstate))
__yyfmt__.Printf(" saw %s\n", hclTokname(hclchar))
}
fallthrough
case 1, 2: /* incompletely recovered error ... try again */
Errflag = 3
/* find a state where "error" is a legal shift action */
for hclp >= 0 {
hcln = hclPact[hclS[hclp].yys] + hclErrCode
if hcln >= 0 && hcln < hclLast {
hclstate = hclAct[hcln] /* simulate a shift of "error" */
if hclChk[hclstate] == hclErrCode {
goto hclstack
}
}
/* the current p has no shift on "error", pop stack */
if hclDebug >= 2 {
__yyfmt__.Printf("error recovery pops state %d\n", hclS[hclp].yys)
}
hclp--
}
/* there is no state on the stack with an error shift ... abort */
goto ret1
case 3: /* no shift yet; clobber input char */
if hclDebug >= 2 {
__yyfmt__.Printf("error recovery discards %s\n", hclTokname(hclchar))
}
if hclchar == hclEofCode {
goto ret1
}
hclchar = -1
goto hclnewstate /* try again in the same state */
}
}
/* reduction by production hcln */
if hclDebug >= 2 {
__yyfmt__.Printf("reduce %v in:\n\t%v\n", hcln, hclStatname(hclstate))
}
hclnt := hcln
hclpt := hclp
_ = hclpt // guard against "declared and not used"
hclp -= hclR2[hcln]
hclVAL = hclS[hclp+1]
/* consult goto table to find next state */
hcln = hclR1[hcln]
hclg := hclPgo[hcln]
hclj := hclg + hclS[hclp].yys + 1
if hclj >= hclLast {
hclstate = hclAct[hclg]
} else {
hclstate = hclAct[hclj]
if hclChk[hclstate] != -hcln {
hclstate = hclAct[hclg]
}
}
// dummy call; replaced with literal code
switch hclnt {
case 1:
//line parse.y:39
{
hclResult = &Object{Type: ValueTypeObject}
}
case 2:
//line parse.y:43
{
hclResult = &Object{
Type: ValueTypeObject,
Value: ObjectList(hclS[hclpt-0].objlist).Flat(),
}
}
case 3:
//line parse.y:52
{
hclVAL.objlist = []*Object{hclS[hclpt-0].obj}
}
case 4:
//line parse.y:56
{
hclVAL.objlist = append(hclS[hclpt-1].objlist, hclS[hclpt-0].obj)
}
case 5:
//line parse.y:62
{
hclVAL.obj = &Object{
Type: ValueTypeObject,
Value: ObjectList(hclS[hclpt-1].objlist).Flat(),
}
}
case 6:
//line parse.y:69
{
hclVAL.obj = &Object{
Type: ValueTypeObject,
}
}
case 7:
//line parse.y:77
{
hclVAL.str = hclS[hclpt-0].str
}
case 8:
//line parse.y:81
{
hclVAL.str = hclS[hclpt-0].str
}
case 9:
//line parse.y:87
{
hclVAL.obj = hclS[hclpt-0].obj
hclVAL.obj.Key = hclS[hclpt-2].str
}
case 10:
//line parse.y:92
{
hclVAL.obj = &Object{
Key: hclS[hclpt-2].str,
Type: ValueTypeBool,
Value: hclS[hclpt-0].b,
}
}
case 11:
//line parse.y:100
{
hclVAL.obj = &Object{
Key: hclS[hclpt-2].str,
Type: ValueTypeString,
Value: hclS[hclpt-0].str,
}
}
case 12:
//line parse.y:108
{
hclS[hclpt-0].obj.Key = hclS[hclpt-2].str
hclVAL.obj = hclS[hclpt-0].obj
}
case 13:
//line parse.y:113
{
hclVAL.obj = &Object{
Key: hclS[hclpt-2].str,
Type: ValueTypeList,
Value: hclS[hclpt-0].objlist,
}
}
case 14:
//line parse.y:121
{
hclVAL.obj = hclS[hclpt-0].obj
}
case 15:
//line parse.y:127
{
hclS[hclpt-0].obj.Key = hclS[hclpt-1].str
hclVAL.obj = hclS[hclpt-0].obj
}
case 16:
//line parse.y:132
{
hclVAL.obj = &Object{
Key: hclS[hclpt-1].str,
Type: ValueTypeObject,
Value: []*Object{hclS[hclpt-0].obj},
}
}
case 17:
//line parse.y:142
{
hclVAL.str = hclS[hclpt-0].str
}
case 18:
//line parse.y:146
{
hclVAL.str = hclS[hclpt-0].str
}
case 19:
//line parse.y:152
{
hclVAL.objlist = hclS[hclpt-1].objlist
}
case 20:
//line parse.y:156
{
hclVAL.objlist = nil
}
case 21:
//line parse.y:162
{
hclVAL.objlist = []*Object{hclS[hclpt-0].obj}
}
case 22:
//line parse.y:166
{
hclVAL.objlist = append(hclS[hclpt-2].objlist, hclS[hclpt-0].obj)
}
case 23:
//line parse.y:170
{
hclVAL.objlist = hclS[hclpt-1].objlist
}
case 24:
//line parse.y:176
{
hclVAL.obj = hclS[hclpt-0].obj
}
case 25:
//line parse.y:180
{
hclVAL.obj = &Object{
Type: ValueTypeString,
Value: hclS[hclpt-0].str,
}
}
case 26:
//line parse.y:189
{
hclVAL.obj = &Object{
Type: ValueTypeInt,
Value: hclS[hclpt-0].num,
}
}
case 27:
//line parse.y:196
{
hclVAL.obj = &Object{
Type: ValueTypeFloat,
Value: hclS[hclpt-0].f,
}
}
case 28:
//line parse.y:203
{
fs := fmt.Sprintf("%d%s", hclS[hclpt-1].num, hclS[hclpt-0].str)
f, err := strconv.ParseFloat(fs, 64)
if err != nil {
panic(err)
}
hclVAL.obj = &Object{
Type: ValueTypeFloat,
Value: f,
}
}
case 29:
//line parse.y:216
{
fs := fmt.Sprintf("%f%s", hclS[hclpt-1].f, hclS[hclpt-0].str)
f, err := strconv.ParseFloat(fs, 64)
if err != nil {
panic(err)
}
hclVAL.obj = &Object{
Type: ValueTypeFloat,
Value: f,
}
}
case 30:
//line parse.y:231
{
hclVAL.num = hclS[hclpt-0].num * -1
}
case 31:
//line parse.y:235
{
hclVAL.num = hclS[hclpt-0].num
}
case 32:
//line parse.y:241
{
hclVAL.f = hclS[hclpt-0].f * -1
}
case 33:
//line parse.y:245
{
hclVAL.f = hclS[hclpt-0].f
}
case 34:
//line parse.y:251
{
hclVAL.str = "e" + strconv.FormatInt(int64(hclS[hclpt-0].num), 10)
}
case 35:
//line parse.y:255
{
hclVAL.str = "e-" + strconv.FormatInt(int64(hclS[hclpt-0].num), 10)
}
}
goto hclstack /* stack new state and value */
}


@ -0,0 +1,19 @@
package hcl
import (
"io/ioutil"
"path/filepath"
"testing"
)
// This is the directory where our test fixtures are.
const fixtureDir = "./test-fixtures"
func testReadFile(t *testing.T, n string) string {
d, err := ioutil.ReadFile(filepath.Join(fixtureDir, n))
if err != nil {
t.Fatalf("err: %s", err)
}
return string(d)
}


@ -0,0 +1,4 @@
package json
// This is the directory where our test fixtures are.
const fixtureDir = "./test-fixtures"


@ -0,0 +1,256 @@
package json
import (
"bytes"
"fmt"
"strconv"
"unicode"
"unicode/utf8"
)
//go:generate go tool yacc -p "json" parse.y
// The parser expects the lexer to return 0 on EOF.
const lexEOF = 0
// The parser uses the type <prefix>Lex as a lexer. It must provide
// the methods Lex(*<prefix>SymType) int and Error(string).
type jsonLex struct {
Input string
pos int
width int
col, line int
err error
}
// The parser calls this method to get each new token.
func (x *jsonLex) Lex(yylval *jsonSymType) int {
for {
c := x.next()
if c == lexEOF {
return lexEOF
}
// Ignore all whitespace except a newline which we handle
// specially later.
if unicode.IsSpace(c) {
continue
}
// If it is a number, lex the number
if c >= '0' && c <= '9' {
x.backup()
return x.lexNumber(yylval)
}
switch c {
case 'e':
fallthrough
case 'E':
switch x.next() {
case '+':
return EPLUS
case '-':
return EMINUS
default:
x.backup()
return EPLUS
}
case '.':
return PERIOD
case '-':
return MINUS
case ':':
return COLON
case ',':
return COMMA
case '[':
return LEFTBRACKET
case ']':
return RIGHTBRACKET
case '{':
return LEFTBRACE
case '}':
return RIGHTBRACE
case '"':
return x.lexString(yylval)
default:
x.backup()
return x.lexId(yylval)
}
}
}
// lexId lexes an identifier
func (x *jsonLex) lexId(yylval *jsonSymType) int {
var b bytes.Buffer
first := true
for {
c := x.next()
if c == lexEOF {
break
}
if !unicode.IsDigit(c) && !unicode.IsLetter(c) && c != '_' && c != '-' {
x.backup()
if first {
x.createErr("Invalid identifier")
return lexEOF
}
break
}
first = false
if _, err := b.WriteRune(c); err != nil {
return lexEOF
}
}
switch v := b.String(); v {
case "true":
return TRUE
case "false":
return FALSE
case "null":
return NULL
default:
x.createErr(fmt.Sprintf("Invalid identifier: %s", v))
return lexEOF
}
}
// lexNumber lexes out a number
func (x *jsonLex) lexNumber(yylval *jsonSymType) int {
var b bytes.Buffer
gotPeriod := false
for {
c := x.next()
if c == lexEOF {
break
}
if c == '.' {
if gotPeriod {
x.backup()
break
}
gotPeriod = true
} else if c < '0' || c > '9' {
x.backup()
break
}
if _, err := b.WriteRune(c); err != nil {
x.createErr(fmt.Sprintf("Internal error: %s", err))
return lexEOF
}
}
if !gotPeriod {
v, err := strconv.ParseInt(b.String(), 0, 0)
if err != nil {
x.createErr(fmt.Sprintf("Expected number: %s", err))
return lexEOF
}
yylval.num = int(v)
return NUMBER
}
f, err := strconv.ParseFloat(b.String(), 64)
if err != nil {
x.createErr(fmt.Sprintf("Expected float: %s", err))
return lexEOF
}
yylval.f = float64(f)
return FLOAT
}
// lexString extracts a string from the input
func (x *jsonLex) lexString(yylval *jsonSymType) int {
var b bytes.Buffer
for {
c := x.next()
if c == lexEOF {
break
}
// String end
if c == '"' {
break
}
// If we're escaping a quote, then escape the quote
if c == '\\' {
n := x.next()
switch n {
case '"':
c = n
case 'n':
c = '\n'
case '\\':
c = n
default:
x.backup()
}
}
if _, err := b.WriteRune(c); err != nil {
return lexEOF
}
}
yylval.str = b.String()
return STRING
}
// Return the next rune for the lexer.
func (x *jsonLex) next() rune {
if int(x.pos) >= len(x.Input) {
x.width = 0
return lexEOF
}
r, w := utf8.DecodeRuneInString(x.Input[x.pos:])
x.width = w
x.pos += x.width
x.col += 1
if x.line == 0 {
x.line = 1
}
if r == '\n' {
x.line += 1
x.col = 0
}
return r
}
// peek returns but does not consume the next rune in the input
func (x *jsonLex) peek() rune {
r := x.next()
x.backup()
return r
}
// backup steps back one rune. Can only be called once per next.
func (x *jsonLex) backup() {
x.col -= 1
x.pos -= x.width
}
// createErr records the given error
func (x *jsonLex) createErr(msg string) {
x.err = fmt.Errorf("Line %d, column %d: %s", x.line, x.col, msg)
}
// The parser calls this method on a parse error.
func (x *jsonLex) Error(s string) {
x.createErr(s)
}


@ -0,0 +1,78 @@
package json
import (
"io/ioutil"
"path/filepath"
"reflect"
"testing"
)
func TestLexJson(t *testing.T) {
cases := []struct {
Input string
Output []int
}{
{
"basic.json",
[]int{
LEFTBRACE,
STRING, COLON, STRING,
RIGHTBRACE,
lexEOF,
},
},
{
"array.json",
[]int{
LEFTBRACE,
STRING, COLON, LEFTBRACKET,
NUMBER, COMMA, NUMBER, COMMA, STRING,
RIGHTBRACKET, COMMA,
STRING, COLON, STRING,
RIGHTBRACE,
lexEOF,
},
},
{
"object.json",
[]int{
LEFTBRACE,
STRING, COLON, LEFTBRACE,
STRING, COLON, LEFTBRACKET,
NUMBER, COMMA, NUMBER,
RIGHTBRACKET,
RIGHTBRACE,
RIGHTBRACE,
lexEOF,
},
},
}
for _, tc := range cases {
d, err := ioutil.ReadFile(filepath.Join(fixtureDir, tc.Input))
if err != nil {
t.Fatalf("err: %s", err)
}
l := &jsonLex{Input: string(d)}
var actual []int
for {
token := l.Lex(new(jsonSymType))
actual = append(actual, token)
if token == lexEOF {
break
}
if len(actual) > 500 {
t.Fatalf("Input:%s\n\nExausted.", tc.Input)
}
}
if !reflect.DeepEqual(actual, tc.Output) {
t.Fatalf(
"Input: %s\n\nBad: %#v\n\nExpected: %#v",
tc.Input, actual, tc.Output)
}
}
}


@ -0,0 +1,40 @@
package json
import (
"sync"
"github.com/hashicorp/hcl/hcl"
"github.com/hashicorp/go-multierror"
)
// jsonErrors are the errors built up from parsing. These should not
// be accessed directly.
var jsonErrors []error
var jsonLock sync.Mutex
var jsonResult *hcl.Object
// Parse parses the given string and returns the result.
func Parse(v string) (*hcl.Object, error) {
jsonLock.Lock()
defer jsonLock.Unlock()
jsonErrors = nil
jsonResult = nil
// Parse
lex := &jsonLex{Input: v}
jsonParse(lex)
// If we have an error in the lexer itself, return it
if lex.err != nil {
return nil, lex.err
}
// Build up the errors
var err error
if len(jsonErrors) > 0 {
err = &multierror.Error{Errors: jsonErrors}
jsonResult = nil
}
return jsonResult, err
}


@ -0,0 +1,210 @@
// This is the yacc input for creating the parser for HCL JSON.
%{
package json
import (
"fmt"
"strconv"
"github.com/hashicorp/hcl/hcl"
)
%}
%union {
f float64
num int
str string
obj *hcl.Object
objlist []*hcl.Object
}
%type <f> float
%type <num> int
%type <obj> number object pair value
%type <objlist> array elements members
%type <str> exp
%token <f> FLOAT
%token <num> NUMBER
%token <str> COLON COMMA IDENTIFIER EQUAL NEWLINE STRING
%token <str> LEFTBRACE RIGHTBRACE LEFTBRACKET RIGHTBRACKET
%token <str> TRUE FALSE NULL MINUS PERIOD EPLUS EMINUS
%%
top:
object
{
jsonResult = $1
}
object:
LEFTBRACE members RIGHTBRACE
{
$$ = &hcl.Object{
Type: hcl.ValueTypeObject,
Value: hcl.ObjectList($2).Flat(),
}
}
| LEFTBRACE RIGHTBRACE
{
$$ = &hcl.Object{Type: hcl.ValueTypeObject}
}
members:
pair
{
$$ = []*hcl.Object{$1}
}
| members COMMA pair
{
$$ = append($1, $3)
}
pair:
STRING COLON value
{
$3.Key = $1
$$ = $3
}
value:
STRING
{
$$ = &hcl.Object{
Type: hcl.ValueTypeString,
Value: $1,
}
}
| number
{
$$ = $1
}
| object
{
$$ = $1
}
| array
{
$$ = &hcl.Object{
Type: hcl.ValueTypeList,
Value: $1,
}
}
| TRUE
{
$$ = &hcl.Object{
Type: hcl.ValueTypeBool,
Value: true,
}
}
| FALSE
{
$$ = &hcl.Object{
Type: hcl.ValueTypeBool,
Value: false,
}
}
| NULL
{
$$ = &hcl.Object{
Type: hcl.ValueTypeNil,
Value: nil,
}
}
array:
LEFTBRACKET RIGHTBRACKET
{
$$ = nil
}
| LEFTBRACKET elements RIGHTBRACKET
{
$$ = $2
}
elements:
value
{
$$ = []*hcl.Object{$1}
}
| elements COMMA value
{
$$ = append($1, $3)
}
number:
int
{
$$ = &hcl.Object{
Type: hcl.ValueTypeInt,
Value: $1,
}
}
| float
{
$$ = &hcl.Object{
Type: hcl.ValueTypeFloat,
Value: $1,
}
}
| int exp
{
fs := fmt.Sprintf("%d%s", $1, $2)
f, err := strconv.ParseFloat(fs, 64)
if err != nil {
panic(err)
}
$$ = &hcl.Object{
Type: hcl.ValueTypeFloat,
Value: f,
}
}
| float exp
{
fs := fmt.Sprintf("%f%s", $1, $2)
f, err := strconv.ParseFloat(fs, 64)
if err != nil {
panic(err)
}
$$ = &hcl.Object{
Type: hcl.ValueTypeFloat,
Value: f,
}
}
int:
MINUS int
{
$$ = $2 * -1
}
| NUMBER
{
$$ = $1
}
float:
MINUS float
{
$$ = $2 * -1
}
| FLOAT
{
$$ = $1
}
exp:
EPLUS NUMBER
{
$$ = "e" + strconv.FormatInt(int64($2), 10)
}
| EMINUS NUMBER
{
$$ = "e-" + strconv.FormatInt(int64($2), 10)
}
%%


@ -0,0 +1,43 @@
package json
import (
"io/ioutil"
"path/filepath"
"testing"
)
func TestParse(t *testing.T) {
cases := []struct {
Name string
Err bool
}{
{
"basic.json",
false,
},
{
"object.json",
false,
},
{
"array.json",
false,
},
{
"types.json",
false,
},
}
for _, tc := range cases {
d, err := ioutil.ReadFile(filepath.Join(fixtureDir, tc.Name))
if err != nil {
t.Fatalf("err: %s", err)
}
_, err = Parse(string(d))
if (err != nil) != tc.Err {
t.Fatalf("Input: %s\n\nError: %s", tc.Name, err)
}
}
}


@ -0,0 +1,4 @@
{
"foo": [1, 2, "bar"],
"bar": "baz"
}


@ -0,0 +1,3 @@
{
"foo": "bar"
}


@ -0,0 +1,5 @@
{
"foo": {
"bar": [1,2]
}
}


@ -0,0 +1,10 @@
{
"foo": "bar",
"bar": 7,
"baz": [1,2,3],
"foo": -12,
"bar": 3.14159,
"foo": true,
"bar": false,
"foo": null
}


@ -0,0 +1,554 @@
//line parse.y:3
package json
import __yyfmt__ "fmt"
//line parse.y:5
import (
"fmt"
"strconv"
"github.com/hashicorp/hcl/hcl"
)
//line parse.y:15
type jsonSymType struct {
yys int
f float64
num int
str string
obj *hcl.Object
objlist []*hcl.Object
}
const FLOAT = 57346
const NUMBER = 57347
const COLON = 57348
const COMMA = 57349
const IDENTIFIER = 57350
const EQUAL = 57351
const NEWLINE = 57352
const STRING = 57353
const LEFTBRACE = 57354
const RIGHTBRACE = 57355
const LEFTBRACKET = 57356
const RIGHTBRACKET = 57357
const TRUE = 57358
const FALSE = 57359
const NULL = 57360
const MINUS = 57361
const PERIOD = 57362
const EPLUS = 57363
const EMINUS = 57364
var jsonToknames = []string{
"FLOAT",
"NUMBER",
"COLON",
"COMMA",
"IDENTIFIER",
"EQUAL",
"NEWLINE",
"STRING",
"LEFTBRACE",
"RIGHTBRACE",
"LEFTBRACKET",
"RIGHTBRACKET",
"TRUE",
"FALSE",
"NULL",
"MINUS",
"PERIOD",
"EPLUS",
"EMINUS",
}
var jsonStatenames = []string{}
const jsonEofCode = 1
const jsonErrCode = 2
const jsonMaxDepth = 200
//line parse.y:210
//line yacctab:1
var jsonExca = []int{
-1, 1,
1, -1,
-2, 0,
}
const jsonNprod = 28
const jsonPrivate = 57344
var jsonTokenNames []string
var jsonStates []string
const jsonLast = 53
var jsonAct = []int{
12, 25, 24, 3, 20, 27, 28, 7, 13, 3,
21, 22, 30, 17, 18, 19, 23, 25, 24, 26,
25, 24, 36, 32, 13, 3, 10, 22, 33, 17,
18, 19, 23, 35, 34, 23, 38, 9, 7, 39,
5, 29, 6, 8, 37, 15, 2, 1, 4, 31,
16, 14, 11,
}
var jsonPact = []int{
-9, -1000, -1000, 27, 30, -1000, -1000, 20, -1000, -4,
13, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
-16, -16, -3, 16, -1000, -1000, -1000, 28, 17, -1000,
-1000, 29, -1000, -1000, -1000, -1000, -1000, -1000, 13, -1000,
}
var jsonPgo = []int{
0, 10, 4, 51, 45, 42, 0, 50, 49, 48,
19, 47,
}
var jsonR1 = []int{
0, 11, 4, 4, 9, 9, 5, 6, 6, 6,
6, 6, 6, 6, 7, 7, 8, 8, 3, 3,
3, 3, 2, 2, 1, 1, 10, 10,
}
var jsonR2 = []int{
0, 1, 3, 2, 1, 3, 3, 1, 1, 1,
1, 1, 1, 1, 2, 3, 1, 3, 1, 1,
2, 2, 2, 1, 2, 1, 2, 2,
}
var jsonChk = []int{
-1000, -11, -4, 12, -9, 13, -5, 11, 13, 7,
6, -5, -6, 11, -3, -4, -7, 16, 17, 18,
-2, -1, 14, 19, 5, 4, -10, 21, 22, -10,
15, -8, -6, -2, -1, 5, 5, 15, 7, -6,
}
var jsonDef = []int{
0, -2, 1, 0, 0, 3, 4, 0, 2, 0,
0, 5, 6, 7, 8, 9, 10, 11, 12, 13,
18, 19, 0, 0, 23, 25, 20, 0, 0, 21,
14, 0, 16, 22, 24, 26, 27, 15, 0, 17,
}
var jsonTok1 = []int{
1,
}
var jsonTok2 = []int{
2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
22,
}
var jsonTok3 = []int{
0,
}
//line yaccpar:1
/* parser for yacc output */
var jsonDebug = 0
type jsonLexer interface {
Lex(lval *jsonSymType) int
Error(s string)
}
const jsonFlag = -1000
func jsonTokname(c int) string {
// 4 is TOKSTART above
if c >= 4 && c-4 < len(jsonToknames) {
if jsonToknames[c-4] != "" {
return jsonToknames[c-4]
}
}
return __yyfmt__.Sprintf("tok-%v", c)
}
func jsonStatname(s int) string {
if s >= 0 && s < len(jsonStatenames) {
if jsonStatenames[s] != "" {
return jsonStatenames[s]
}
}
return __yyfmt__.Sprintf("state-%v", s)
}
func jsonlex1(lex jsonLexer, lval *jsonSymType) int {
c := 0
char := lex.Lex(lval)
if char <= 0 {
c = jsonTok1[0]
goto out
}
if char < len(jsonTok1) {
c = jsonTok1[char]
goto out
}
if char >= jsonPrivate {
if char < jsonPrivate+len(jsonTok2) {
c = jsonTok2[char-jsonPrivate]
goto out
}
}
for i := 0; i < len(jsonTok3); i += 2 {
c = jsonTok3[i+0]
if c == char {
c = jsonTok3[i+1]
goto out
}
}
out:
if c == 0 {
c = jsonTok2[1] /* unknown char */
}
if jsonDebug >= 3 {
__yyfmt__.Printf("lex %s(%d)\n", jsonTokname(c), uint(char))
}
return c
}
func jsonParse(jsonlex jsonLexer) int {
var jsonn int
var jsonlval jsonSymType
var jsonVAL jsonSymType
jsonS := make([]jsonSymType, jsonMaxDepth)
Nerrs := 0 /* number of errors */
Errflag := 0 /* error recovery flag */
jsonstate := 0
jsonchar := -1
jsonp := -1
goto jsonstack
ret0:
return 0
ret1:
return 1
jsonstack:
/* put a state and value onto the stack */
if jsonDebug >= 4 {
__yyfmt__.Printf("char %v in %v\n", jsonTokname(jsonchar), jsonStatname(jsonstate))
}
jsonp++
if jsonp >= len(jsonS) {
nyys := make([]jsonSymType, len(jsonS)*2)
copy(nyys, jsonS)
jsonS = nyys
}
jsonS[jsonp] = jsonVAL
jsonS[jsonp].yys = jsonstate
jsonnewstate:
jsonn = jsonPact[jsonstate]
if jsonn <= jsonFlag {
goto jsondefault /* simple state */
}
if jsonchar < 0 {
jsonchar = jsonlex1(jsonlex, &jsonlval)
}
jsonn += jsonchar
if jsonn < 0 || jsonn >= jsonLast {
goto jsondefault
}
jsonn = jsonAct[jsonn]
if jsonChk[jsonn] == jsonchar { /* valid shift */
jsonchar = -1
jsonVAL = jsonlval
jsonstate = jsonn
if Errflag > 0 {
Errflag--
}
goto jsonstack
}
jsondefault:
/* default state action */
jsonn = jsonDef[jsonstate]
if jsonn == -2 {
if jsonchar < 0 {
jsonchar = jsonlex1(jsonlex, &jsonlval)
}
/* look through exception table */
xi := 0
for {
if jsonExca[xi+0] == -1 && jsonExca[xi+1] == jsonstate {
break
}
xi += 2
}
for xi += 2; ; xi += 2 {
jsonn = jsonExca[xi+0]
if jsonn < 0 || jsonn == jsonchar {
break
}
}
jsonn = jsonExca[xi+1]
if jsonn < 0 {
goto ret0
}
}
if jsonn == 0 {
/* error ... attempt to resume parsing */
switch Errflag {
case 0: /* brand new error */
jsonlex.Error("syntax error")
Nerrs++
if jsonDebug >= 1 {
__yyfmt__.Printf("%s", jsonStatname(jsonstate))
__yyfmt__.Printf(" saw %s\n", jsonTokname(jsonchar))
}
fallthrough
case 1, 2: /* incompletely recovered error ... try again */
Errflag = 3
/* find a state where "error" is a legal shift action */
for jsonp >= 0 {
jsonn = jsonPact[jsonS[jsonp].yys] + jsonErrCode
if jsonn >= 0 && jsonn < jsonLast {
jsonstate = jsonAct[jsonn] /* simulate a shift of "error" */
if jsonChk[jsonstate] == jsonErrCode {
goto jsonstack
}
}
/* the current p has no shift on "error", pop stack */
if jsonDebug >= 2 {
__yyfmt__.Printf("error recovery pops state %d\n", jsonS[jsonp].yys)
}
jsonp--
}
/* there is no state on the stack with an error shift ... abort */
goto ret1
case 3: /* no shift yet; clobber input char */
if jsonDebug >= 2 {
__yyfmt__.Printf("error recovery discards %s\n", jsonTokname(jsonchar))
}
if jsonchar == jsonEofCode {
goto ret1
}
jsonchar = -1
goto jsonnewstate /* try again in the same state */
}
}
/* reduction by production jsonn */
if jsonDebug >= 2 {
__yyfmt__.Printf("reduce %v in:\n\t%v\n", jsonn, jsonStatname(jsonstate))
}
jsonnt := jsonn
jsonpt := jsonp
_ = jsonpt // guard against "declared and not used"
jsonp -= jsonR2[jsonn]
jsonVAL = jsonS[jsonp+1]
/* consult goto table to find next state */
jsonn = jsonR1[jsonn]
jsong := jsonPgo[jsonn]
jsonj := jsong + jsonS[jsonp].yys + 1
if jsonj >= jsonLast {
jsonstate = jsonAct[jsong]
} else {
jsonstate = jsonAct[jsonj]
if jsonChk[jsonstate] != -jsonn {
jsonstate = jsonAct[jsong]
}
}
// dummy call; replaced with literal code
switch jsonnt {
case 1:
//line parse.y:39
{
jsonResult = jsonS[jsonpt-0].obj
}
case 2:
//line parse.y:45
{
jsonVAL.obj = &hcl.Object{
Type: hcl.ValueTypeObject,
Value: hcl.ObjectList(jsonS[jsonpt-1].objlist).Flat(),
}
}
case 3:
//line parse.y:52
{
jsonVAL.obj = &hcl.Object{Type: hcl.ValueTypeObject}
}
case 4:
//line parse.y:58
{
jsonVAL.objlist = []*hcl.Object{jsonS[jsonpt-0].obj}
}
case 5:
//line parse.y:62
{
jsonVAL.objlist = append(jsonS[jsonpt-2].objlist, jsonS[jsonpt-0].obj)
}
case 6:
//line parse.y:68
{
jsonS[jsonpt-0].obj.Key = jsonS[jsonpt-2].str
jsonVAL.obj = jsonS[jsonpt-0].obj
}
case 7:
//line parse.y:75
{
jsonVAL.obj = &hcl.Object{
Type: hcl.ValueTypeString,
Value: jsonS[jsonpt-0].str,
}
}
case 8:
//line parse.y:82
{
jsonVAL.obj = jsonS[jsonpt-0].obj
}
case 9:
//line parse.y:86
{
jsonVAL.obj = jsonS[jsonpt-0].obj
}
case 10:
//line parse.y:90
{
jsonVAL.obj = &hcl.Object{
Type: hcl.ValueTypeList,
Value: jsonS[jsonpt-0].objlist,
}
}
case 11:
//line parse.y:97
{
jsonVAL.obj = &hcl.Object{
Type: hcl.ValueTypeBool,
Value: true,
}
}
case 12:
//line parse.y:104
{
jsonVAL.obj = &hcl.Object{
Type: hcl.ValueTypeBool,
Value: false,
}
}
case 13:
//line parse.y:111
{
jsonVAL.obj = &hcl.Object{
Type: hcl.ValueTypeNil,
Value: nil,
}
}
case 14:
//line parse.y:120
{
jsonVAL.objlist = nil
}
case 15:
//line parse.y:124
{
jsonVAL.objlist = jsonS[jsonpt-1].objlist
}
case 16:
//line parse.y:130
{
jsonVAL.objlist = []*hcl.Object{jsonS[jsonpt-0].obj}
}
case 17:
//line parse.y:134
{
jsonVAL.objlist = append(jsonS[jsonpt-2].objlist, jsonS[jsonpt-0].obj)
}
case 18:
//line parse.y:140
{
jsonVAL.obj = &hcl.Object{
Type: hcl.ValueTypeInt,
Value: jsonS[jsonpt-0].num,
}
}
case 19:
//line parse.y:147
{
jsonVAL.obj = &hcl.Object{
Type: hcl.ValueTypeFloat,
Value: jsonS[jsonpt-0].f,
}
}
case 20:
//line parse.y:154
{
fs := fmt.Sprintf("%d%s", jsonS[jsonpt-1].num, jsonS[jsonpt-0].str)
f, err := strconv.ParseFloat(fs, 64)
if err != nil {
panic(err)
}
jsonVAL.obj = &hcl.Object{
Type: hcl.ValueTypeFloat,
Value: f,
}
}
case 21:
//line parse.y:167
{
fs := fmt.Sprintf("%f%s", jsonS[jsonpt-1].f, jsonS[jsonpt-0].str)
f, err := strconv.ParseFloat(fs, 64)
if err != nil {
panic(err)
}
jsonVAL.obj = &hcl.Object{
Type: hcl.ValueTypeFloat,
Value: f,
}
}
case 22:
//line parse.y:182
{
jsonVAL.num = jsonS[jsonpt-0].num * -1
}
case 23:
//line parse.y:186
{
jsonVAL.num = jsonS[jsonpt-0].num
}
case 24:
//line parse.y:192
{
jsonVAL.f = jsonS[jsonpt-0].f * -1
}
case 25:
//line parse.y:196
{
jsonVAL.f = jsonS[jsonpt-0].f
}
case 26:
//line parse.y:202
{
jsonVAL.str = "e" + strconv.FormatInt(int64(jsonS[jsonpt-0].num), 10)
}
case 27:
//line parse.y:206
{
jsonVAL.str = "e-" + strconv.FormatInt(int64(jsonS[jsonpt-0].num), 10)
}
}
goto jsonstack /* stack new state and value */
}

31
Godeps/_workspace/src/github.com/hashicorp/hcl/lex.go generated vendored Normal file

@ -0,0 +1,31 @@
package hcl
import (
"unicode"
)
type lexModeValue byte
const (
lexModeUnknown lexModeValue = iota
lexModeHcl
lexModeJson
)
// lexMode returns whether we're going to be parsing in JSON
// mode or HCL mode.
func lexMode(v string) lexModeValue {
for _, r := range v {
if unicode.IsSpace(r) {
continue
}
if r == '{' {
return lexModeJson
} else {
return lexModeHcl
}
}
return lexModeHcl
}


@ -0,0 +1,37 @@
package hcl
import (
"testing"
)
func TestLexMode(t *testing.T) {
cases := []struct {
Input string
Mode lexModeValue
}{
{
"",
lexModeHcl,
},
{
"foo",
lexModeHcl,
},
{
"{}",
lexModeJson,
},
{
" {}",
lexModeJson,
},
}
for i, tc := range cases {
actual := lexMode(tc.Input)
if actual != tc.Mode {
t.Fatalf("%d: %#v", i, actual)
}
}
}


@ -0,0 +1,22 @@
package hcl
import (
"fmt"
"github.com/hashicorp/hcl/hcl"
"github.com/hashicorp/hcl/json"
)
// Parse parses the given input and returns the root object.
//
// The input format can be either HCL or JSON.
func Parse(input string) (*hcl.Object, error) {
switch lexMode(input) {
case lexModeHcl:
return hcl.Parse(input)
case lexModeJson:
return json.Parse(input)
}
return nil, fmt.Errorf("unknown config format")
}
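
A short sketch of the format-agnostic entry point above: the same top-level Parse accepts both forms, choosing JSON mode when the first non-whitespace rune is '{' (see lexMode). The inputs and the import path github.com/hashicorp/hcl are illustrative assumptions based on the vendored layout:

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl"
)

func main() {
	for _, input := range []string{
		`foo = "bar"`,    // lexModeHcl
		`{"foo": "bar"}`, // lexModeJson
	} {
		obj, err := hcl.Parse(input)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(obj.Type) // ValueTypeObject for both inputs
	}
}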


@ -0,0 +1,2 @@
foo = "bar"
bar = "${file("bing/bong.txt")}"


@ -0,0 +1,4 @@
{
"foo": "bar",
"bar": "${file(\"bing/bong.txt\")}"
}


@ -0,0 +1 @@
count = "3"


@ -0,0 +1,3 @@
foo="bar"
bar="${file("bing/bong.txt")}"
foo-bar="baz"


@ -0,0 +1,15 @@
key "" {
policy = "read"
}
key "foo/" {
policy = "write"
}
key "foo/bar/" {
policy = "read"
}
key "foo/bar/baz" {
policy = "deny"
}


@ -0,0 +1,19 @@
{
"key": {
"": {
"policy": "read"
},
"foo/": {
"policy": "write"
},
"foo/bar/": {
"policy": "read"
},
"foo/bar/baz": {
"policy": "deny"
}
}
}


@ -0,0 +1,10 @@
variable "foo" {
default = "bar"
description = "bar"
}
variable "amis" {
default = {
east = "foo"
}
}


@ -0,0 +1,14 @@
{
"variable": {
"foo": {
"default": "bar",
"description": "bar"
},
"amis": {
"default": {
"east": "foo"
}
}
}
}


@ -0,0 +1 @@
resource "foo" {}


@ -0,0 +1 @@
foo = "bar\"baz\\n"


@ -0,0 +1,2 @@
foo = "bar"
Key = 7


@ -0,0 +1 @@
a = 1.02


@ -0,0 +1,3 @@
{
"a": 1.02
}


@ -0,0 +1,3 @@
{
"foo": "bar\nbaz"
}


@ -0,0 +1,4 @@
foo = <<EOF
bar
baz
EOF


@ -0,0 +1,5 @@
/*
foo = "bar/*"
*/
bar = "value"


@ -0,0 +1,6 @@
a = 1e-10
b = 1e+10
c = 1e10
d = 1.2e-10
e = 1.2e+10
f = 1.2e10


@ -0,0 +1,8 @@
{
"a": 1e-10,
"b": 1e+10,
"c": 1e10,
"d": 1.2e-10,
"e": 1.2e+10,
"f": 1.2e10
}


@ -0,0 +1,5 @@
// This is a test structure for the lexer
foo "baz" {
key = 7
foo = "bar"
}


@ -0,0 +1,8 @@
{
"foo": [{
"baz": [{
"key": 7,
"foo": "bar"
}]
}]
}


@ -0,0 +1,9 @@
// This is a test structure for the lexer
foo "baz" {
key = 7
foo = "bar"
}
foo {
key = 7
}


@ -0,0 +1,10 @@
{
"foo": [{
"baz": {
"key": 7,
"foo": "bar"
}
}, {
"key": 7
}]
}


@ -0,0 +1,8 @@
{
"foo": {
"baz": {
"key": 7,
"foo": "bar"
}
}
}


@ -0,0 +1,7 @@
foo {
key = 7
}
foo {
foo = "bar"
}


@ -0,0 +1,6 @@
foo {
key = 7
}
foo {
key = 12
}


@ -0,0 +1,7 @@
{
"foo": [{
"key": 7
}, {
"key": 12
}]
}


@ -0,0 +1,16 @@
{
"bar": {
"foo": {
"name": "terraform_example",
"ingress": [
{
"from_port": 22
},
{
"from_port": 80
}
]
}
}
}

Some files were not shown because too many files have changed in this diff.