diff --git a/.gitignore b/.gitignore
index 1585b6845..10cf9cf91 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,7 +1,7 @@
bufferflow_tinyg_old.md
-serial-port-json-server
+arduino-create-agent
snapshot/*
public/
diff --git a/Gopkg.lock b/Gopkg.lock
index 6c242cf7c..6c01bdf40 100644
--- a/Gopkg.lock
+++ b/Gopkg.lock
@@ -1,14 +1,6 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
-[[projects]]
- digest = "1:69b1cc331fca23d702bd72f860c6a647afd0aa9fcbc1d0659b1365e26546dd70"
- name = "github.com/Sirupsen/logrus"
- packages = ["."]
- pruneopts = "UT"
- revision = "bcd833dfe83d3cebad139e4a29ed79cb2318bf95"
- version = "v1.2.0"
-
[[projects]]
digest = "1:705c40022f5c03bf96ffeb6477858d88565064485a513abcd0f11a0911546cb6"
name = "github.com/blang/semver"
@@ -17,6 +9,14 @@
revision = "2ee87856327ba09384cabd113bc6b5d174e9ec0f"
version = "v3.5.1"
+[[projects]]
+ digest = "1:db8e2f3c8cc717afe53c6941776312bd7046d45d9fb757c5f3f32fd3cc46562d"
+ name = "github.com/codeclysm/extract"
+ packages = ["."]
+ pruneopts = "UT"
+ revision = "de8493db4b3e06921c3ee97fa0b200435a8a796d"
+ version = "v2.0.0"
+
[[projects]]
branch = "master"
digest = "1:210dd3b6f30ebc5ff2e0b4795b5141ee4578c29b38df0326fb140863d45c01fa"
@@ -41,6 +41,22 @@
revision = "30ad2d03e9fe97a029a6a6dc87bff319e0bb6bc4"
version = "v1.0.1"
+[[projects]]
+ branch = "master"
+ digest = "1:376e5a9fdb57657725ff19b4c510c3f5e1f804ab4ab0ff3064d8f4dc01e40fe2"
+ name = "github.com/dimfeld/httppath"
+ packages = ["."]
+ pruneopts = "UT"
+ revision = "ee938bf735983d53694d79138ad9820efff94c92"
+
+[[projects]]
+ digest = "1:09d3ceb4456b0b463c5730198a9138e383089b27c0c10b1032789a0618a6dfe6"
+ name = "github.com/dimfeld/httptreemux"
+ packages = ["."]
+ pruneopts = "UT"
+ revision = "a454a10de4a11f751681a0914461ab9e98c2a3ff"
+ version = "5.0.2"
+
[[projects]]
branch = "master"
digest = "1:8df8718256c87ae7984cbfa1304b9efdd8b5952dcb94a8a51cfe9d61f08785ee"
@@ -207,6 +223,14 @@
revision = "36b14963da70d11297d313183d7e6388c8510e1e"
version = "1.0.0"
+[[projects]]
+ branch = "master"
+ digest = "1:7d1a655e1f16ccaf1adb026bdeece28499ecb0ead32d32accc6ed6f2c40cacee"
+ name = "github.com/juju/errors"
+ packages = ["."]
+ pruneopts = "UT"
+ revision = "089d3ea4e4d597bd98acac068193d341983326a3"
+
[[projects]]
branch = "master"
digest = "1:caf6db28595425c0e0f2301a00257d11712f65c1878e12cffc42f6b9a9cf3f23"
@@ -239,6 +263,14 @@
revision = "1455def202f6e05b95cc7bfc7e8ae67ae5141eba"
version = "v0.1.0"
+[[projects]]
+ branch = "master"
+ digest = "1:393f23e872c9141dbe315c428dc3eac9406f029be7ab0e1cae2374486c07aa54"
+ name = "github.com/manveru/faker"
+ packages = ["."]
+ pruneopts = "UT"
+ revision = "9fbc68a78c4dbc7914e1a23f88f126bea4383b97"
+
[[projects]]
digest = "1:fa610f9fe6a93f4a75e64c83673dfff9bf1a34bbb21e6102021b6bc7850834a3"
name = "github.com/mattn/go-isatty"
@@ -310,6 +342,14 @@
revision = "792786c7400a136282c1664665ae0a8db921c6c2"
version = "v1.0.0"
+[[projects]]
+ digest = "1:d917313f309bda80d27274d53985bc65651f81a5b66b820749ac7f8ef061fd04"
+ name = "github.com/sergi/go-diff"
+ packages = ["diffmatchpatch"]
+ pruneopts = "UT"
+ revision = "1744e2970ca51c86172c8190fadad617561ed6e7"
+ version = "v1.0.0"
+
[[projects]]
branch = "master"
digest = "1:e244ec9ef8a91f5d0f640c4d14db6d65938182bb976c07f01f0f6e8f4605c4e9"
@@ -318,6 +358,14 @@
pruneopts = "UT"
revision = "495cbb862a9c6348e0fff479ed1e7b70c871b372"
+[[projects]]
+ digest = "1:69b1cc331fca23d702bd72f860c6a647afd0aa9fcbc1d0659b1365e26546dd70"
+ name = "github.com/sirupsen/logrus"
+ packages = ["."]
+ pruneopts = "UT"
+ revision = "bcd833dfe83d3cebad139e4a29ed79cb2318bf95"
+ version = "v1.2.0"
+
[[projects]]
branch = "master"
digest = "1:39853e1ae46a02816e2419e1f590e00682b1a6b60bb988597cf2efb84314da45"
@@ -349,6 +397,22 @@
pruneopts = "UT"
revision = "a3153f7040e90324c58c6287535e26a0ac5c1cc1"
+[[projects]]
+ branch = "master"
+ digest = "1:6f7f5a5452e6dc8b0665b0fd2b779c3c6b7e70a121e0661aeb03daef7baae58d"
+ name = "github.com/zach-klippenstein/goregen"
+ packages = ["."]
+ pruneopts = "UT"
+ revision = "795b5e3961ea1912fde60af417ad85e86acc0d6a"
+
+[[projects]]
+ branch = "master"
+ digest = "1:975e88bfd9ed90a0e939a9e67ecab0682a28c0b9ed021bd30f469a0b5b4e3739"
+ name = "go.bug.st/downloader"
+ packages = ["."]
+ pruneopts = "UT"
+ revision = "9b8976a44d87b5d86444bccc3ed875b2530f45e0"
+
[[projects]]
branch = "master"
digest = "1:49b892f63d6b54df49cc35901eba38acc3c5309f919b40b3d1e2ff859c8108ba"
@@ -361,6 +425,29 @@
pruneopts = "UT"
revision = "5f7892a7bb453066bdc6683b9b5d24d9dee03ec1"
+[[projects]]
+ branch = "master"
+ digest = "1:f3f6a6b258e99d130be6d10ab74bf1a8eb25796af7258e85354a0b0602b3e122"
+ name = "goa.design/goa"
+ packages = [
+ ".",
+ "codegen",
+ "codegen/generator",
+ "codegen/server",
+ "codegen/service",
+ "dsl",
+ "eval",
+ "expr",
+ "grpc/codegen",
+ "http",
+ "http/codegen",
+ "http/codegen/openapi",
+ "http/middleware",
+ "pkg",
+ ]
+ pruneopts = "UT"
+ revision = "40843d63b0e4f06ef5004d2c526ee64814b24542"
+
[[projects]]
branch = "master"
digest = "1:04c834d4d75b1e3c678ffadb336e4c5f8b58786cb7bd1f3ee19a166d76aa9b56"
@@ -411,6 +498,26 @@
pruneopts = "UT"
revision = "62eef0e2fa9b2c385f7b2778e763486da6880d37"
+[[projects]]
+ branch = "master"
+ digest = "1:b5cd6adda1381d3b37182aa4a403bca5e69bc7ff6eb412b8618fe38fce3400bd"
+ name = "golang.org/x/tools"
+ packages = [
+ "go/ast/astutil",
+ "go/gcexportdata",
+ "go/internal/cgo",
+ "go/internal/gcimporter",
+ "go/internal/packagesdriver",
+ "go/packages",
+ "go/types/typeutil",
+ "imports",
+ "internal/fastwalk",
+ "internal/gopathwalk",
+ "internal/semver",
+ ]
+ pruneopts = "UT"
+ revision = "b258f6da23835bf37e95cc50af792447c7c67fe4"
+
[[projects]]
digest = "1:1b4724d3c8125f6044925f02b485b74bfec9905cbf579d95aafd1a6c8f8447d3"
name = "gopkg.in/go-playground/validator.v8"
@@ -419,6 +526,18 @@
revision = "5f57d2222ad794d0dffb07e664ea05e2ee07d60c"
version = "v8.18.1"
+[[projects]]
+ digest = "1:8b1a1e6d3dcef7c542f604f8b9f8013d1d13c3d61f8e9cbe368afa2735c898c2"
+ name = "gopkg.in/h2non/filetype.v1"
+ packages = [
+ ".",
+ "matchers",
+ "types",
+ ]
+ pruneopts = "UT"
+ revision = "cc14fdc9ca0e4c2bafad7458f6ff79fd3947cfbb"
+ version = "v1.0.5"
+
[[projects]]
branch = "v0"
digest = "1:580aa4af38b87e235950c3a09841bb7f9f329e2495ec5d43c42d7f7fb7f5a464"
@@ -431,18 +550,19 @@
revision = "d8b0b1d421aa1cbf392c05869f8abbc669bb7066"
[[projects]]
- digest = "1:cacb98d52c60c337c2ce95a7af83ba0313a93ce5e73fa9e99a96aff70776b9d3"
+ digest = "1:4d2e5a73dc1500038e504a8d78b986630e3626dc027bc030ba5c75da257cdb96"
name = "gopkg.in/yaml.v2"
packages = ["."]
pruneopts = "UT"
- revision = "a5b47d31c556af34a302ce5d659e6fea44d90de0"
+ revision = "51d6538a90f86fe93ac480b35f37b2be17fef232"
+ version = "v2.2.2"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
input-imports = [
- "github.com/Sirupsen/logrus",
"github.com/blang/semver",
+ "github.com/codeclysm/extract",
"github.com/facchinm/systray",
"github.com/gin-gonic/gin",
"github.com/go-ini/ini",
@@ -455,11 +575,18 @@
"github.com/oleksandr/bonjour",
"github.com/pkg/errors",
"github.com/sfreiberg/simplessh",
+ "github.com/sirupsen/logrus",
"github.com/skratchdot/open-golang/open",
"github.com/stretchr/testify/assert",
"github.com/xrash/smetrics",
+ "go.bug.st/downloader",
"go.bug.st/serial.v1",
"go.bug.st/serial.v1/enumerator",
+ "goa.design/goa",
+ "goa.design/goa/codegen/generator",
+ "goa.design/goa/dsl",
+ "goa.design/goa/http",
+ "goa.design/goa/http/middleware",
"golang.org/x/crypto/openpgp",
"gopkg.in/inconshreveable/go-update.v0",
]
diff --git a/Gopkg.toml b/Gopkg.toml
index e89510da8..bac93b144 100644
--- a/Gopkg.toml
+++ b/Gopkg.toml
@@ -24,9 +24,10 @@
# go-tests = true
# unused-packages = true
+required = ["goa.design/goa/codegen/generator"]
[[constraint]]
- name = "github.com/Sirupsen/logrus"
+ name = "github.com/sirupsen/logrus"
version = "1.2.0"
[[constraint]]
diff --git a/bufferflow_default.go b/bufferflow_default.go
index 4294fb824..7f37ac3d2 100644
--- a/bufferflow_default.go
+++ b/bufferflow_default.go
@@ -1,7 +1,7 @@
package main
import (
- log "github.com/Sirupsen/logrus"
+ log "github.com/sirupsen/logrus"
)
type BufferflowDefault struct {
diff --git a/bufferflow_timed.go b/bufferflow_timed.go
index ee2e92063..c4a7b6117 100644
--- a/bufferflow_timed.go
+++ b/bufferflow_timed.go
@@ -4,7 +4,7 @@ import (
"encoding/json"
"time"
- log "github.com/Sirupsen/logrus"
+ log "github.com/sirupsen/logrus"
)
type BufferflowTimed struct {
diff --git a/bufferflow_timedraw.go b/bufferflow_timedraw.go
index 27de6da48..0c3acf7b8 100644
--- a/bufferflow_timedraw.go
+++ b/bufferflow_timedraw.go
@@ -4,7 +4,7 @@ import (
"encoding/json"
"time"
- log "github.com/Sirupsen/logrus"
+ log "github.com/sirupsen/logrus"
)
type BufferflowTimedRaw struct {
diff --git a/certificates.go b/certificates.go
index d95d1d8f5..8c2628fb9 100644
--- a/certificates.go
+++ b/certificates.go
@@ -24,8 +24,8 @@ import (
"text/template"
"time"
- log "github.com/Sirupsen/logrus"
"github.com/gin-gonic/gin"
+ log "github.com/sirupsen/logrus"
)
var (
diff --git a/conn.go b/conn.go
index 450565dad..29367b0f1 100644
--- a/conn.go
+++ b/conn.go
@@ -18,11 +18,11 @@ import (
"os"
"path/filepath"
- log "github.com/Sirupsen/logrus"
"github.com/arduino/arduino-create-agent/upload"
"github.com/arduino/arduino-create-agent/utilities"
"github.com/gin-gonic/gin"
socketio "github.com/googollee/go-socket.io"
+ log "github.com/sirupsen/logrus"
)
type connection struct {
diff --git a/design/design.go b/design/design.go
new file mode 100644
index 000000000..d65793916
--- /dev/null
+++ b/design/design.go
@@ -0,0 +1,13 @@
+package design
+
+import . "goa.design/goa/dsl"
+
+var _ = API("arduino-create-agent", func() {
+ Title("Arduino Create Agent")
+ Description(`A companion of Arduino Create.
+ Allows the website to perform operations on the user computer,
+ such as detecting which boards are connected and upload sketches on them.`)
+ HTTP(func() {
+ Path("/v2")
+ })
+})
diff --git a/design/docs.go b/design/docs.go
new file mode 100644
index 000000000..d86fd9065
--- /dev/null
+++ b/design/docs.go
@@ -0,0 +1,10 @@
+package design
+
+import . "goa.design/goa/dsl"
+
+var _ = Service("docs", func() {
+ HTTP(func() {
+ Path("/docs")
+ })
+ Files("/pkgs", "docs/pkgs.html")
+})
diff --git a/design/pkgs.go b/design/pkgs.go
new file mode 100644
index 000000000..5fba81bf6
--- /dev/null
+++ b/design/pkgs.go
@@ -0,0 +1,136 @@
+package design
+
+import . "goa.design/goa/dsl"
+
+var _ = Service("indexes", func() {
+ Description("The indexes service manages the package_index files")
+
+ Error("invalid_url", ErrorResult, "url invalid")
+ HTTP(func() {
+ Response("invalid_url", StatusBadRequest)
+ })
+
+ Method("list", func() {
+ Result(ArrayOf(String))
+ HTTP(func() {
+ GET("/pkgs/indexes")
+ Response(StatusOK)
+ })
+ })
+
+ Method("add", func() {
+ Payload(IndexPayload)
+ Result(Operation)
+ HTTP(func() {
+ POST("/pkgs/indexes/add")
+ Response(StatusOK)
+ })
+ })
+
+ Method("remove", func() {
+ Payload(IndexPayload)
+ Result(Operation)
+ HTTP(func() {
+ POST("/pkgs/indexes/delete")
+ Response(StatusOK)
+ })
+ })
+})
+
+var _ = Service("tools", func() {
+ Description("The tools service manages the available and installed tools")
+
+ Method("available", func() {
+ Result(CollectionOf(Tool))
+ HTTP(func() {
+ GET("/pkgs/tools/available")
+ Response(StatusOK)
+ })
+ })
+
+ Method("installed", func() {
+ Result(CollectionOf(Tool))
+ HTTP(func() {
+ GET("/pkgs/tools/installed")
+ Response(StatusOK)
+ })
+ })
+
+ Method("install", func() {
+ Error("not_found", ErrorResult, "tool not found")
+ HTTP(func() {
+ Response("not_found", StatusBadRequest)
+ })
+ Payload(ToolPayload)
+ Result(Operation)
+ HTTP(func() {
+ PUT("/pkgs/tools/installed")
+ Response(StatusOK)
+ })
+ })
+
+ Method("remove", func() {
+ Payload(ToolPayload)
+ Result(Operation)
+
+ HTTP(func() {
+ DELETE("/pkgs/tools/installed/{packager}/{name}/{version}")
+ Response(StatusOK)
+ })
+ })
+})
+
+var IndexPayload = Type("arduino.index", func() {
+ TypeName("IndexPayload")
+
+ Attribute("url", String, "The url of the index file", func() {
+ Example("http://downloads.arduino.cc/packages/package_index.json")
+ })
+ Required("url")
+})
+
+var ToolPayload = Type("arduino.tool", func() {
+ Description(`A tool is an executable program that can upload sketches.
+ If url is absent the tool will be searched among the package index installed`)
+ TypeName("ToolPayload")
+
+ Attribute("name", String, "The name of the tool", func() {
+ Example("avrdude")
+ })
+ Attribute("version", String, "The version of the tool", func() {
+ Example("6.3.0-arduino9")
+ })
+ Attribute("packager", String, "The packager of the tool", func() {
+ Example("arduino")
+ })
+
+ Attribute("url", String, `The url where the package can be found. Optional.
+ If present checksum must also be present.`)
+
+ Attribute("checksum", String, `A checksum of the archive. Mandatory when url is present.
+ This ensures that the package is downloaded correctly.`)
+
+ Required("name", "version", "packager")
+})
+
+var Tool = ResultType("application/vnd.arduino.tool", func() {
+ Description("A tool is an executable program that can upload sketches.")
+ TypeName("Tool")
+ Reference(ToolPayload)
+
+ Attribute("name")
+ Attribute("version")
+ Attribute("packager")
+
+ Required("name", "version", "packager")
+})
+
+var Operation = ResultType("application/vnd.arduino.operation", func() {
+ Description("Describes the result of an operation.")
+ TypeName("Operation")
+
+ Attribute("status", String, "The status of the operation", func() {
+ Example("ok")
+ })
+ Required("status")
+})
diff --git a/discovery.go b/discovery.go
index 5c75fabe5..e83d7a46d 100644
--- a/discovery.go
+++ b/discovery.go
@@ -33,8 +33,8 @@ import (
"strings"
"time"
- log "github.com/Sirupsen/logrus"
"github.com/oleksandr/bonjour"
+ log "github.com/sirupsen/logrus"
)
const timeoutConst = 2
diff --git a/docs/pkgs.html b/docs/pkgs.html
new file mode 100644
index 000000000..c505805f5
--- /dev/null
+++ b/docs/pkgs.html
@@ -0,0 +1,123 @@
+
+
+
+
+ Interactive docs for pkgs api
+
+
+
+ The pkgs api manages the indexes and tools installed on the system.
+
+ Indexes
+ An index file contains the info about a core and its tools. You can see an example at https://downloads.arduino.cc/packages/package_index.json
+
+ index files are saved (with an urlencoded filename) in the folder ~/.arduino-create/indexes
+
+ List Indexes
+ You can list the indexes installed in the system with this simple GET
+
+
+
+
+ Add Indexes
+ You can add a new index with a POST request
+
+
+
+
+ You can now check if the new package_index was downloaded by repeating List Indexes.
+
+ Remove Indexes
+ You can remove an index with a POST request
+
+
+
+
+ You can now check if the new package_index was removed by repeating List Indexes.
+
+
+ Tools
+ A tool is an executable that can be used to program a board.
+
+ tools are saved in the folder ~/.arduino-create
with a structure like {packager}/{name}/{version}
+
+ List Available Tools
+ You can list the available tools that could be installed from an index with this simple GET. (Remember to add
+ indexes)
+
+
+
+
+ List Installed Tools
+ You can list the tools installed in the system with this simple GET
+
+
+
+
+
+ Install a tool from an index file
+ You can install one of the available tools with a PUT request
+
+
+
+
+
+ Remove an installed tool
+ You can remove one of the installed tools with a DELETE request
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/gen/docs/client.go b/gen/docs/client.go
new file mode 100644
index 000000000..480f8dc7a
--- /dev/null
+++ b/gen/docs/client.go
@@ -0,0 +1,21 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// docs client
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package docs
+
+import (
+ goa "goa.design/goa"
+)
+
+// Client is the "docs" service client.
+type Client struct {
+}
+
+// NewClient initializes a "docs" service client given the endpoints.
+func NewClient(goa.Endpoint) *Client {
+ return &Client{}
+}
diff --git a/gen/docs/endpoints.go b/gen/docs/endpoints.go
new file mode 100644
index 000000000..c03e4ab49
--- /dev/null
+++ b/gen/docs/endpoints.go
@@ -0,0 +1,25 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// docs endpoints
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package docs
+
+import (
+ goa "goa.design/goa"
+)
+
+// Endpoints wraps the "docs" service endpoints.
+type Endpoints struct {
+}
+
+// NewEndpoints wraps the methods of the "docs" service with endpoints.
+func NewEndpoints(s Service) *Endpoints {
+ return &Endpoints{}
+}
+
+// Use applies the given middleware to all the "docs" service endpoints.
+func (e *Endpoints) Use(m func(goa.Endpoint) goa.Endpoint) {
+}
diff --git a/gen/docs/service.go b/gen/docs/service.go
new file mode 100644
index 000000000..3b1e0a265
--- /dev/null
+++ b/gen/docs/service.go
@@ -0,0 +1,22 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// docs service
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package docs
+
+// Service is the docs service interface.
+type Service interface {
+}
+
+// ServiceName is the name of the service as defined in the design. This is the
+// same value that is set in the endpoint request contexts under the ServiceKey
+// key.
+const ServiceName = "docs"
+
+// MethodNames lists the service method names as defined in the design. These
+// are the same values that are set in the endpoint request contexts under the
+// MethodKey key.
+var MethodNames = [0]string{}
diff --git a/gen/http/cli/arduino_create_agent/cli.go b/gen/http/cli/arduino_create_agent/cli.go
new file mode 100644
index 000000000..bdf38b33c
--- /dev/null
+++ b/gen/http/cli/arduino_create_agent/cli.go
@@ -0,0 +1,324 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// arduino-create-agent HTTP client CLI support package
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package cli
+
+import (
+ "flag"
+ "fmt"
+ "net/http"
+ "os"
+
+ indexesc "github.com/arduino/arduino-create-agent/gen/http/indexes/client"
+ toolsc "github.com/arduino/arduino-create-agent/gen/http/tools/client"
+ goa "goa.design/goa"
+ goahttp "goa.design/goa/http"
+)
+
+// UsageCommands returns the set of commands and sub-commands using the format
+//
+// command (subcommand1|subcommand2|...)
+//
+func UsageCommands() string {
+ return `indexes (list|add|remove)
+tools (available|installed|install|remove)
+`
+}
+
+// UsageExamples produces an example of a valid invocation of the CLI tool.
+func UsageExamples() string {
+ return os.Args[0] + ` indexes list` + "\n" +
+ os.Args[0] + ` tools available` + "\n" +
+ ""
+}
+
+// ParseEndpoint returns the endpoint and payload as specified on the command
+// line.
+func ParseEndpoint(
+ scheme, host string,
+ doer goahttp.Doer,
+ enc func(*http.Request) goahttp.Encoder,
+ dec func(*http.Response) goahttp.Decoder,
+ restore bool,
+) (goa.Endpoint, interface{}, error) {
+ var (
+ indexesFlags = flag.NewFlagSet("indexes", flag.ContinueOnError)
+
+ indexesListFlags = flag.NewFlagSet("list", flag.ExitOnError)
+
+ indexesAddFlags = flag.NewFlagSet("add", flag.ExitOnError)
+ indexesAddBodyFlag = indexesAddFlags.String("body", "REQUIRED", "")
+
+ indexesRemoveFlags = flag.NewFlagSet("remove", flag.ExitOnError)
+ indexesRemoveBodyFlag = indexesRemoveFlags.String("body", "REQUIRED", "")
+
+ toolsFlags = flag.NewFlagSet("tools", flag.ContinueOnError)
+
+ toolsAvailableFlags = flag.NewFlagSet("available", flag.ExitOnError)
+
+ toolsInstalledFlags = flag.NewFlagSet("installed", flag.ExitOnError)
+
+ toolsInstallFlags = flag.NewFlagSet("install", flag.ExitOnError)
+ toolsInstallBodyFlag = toolsInstallFlags.String("body", "REQUIRED", "")
+
+ toolsRemoveFlags = flag.NewFlagSet("remove", flag.ExitOnError)
+ toolsRemoveBodyFlag = toolsRemoveFlags.String("body", "REQUIRED", "")
+ toolsRemovePackagerFlag = toolsRemoveFlags.String("packager", "REQUIRED", "The packager of the tool")
+ toolsRemoveNameFlag = toolsRemoveFlags.String("name", "REQUIRED", "The name of the tool")
+ toolsRemoveVersionFlag = toolsRemoveFlags.String("version", "REQUIRED", "The version of the tool")
+ )
+ indexesFlags.Usage = indexesUsage
+ indexesListFlags.Usage = indexesListUsage
+ indexesAddFlags.Usage = indexesAddUsage
+ indexesRemoveFlags.Usage = indexesRemoveUsage
+
+ toolsFlags.Usage = toolsUsage
+ toolsAvailableFlags.Usage = toolsAvailableUsage
+ toolsInstalledFlags.Usage = toolsInstalledUsage
+ toolsInstallFlags.Usage = toolsInstallUsage
+ toolsRemoveFlags.Usage = toolsRemoveUsage
+
+ if err := flag.CommandLine.Parse(os.Args[1:]); err != nil {
+ return nil, nil, err
+ }
+
+ if len(os.Args) < flag.NFlag()+3 {
+ return nil, nil, fmt.Errorf("not enough arguments")
+ }
+
+ var (
+ svcn string
+ svcf *flag.FlagSet
+ )
+ {
+ svcn = os.Args[1+flag.NFlag()]
+ switch svcn {
+ case "indexes":
+ svcf = indexesFlags
+ case "tools":
+ svcf = toolsFlags
+ default:
+ return nil, nil, fmt.Errorf("unknown service %q", svcn)
+ }
+ }
+ if err := svcf.Parse(os.Args[2+flag.NFlag():]); err != nil {
+ return nil, nil, err
+ }
+
+ var (
+ epn string
+ epf *flag.FlagSet
+ )
+ {
+ epn = os.Args[2+flag.NFlag()+svcf.NFlag()]
+ switch svcn {
+ case "indexes":
+ switch epn {
+ case "list":
+ epf = indexesListFlags
+
+ case "add":
+ epf = indexesAddFlags
+
+ case "remove":
+ epf = indexesRemoveFlags
+
+ }
+
+ case "tools":
+ switch epn {
+ case "available":
+ epf = toolsAvailableFlags
+
+ case "installed":
+ epf = toolsInstalledFlags
+
+ case "install":
+ epf = toolsInstallFlags
+
+ case "remove":
+ epf = toolsRemoveFlags
+
+ }
+
+ }
+ }
+ if epf == nil {
+ return nil, nil, fmt.Errorf("unknown %q endpoint %q", svcn, epn)
+ }
+
+ // Parse endpoint flags if any
+ if len(os.Args) > 2+flag.NFlag()+svcf.NFlag() {
+ if err := epf.Parse(os.Args[3+flag.NFlag()+svcf.NFlag():]); err != nil {
+ return nil, nil, err
+ }
+ }
+
+ var (
+ data interface{}
+ endpoint goa.Endpoint
+ err error
+ )
+ {
+ switch svcn {
+ case "indexes":
+ c := indexesc.NewClient(scheme, host, doer, enc, dec, restore)
+ switch epn {
+ case "list":
+ endpoint = c.List()
+ data = nil
+ case "add":
+ endpoint = c.Add()
+ data, err = indexesc.BuildAddPayload(*indexesAddBodyFlag)
+ case "remove":
+ endpoint = c.Remove()
+ data, err = indexesc.BuildRemovePayload(*indexesRemoveBodyFlag)
+ }
+ case "tools":
+ c := toolsc.NewClient(scheme, host, doer, enc, dec, restore)
+ switch epn {
+ case "available":
+ endpoint = c.Available()
+ data = nil
+ case "installed":
+ endpoint = c.Installed()
+ data = nil
+ case "install":
+ endpoint = c.Install()
+ data, err = toolsc.BuildInstallPayload(*toolsInstallBodyFlag)
+ case "remove":
+ endpoint = c.Remove()
+ data, err = toolsc.BuildRemovePayload(*toolsRemoveBodyFlag, *toolsRemovePackagerFlag, *toolsRemoveNameFlag, *toolsRemoveVersionFlag)
+ }
+ }
+ }
+ if err != nil {
+ return nil, nil, err
+ }
+
+ return endpoint, data, nil
+}
+
+// indexesUsage displays the usage of the indexes command and its subcommands.
+func indexesUsage() {
+ fmt.Fprintf(os.Stderr, `The indexes service manages the package_index files
+Usage:
+ %s [globalflags] indexes COMMAND [flags]
+
+COMMAND:
+ list: List implements list.
+ add: Add implements add.
+ remove: Remove implements remove.
+
+Additional help:
+ %s indexes COMMAND --help
+`, os.Args[0], os.Args[0])
+}
+func indexesListUsage() {
+ fmt.Fprintf(os.Stderr, `%s [flags] indexes list
+
+List implements list.
+
+Example:
+ `+os.Args[0]+` indexes list
+`, os.Args[0])
+}
+
+func indexesAddUsage() {
+ fmt.Fprintf(os.Stderr, `%s [flags] indexes add -body JSON
+
+Add implements add.
+ -body JSON:
+
+Example:
+ `+os.Args[0]+` indexes add --body '{
+ "url": "http://downloads.arduino.cc/packages/package_index.json"
+ }'
+`, os.Args[0])
+}
+
+func indexesRemoveUsage() {
+ fmt.Fprintf(os.Stderr, `%s [flags] indexes remove -body JSON
+
+Remove implements remove.
+ -body JSON:
+
+Example:
+ `+os.Args[0]+` indexes remove --body '{
+ "url": "http://downloads.arduino.cc/packages/package_index.json"
+ }'
+`, os.Args[0])
+}
+
+// toolsUsage displays the usage of the tools command and its subcommands.
+func toolsUsage() {
+ fmt.Fprintf(os.Stderr, `The tools service manages the available and installed tools
+Usage:
+ %s [globalflags] tools COMMAND [flags]
+
+COMMAND:
+ available: Available implements available.
+ installed: Installed implements installed.
+ install: Install implements install.
+ remove: Remove implements remove.
+
+Additional help:
+ %s tools COMMAND --help
+`, os.Args[0], os.Args[0])
+}
+func toolsAvailableUsage() {
+ fmt.Fprintf(os.Stderr, `%s [flags] tools available
+
+Available implements available.
+
+Example:
+ `+os.Args[0]+` tools available
+`, os.Args[0])
+}
+
+func toolsInstalledUsage() {
+ fmt.Fprintf(os.Stderr, `%s [flags] tools installed
+
+Installed implements installed.
+
+Example:
+ `+os.Args[0]+` tools installed
+`, os.Args[0])
+}
+
+func toolsInstallUsage() {
+ fmt.Fprintf(os.Stderr, `%s [flags] tools install -body JSON
+
+Install implements install.
+ -body JSON:
+
+Example:
+ `+os.Args[0]+` tools install --body '{
+ "checksum": "Quam voluptas voluptates expedita rem ipsum.",
+ "name": "avrdude",
+ "packager": "arduino",
+ "url": "Iusto libero explicabo beatae dolor adipisci nulla.",
+ "version": "6.3.0-arduino9"
+ }'
+`, os.Args[0])
+}
+
+func toolsRemoveUsage() {
+ fmt.Fprintf(os.Stderr, `%s [flags] tools remove -body JSON -packager STRING -name STRING -version STRING
+
+Remove implements remove.
+ -body JSON:
+ -packager STRING: The packager of the tool
+ -name STRING: The name of the tool
+ -version STRING: The version of the tool
+
+Example:
+ `+os.Args[0]+` tools remove --body '{
+ "url": "http://downloads.arduino.cc/packages/package_index.json"
+ }' --packager "arduino" --name "avrdude" --version "6.3.0-arduino9"
+`, os.Args[0])
+}
diff --git a/gen/http/docs/client/client.go b/gen/http/docs/client/client.go
new file mode 100644
index 000000000..b7f6ee012
--- /dev/null
+++ b/gen/http/docs/client/client.go
@@ -0,0 +1,44 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// docs client HTTP transport
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
+
+import (
+ "net/http"
+
+ goahttp "goa.design/goa/http"
+)
+
+// Client lists the docs service endpoint HTTP clients.
+type Client struct {
+ // RestoreResponseBody controls whether the response bodies are reset after
+ // decoding so they can be read again.
+ RestoreResponseBody bool
+
+ scheme string
+ host string
+ encoder func(*http.Request) goahttp.Encoder
+ decoder func(*http.Response) goahttp.Decoder
+}
+
+// NewClient instantiates HTTP clients for all the docs service servers.
+func NewClient(
+ scheme string,
+ host string,
+ doer goahttp.Doer,
+ enc func(*http.Request) goahttp.Encoder,
+ dec func(*http.Response) goahttp.Decoder,
+ restoreBody bool,
+) *Client {
+ return &Client{
+ RestoreResponseBody: restoreBody,
+ scheme: scheme,
+ host: host,
+ decoder: dec,
+ encoder: enc,
+ }
+}
diff --git a/gen/http/docs/client/encode_decode.go b/gen/http/docs/client/encode_decode.go
new file mode 100644
index 000000000..e6b7b4af3
--- /dev/null
+++ b/gen/http/docs/client/encode_decode.go
@@ -0,0 +1,8 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// docs HTTP client encoders and decoders
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
diff --git a/gen/http/docs/client/paths.go b/gen/http/docs/client/paths.go
new file mode 100644
index 000000000..f0c4e10eb
--- /dev/null
+++ b/gen/http/docs/client/paths.go
@@ -0,0 +1,8 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// HTTP request path constructors for the docs service.
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
diff --git a/gen/http/docs/client/types.go b/gen/http/docs/client/types.go
new file mode 100644
index 000000000..d2e2410ee
--- /dev/null
+++ b/gen/http/docs/client/types.go
@@ -0,0 +1,8 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// docs HTTP client types
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
diff --git a/gen/http/docs/server/encode_decode.go b/gen/http/docs/server/encode_decode.go
new file mode 100644
index 000000000..df1840332
--- /dev/null
+++ b/gen/http/docs/server/encode_decode.go
@@ -0,0 +1,8 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// docs HTTP server encoders and decoders
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package server
diff --git a/gen/http/docs/server/paths.go b/gen/http/docs/server/paths.go
new file mode 100644
index 000000000..72ae3fc84
--- /dev/null
+++ b/gen/http/docs/server/paths.go
@@ -0,0 +1,8 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// HTTP request path constructors for the docs service.
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package server
diff --git a/gen/http/docs/server/server.go b/gen/http/docs/server/server.go
new file mode 100644
index 000000000..627b1fb60
--- /dev/null
+++ b/gen/http/docs/server/server.go
@@ -0,0 +1,73 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// docs HTTP server
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package server
+
+import (
+ "context"
+ "net/http"
+
+ docs "github.com/arduino/arduino-create-agent/gen/docs"
+ goahttp "goa.design/goa/http"
+)
+
+// Server lists the docs service endpoint HTTP handlers.
+type Server struct {
+ Mounts []*MountPoint
+}
+
+// ErrorNamer is an interface implemented by generated error structs that
+// exposes the name of the error as defined in the design.
+type ErrorNamer interface {
+ ErrorName() string
+}
+
+// MountPoint holds information about the mounted endpoints.
+type MountPoint struct {
+ // Method is the name of the service method served by the mounted HTTP handler.
+ Method string
+ // Verb is the HTTP method used to match requests to the mounted handler.
+ Verb string
+ // Pattern is the HTTP request path pattern used to match requests to the
+ // mounted handler.
+ Pattern string
+}
+
+// New instantiates HTTP handlers for all the docs service endpoints.
+func New(
+ e *docs.Endpoints,
+ mux goahttp.Muxer,
+ dec func(*http.Request) goahttp.Decoder,
+ enc func(context.Context, http.ResponseWriter) goahttp.Encoder,
+ eh func(context.Context, http.ResponseWriter, error),
+) *Server {
+ return &Server{
+ Mounts: []*MountPoint{
+ {"docs/pkgs.html", "GET", "/v2/docs/pkgs"},
+ },
+ }
+}
+
+// Service returns the name of the service served.
+func (s *Server) Service() string { return "docs" }
+
+// Use wraps the server handlers with the given middleware.
+func (s *Server) Use(m func(http.Handler) http.Handler) {
+}
+
+// Mount configures the mux to serve the docs endpoints.
+func Mount(mux goahttp.Muxer) {
+ MountDocsPkgsHTML(mux, http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ http.ServeFile(w, r, "docs/pkgs.html")
+ }))
+}
+
+// MountDocsPkgsHTML configures the mux to serve GET request made to
+// "/v2/docs/pkgs".
+func MountDocsPkgsHTML(mux goahttp.Muxer, h http.Handler) {
+ mux.Handle("GET", "/v2/docs/pkgs", h.ServeHTTP)
+}
diff --git a/gen/http/docs/server/types.go b/gen/http/docs/server/types.go
new file mode 100644
index 000000000..123e3bc30
--- /dev/null
+++ b/gen/http/docs/server/types.go
@@ -0,0 +1,8 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// docs HTTP server types
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package server
diff --git a/gen/http/indexes/client/cli.go b/gen/http/indexes/client/cli.go
new file mode 100644
index 000000000..f6a2ff094
--- /dev/null
+++ b/gen/http/indexes/client/cli.go
@@ -0,0 +1,55 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// indexes HTTP client CLI support package
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
+
+import (
+ "encoding/json"
+ "fmt"
+
+ indexes "github.com/arduino/arduino-create-agent/gen/indexes"
+)
+
+// BuildAddPayload builds the payload for the indexes add endpoint from CLI
+// flags.
+func BuildAddPayload(indexesAddBody string) (*indexes.IndexPayload, error) {
+ var err error
+ var body AddRequestBody
+ {
+ err = json.Unmarshal([]byte(indexesAddBody), &body)
+ if err != nil {
+ return nil, fmt.Errorf("invalid JSON for body, example of valid JSON:\n%s", "'{\n \"url\": \"http://downloads.arduino.cc/packages/package_index.json\"\n }'")
+ }
+ }
+ if err != nil {
+ return nil, err
+ }
+ v := &indexes.IndexPayload{
+ URL: body.URL,
+ }
+ return v, nil
+}
+
+// BuildRemovePayload builds the payload for the indexes remove endpoint from
+// CLI flags.
+func BuildRemovePayload(indexesRemoveBody string) (*indexes.IndexPayload, error) {
+ var err error
+ var body RemoveRequestBody
+ {
+ err = json.Unmarshal([]byte(indexesRemoveBody), &body)
+ if err != nil {
+ return nil, fmt.Errorf("invalid JSON for body, example of valid JSON:\n%s", "'{\n \"url\": \"http://downloads.arduino.cc/packages/package_index.json\"\n }'")
+ }
+ }
+ if err != nil {
+ return nil, err
+ }
+ v := &indexes.IndexPayload{
+ URL: body.URL,
+ }
+ return v, nil
+}
diff --git a/gen/http/indexes/client/client.go b/gen/http/indexes/client/client.go
new file mode 100644
index 000000000..418f060c2
--- /dev/null
+++ b/gen/http/indexes/client/client.go
@@ -0,0 +1,128 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// indexes client HTTP transport
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
+
+import (
+ "context"
+ "net/http"
+
+ goa "goa.design/goa"
+ goahttp "goa.design/goa/http"
+)
+
+// Client lists the indexes service endpoint HTTP clients.
+type Client struct {
+ // List Doer is the HTTP client used to make requests to the list endpoint.
+ ListDoer goahttp.Doer
+
+ // Add Doer is the HTTP client used to make requests to the add endpoint.
+ AddDoer goahttp.Doer
+
+ // Remove Doer is the HTTP client used to make requests to the remove endpoint.
+ RemoveDoer goahttp.Doer
+
+ // RestoreResponseBody controls whether the response bodies are reset after
+ // decoding so they can be read again.
+ RestoreResponseBody bool
+
+ scheme string
+ host string
+ encoder func(*http.Request) goahttp.Encoder
+ decoder func(*http.Response) goahttp.Decoder
+}
+
+// NewClient instantiates HTTP clients for all the indexes service servers.
+func NewClient(
+ scheme string,
+ host string,
+ doer goahttp.Doer,
+ enc func(*http.Request) goahttp.Encoder,
+ dec func(*http.Response) goahttp.Decoder,
+ restoreBody bool,
+) *Client {
+ return &Client{
+ ListDoer: doer,
+ AddDoer: doer,
+ RemoveDoer: doer,
+ RestoreResponseBody: restoreBody,
+ scheme: scheme,
+ host: host,
+ decoder: dec,
+ encoder: enc,
+ }
+}
+
+// List returns an endpoint that makes HTTP requests to the indexes service
+// list server.
+func (c *Client) List() goa.Endpoint {
+ var (
+ decodeResponse = DecodeListResponse(c.decoder, c.RestoreResponseBody)
+ )
+ return func(ctx context.Context, v interface{}) (interface{}, error) {
+ req, err := c.BuildListRequest(ctx, v)
+ if err != nil {
+ return nil, err
+ }
+ resp, err := c.ListDoer.Do(req)
+
+ if err != nil {
+ return nil, goahttp.ErrRequestError("indexes", "list", err)
+ }
+ return decodeResponse(resp)
+ }
+}
+
+// Add returns an endpoint that makes HTTP requests to the indexes service add
+// server.
+func (c *Client) Add() goa.Endpoint {
+ var (
+ encodeRequest = EncodeAddRequest(c.encoder)
+ decodeResponse = DecodeAddResponse(c.decoder, c.RestoreResponseBody)
+ )
+ return func(ctx context.Context, v interface{}) (interface{}, error) {
+ req, err := c.BuildAddRequest(ctx, v)
+ if err != nil {
+ return nil, err
+ }
+ err = encodeRequest(req, v)
+ if err != nil {
+ return nil, err
+ }
+ resp, err := c.AddDoer.Do(req)
+
+ if err != nil {
+ return nil, goahttp.ErrRequestError("indexes", "add", err)
+ }
+ return decodeResponse(resp)
+ }
+}
+
+// Remove returns an endpoint that makes HTTP requests to the indexes service
+// remove server.
+func (c *Client) Remove() goa.Endpoint {
+ var (
+ encodeRequest = EncodeRemoveRequest(c.encoder)
+ decodeResponse = DecodeRemoveResponse(c.decoder, c.RestoreResponseBody)
+ )
+ return func(ctx context.Context, v interface{}) (interface{}, error) {
+ req, err := c.BuildRemoveRequest(ctx, v)
+ if err != nil {
+ return nil, err
+ }
+ err = encodeRequest(req, v)
+ if err != nil {
+ return nil, err
+ }
+ resp, err := c.RemoveDoer.Do(req)
+
+ if err != nil {
+ return nil, goahttp.ErrRequestError("indexes", "remove", err)
+ }
+ return decodeResponse(resp)
+ }
+}
diff --git a/gen/http/indexes/client/encode_decode.go b/gen/http/indexes/client/encode_decode.go
new file mode 100644
index 000000000..3bca6b10c
--- /dev/null
+++ b/gen/http/indexes/client/encode_decode.go
@@ -0,0 +1,267 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// indexes HTTP client encoders and decoders
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
+
+import (
+ "bytes"
+ "context"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+
+ indexes "github.com/arduino/arduino-create-agent/gen/indexes"
+ indexesviews "github.com/arduino/arduino-create-agent/gen/indexes/views"
+ goahttp "goa.design/goa/http"
+)
+
+// BuildListRequest instantiates a HTTP request object with method and path set
+// to call the "indexes" service "list" endpoint
+func (c *Client) BuildListRequest(ctx context.Context, v interface{}) (*http.Request, error) {
+ u := &url.URL{Scheme: c.scheme, Host: c.host, Path: ListIndexesPath()}
+ req, err := http.NewRequest("GET", u.String(), nil)
+ if err != nil {
+ return nil, goahttp.ErrInvalidURL("indexes", "list", u.String(), err)
+ }
+ if ctx != nil {
+ req = req.WithContext(ctx)
+ }
+
+ return req, nil
+}
+
+// DecodeListResponse returns a decoder for responses returned by the indexes
+// list endpoint. restoreBody controls whether the response body should be
+// restored after having been read.
+// DecodeListResponse may return the following errors:
+// - "invalid_url" (type *goa.ServiceError): http.StatusBadRequest
+// - error: internal error
+func DecodeListResponse(decoder func(*http.Response) goahttp.Decoder, restoreBody bool) func(*http.Response) (interface{}, error) {
+ return func(resp *http.Response) (interface{}, error) {
+ if restoreBody {
+ b, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return nil, err
+ }
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ defer func() {
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ }()
+ } else {
+ defer resp.Body.Close()
+ }
+ switch resp.StatusCode {
+ case http.StatusOK:
+ var (
+ body []string
+ err error
+ )
+ err = decoder(resp).Decode(&body)
+ if err != nil {
+ return nil, goahttp.ErrDecodingError("indexes", "list", err)
+ }
+ return body, nil
+ case http.StatusBadRequest:
+ var (
+ body ListInvalidURLResponseBody
+ err error
+ )
+ err = decoder(resp).Decode(&body)
+ if err != nil {
+ return nil, goahttp.ErrDecodingError("indexes", "list", err)
+ }
+ err = ValidateListInvalidURLResponseBody(&body)
+ if err != nil {
+ return nil, goahttp.ErrValidationError("indexes", "list", err)
+ }
+ return nil, NewListInvalidURL(&body)
+ default:
+ body, _ := ioutil.ReadAll(resp.Body)
+ return nil, goahttp.ErrInvalidResponse("indexes", "list", resp.StatusCode, string(body))
+ }
+ }
+}
+
+// BuildAddRequest instantiates a HTTP request object with method and path set
+// to call the "indexes" service "add" endpoint
+func (c *Client) BuildAddRequest(ctx context.Context, v interface{}) (*http.Request, error) {
+ u := &url.URL{Scheme: c.scheme, Host: c.host, Path: AddIndexesPath()}
+ req, err := http.NewRequest("POST", u.String(), nil)
+ if err != nil {
+ return nil, goahttp.ErrInvalidURL("indexes", "add", u.String(), err)
+ }
+ if ctx != nil {
+ req = req.WithContext(ctx)
+ }
+
+ return req, nil
+}
+
+// EncodeAddRequest returns an encoder for requests sent to the indexes add
+// server.
+func EncodeAddRequest(encoder func(*http.Request) goahttp.Encoder) func(*http.Request, interface{}) error {
+ return func(req *http.Request, v interface{}) error {
+ p, ok := v.(*indexes.IndexPayload)
+ if !ok {
+ return goahttp.ErrInvalidType("indexes", "add", "*indexes.IndexPayload", v)
+ }
+ body := NewAddRequestBody(p)
+ if err := encoder(req).Encode(&body); err != nil {
+ return goahttp.ErrEncodingError("indexes", "add", err)
+ }
+ return nil
+ }
+}
+
+// DecodeAddResponse returns a decoder for responses returned by the indexes
+// add endpoint. restoreBody controls whether the response body should be
+// restored after having been read.
+// DecodeAddResponse may return the following errors:
+// - "invalid_url" (type *goa.ServiceError): http.StatusBadRequest
+// - error: internal error
+func DecodeAddResponse(decoder func(*http.Response) goahttp.Decoder, restoreBody bool) func(*http.Response) (interface{}, error) {
+ return func(resp *http.Response) (interface{}, error) {
+ if restoreBody {
+ b, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return nil, err
+ }
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ defer func() {
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ }()
+ } else {
+ defer resp.Body.Close()
+ }
+ switch resp.StatusCode {
+ case http.StatusOK:
+ var (
+ body AddResponseBody
+ err error
+ )
+ err = decoder(resp).Decode(&body)
+ if err != nil {
+ return nil, goahttp.ErrDecodingError("indexes", "add", err)
+ }
+ p := NewAddOperationOK(&body)
+ view := "default"
+ vres := &indexesviews.Operation{p, view}
+ if err = indexesviews.ValidateOperation(vres); err != nil {
+ return nil, goahttp.ErrValidationError("indexes", "add", err)
+ }
+ res := indexes.NewOperation(vres)
+ return res, nil
+ case http.StatusBadRequest:
+ var (
+ body AddInvalidURLResponseBody
+ err error
+ )
+ err = decoder(resp).Decode(&body)
+ if err != nil {
+ return nil, goahttp.ErrDecodingError("indexes", "add", err)
+ }
+ err = ValidateAddInvalidURLResponseBody(&body)
+ if err != nil {
+ return nil, goahttp.ErrValidationError("indexes", "add", err)
+ }
+ return nil, NewAddInvalidURL(&body)
+ default:
+ body, _ := ioutil.ReadAll(resp.Body)
+ return nil, goahttp.ErrInvalidResponse("indexes", "add", resp.StatusCode, string(body))
+ }
+ }
+}
+
+// BuildRemoveRequest instantiates a HTTP request object with method and path
+// set to call the "indexes" service "remove" endpoint
+func (c *Client) BuildRemoveRequest(ctx context.Context, v interface{}) (*http.Request, error) {
+ u := &url.URL{Scheme: c.scheme, Host: c.host, Path: RemoveIndexesPath()}
+ req, err := http.NewRequest("POST", u.String(), nil)
+ if err != nil {
+ return nil, goahttp.ErrInvalidURL("indexes", "remove", u.String(), err)
+ }
+ if ctx != nil {
+ req = req.WithContext(ctx)
+ }
+
+ return req, nil
+}
+
+// EncodeRemoveRequest returns an encoder for requests sent to the indexes
+// remove server.
+func EncodeRemoveRequest(encoder func(*http.Request) goahttp.Encoder) func(*http.Request, interface{}) error {
+ return func(req *http.Request, v interface{}) error {
+ p, ok := v.(*indexes.IndexPayload)
+ if !ok {
+ return goahttp.ErrInvalidType("indexes", "remove", "*indexes.IndexPayload", v)
+ }
+ body := NewRemoveRequestBody(p)
+ if err := encoder(req).Encode(&body); err != nil {
+ return goahttp.ErrEncodingError("indexes", "remove", err)
+ }
+ return nil
+ }
+}
+
+// DecodeRemoveResponse returns a decoder for responses returned by the indexes
+// remove endpoint. restoreBody controls whether the response body should be
+// restored after having been read.
+// DecodeRemoveResponse may return the following errors:
+// - "invalid_url" (type *goa.ServiceError): http.StatusBadRequest
+// - error: internal error
+func DecodeRemoveResponse(decoder func(*http.Response) goahttp.Decoder, restoreBody bool) func(*http.Response) (interface{}, error) {
+ return func(resp *http.Response) (interface{}, error) {
+ if restoreBody {
+ b, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return nil, err
+ }
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ defer func() {
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ }()
+ } else {
+ defer resp.Body.Close()
+ }
+ switch resp.StatusCode {
+ case http.StatusOK:
+ var (
+ body RemoveResponseBody
+ err error
+ )
+ err = decoder(resp).Decode(&body)
+ if err != nil {
+ return nil, goahttp.ErrDecodingError("indexes", "remove", err)
+ }
+ p := NewRemoveOperationOK(&body)
+ view := "default"
+ vres := &indexesviews.Operation{p, view}
+ if err = indexesviews.ValidateOperation(vres); err != nil {
+ return nil, goahttp.ErrValidationError("indexes", "remove", err)
+ }
+ res := indexes.NewOperation(vres)
+ return res, nil
+ case http.StatusBadRequest:
+ var (
+ body RemoveInvalidURLResponseBody
+ err error
+ )
+ err = decoder(resp).Decode(&body)
+ if err != nil {
+ return nil, goahttp.ErrDecodingError("indexes", "remove", err)
+ }
+ err = ValidateRemoveInvalidURLResponseBody(&body)
+ if err != nil {
+ return nil, goahttp.ErrValidationError("indexes", "remove", err)
+ }
+ return nil, NewRemoveInvalidURL(&body)
+ default:
+ body, _ := ioutil.ReadAll(resp.Body)
+ return nil, goahttp.ErrInvalidResponse("indexes", "remove", resp.StatusCode, string(body))
+ }
+ }
+}
diff --git a/gen/http/indexes/client/paths.go b/gen/http/indexes/client/paths.go
new file mode 100644
index 000000000..24fb5b1cb
--- /dev/null
+++ b/gen/http/indexes/client/paths.go
@@ -0,0 +1,23 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// HTTP request path constructors for the indexes service.
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
+
+// ListIndexesPath returns the URL path to the indexes service list HTTP endpoint.
+func ListIndexesPath() string {
+ return "/v2/pkgs/indexes"
+}
+
+// AddIndexesPath returns the URL path to the indexes service add HTTP endpoint.
+func AddIndexesPath() string {
+ return "/v2/pkgs/indexes/add"
+}
+
+// RemoveIndexesPath returns the URL path to the indexes service remove HTTP endpoint.
+func RemoveIndexesPath() string {
+ return "/v2/pkgs/indexes/delete"
+}
diff --git a/gen/http/indexes/client/types.go b/gen/http/indexes/client/types.go
new file mode 100644
index 000000000..4d9d04b06
--- /dev/null
+++ b/gen/http/indexes/client/types.go
@@ -0,0 +1,244 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// indexes HTTP client types
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
+
+import (
+ indexes "github.com/arduino/arduino-create-agent/gen/indexes"
+ indexesviews "github.com/arduino/arduino-create-agent/gen/indexes/views"
+ goa "goa.design/goa"
+)
+
+// AddRequestBody is the type of the "indexes" service "add" endpoint HTTP
+// request body.
+type AddRequestBody struct {
+ // The url of the index file
+ URL string `form:"url" json:"url" xml:"url"`
+}
+
+// RemoveRequestBody is the type of the "indexes" service "remove" endpoint
+// HTTP request body.
+type RemoveRequestBody struct {
+ // The url of the index file
+ URL string `form:"url" json:"url" xml:"url"`
+}
+
+// AddResponseBody is the type of the "indexes" service "add" endpoint HTTP
+// response body.
+type AddResponseBody struct {
+ // The status of the operation
+ Status *string `form:"status,omitempty" json:"status,omitempty" xml:"status,omitempty"`
+}
+
+// RemoveResponseBody is the type of the "indexes" service "remove" endpoint
+// HTTP response body.
+type RemoveResponseBody struct {
+ // The status of the operation
+ Status *string `form:"status,omitempty" json:"status,omitempty" xml:"status,omitempty"`
+}
+
+// ListInvalidURLResponseBody is the type of the "indexes" service "list"
+// endpoint HTTP response body for the "invalid_url" error.
+type ListInvalidURLResponseBody struct {
+ // Name is the name of this class of errors.
+ Name *string `form:"name,omitempty" json:"name,omitempty" xml:"name,omitempty"`
+ // ID is a unique identifier for this particular occurrence of the problem.
+ ID *string `form:"id,omitempty" json:"id,omitempty" xml:"id,omitempty"`
+ // Message is a human-readable explanation specific to this occurrence of the
+ // problem.
+ Message *string `form:"message,omitempty" json:"message,omitempty" xml:"message,omitempty"`
+ // Is the error temporary?
+ Temporary *bool `form:"temporary,omitempty" json:"temporary,omitempty" xml:"temporary,omitempty"`
+ // Is the error a timeout?
+ Timeout *bool `form:"timeout,omitempty" json:"timeout,omitempty" xml:"timeout,omitempty"`
+ // Is the error a server-side fault?
+ Fault *bool `form:"fault,omitempty" json:"fault,omitempty" xml:"fault,omitempty"`
+}
+
+// AddInvalidURLResponseBody is the type of the "indexes" service "add"
+// endpoint HTTP response body for the "invalid_url" error.
+type AddInvalidURLResponseBody struct {
+ // Name is the name of this class of errors.
+ Name *string `form:"name,omitempty" json:"name,omitempty" xml:"name,omitempty"`
+ // ID is a unique identifier for this particular occurrence of the problem.
+ ID *string `form:"id,omitempty" json:"id,omitempty" xml:"id,omitempty"`
+ // Message is a human-readable explanation specific to this occurrence of the
+ // problem.
+ Message *string `form:"message,omitempty" json:"message,omitempty" xml:"message,omitempty"`
+ // Is the error temporary?
+ Temporary *bool `form:"temporary,omitempty" json:"temporary,omitempty" xml:"temporary,omitempty"`
+ // Is the error a timeout?
+ Timeout *bool `form:"timeout,omitempty" json:"timeout,omitempty" xml:"timeout,omitempty"`
+ // Is the error a server-side fault?
+ Fault *bool `form:"fault,omitempty" json:"fault,omitempty" xml:"fault,omitempty"`
+}
+
+// RemoveInvalidURLResponseBody is the type of the "indexes" service "remove"
+// endpoint HTTP response body for the "invalid_url" error.
+type RemoveInvalidURLResponseBody struct {
+ // Name is the name of this class of errors.
+ Name *string `form:"name,omitempty" json:"name,omitempty" xml:"name,omitempty"`
+ // ID is a unique identifier for this particular occurrence of the problem.
+ ID *string `form:"id,omitempty" json:"id,omitempty" xml:"id,omitempty"`
+ // Message is a human-readable explanation specific to this occurrence of the
+ // problem.
+ Message *string `form:"message,omitempty" json:"message,omitempty" xml:"message,omitempty"`
+ // Is the error temporary?
+ Temporary *bool `form:"temporary,omitempty" json:"temporary,omitempty" xml:"temporary,omitempty"`
+ // Is the error a timeout?
+ Timeout *bool `form:"timeout,omitempty" json:"timeout,omitempty" xml:"timeout,omitempty"`
+ // Is the error a server-side fault?
+ Fault *bool `form:"fault,omitempty" json:"fault,omitempty" xml:"fault,omitempty"`
+}
+
+// NewAddRequestBody builds the HTTP request body from the payload of the "add"
+// endpoint of the "indexes" service.
+func NewAddRequestBody(p *indexes.IndexPayload) *AddRequestBody {
+ body := &AddRequestBody{
+ URL: p.URL,
+ }
+ return body
+}
+
+// NewRemoveRequestBody builds the HTTP request body from the payload of the
+// "remove" endpoint of the "indexes" service.
+func NewRemoveRequestBody(p *indexes.IndexPayload) *RemoveRequestBody {
+ body := &RemoveRequestBody{
+ URL: p.URL,
+ }
+ return body
+}
+
+// NewListInvalidURL builds a indexes service list endpoint invalid_url error.
+func NewListInvalidURL(body *ListInvalidURLResponseBody) *goa.ServiceError {
+ v := &goa.ServiceError{
+ Name: *body.Name,
+ ID: *body.ID,
+ Message: *body.Message,
+ Temporary: *body.Temporary,
+ Timeout: *body.Timeout,
+ Fault: *body.Fault,
+ }
+ return v
+}
+
+// NewAddOperationOK builds a "indexes" service "add" endpoint result from a
+// HTTP "OK" response.
+func NewAddOperationOK(body *AddResponseBody) *indexesviews.OperationView {
+ v := &indexesviews.OperationView{
+ Status: body.Status,
+ }
+ return v
+}
+
+// NewAddInvalidURL builds a indexes service add endpoint invalid_url error.
+func NewAddInvalidURL(body *AddInvalidURLResponseBody) *goa.ServiceError {
+ v := &goa.ServiceError{
+ Name: *body.Name,
+ ID: *body.ID,
+ Message: *body.Message,
+ Temporary: *body.Temporary,
+ Timeout: *body.Timeout,
+ Fault: *body.Fault,
+ }
+ return v
+}
+
+// NewRemoveOperationOK builds a "indexes" service "remove" endpoint result
+// from a HTTP "OK" response.
+func NewRemoveOperationOK(body *RemoveResponseBody) *indexesviews.OperationView {
+ v := &indexesviews.OperationView{
+ Status: body.Status,
+ }
+ return v
+}
+
+// NewRemoveInvalidURL builds a indexes service remove endpoint invalid_url
+// error.
+func NewRemoveInvalidURL(body *RemoveInvalidURLResponseBody) *goa.ServiceError {
+ v := &goa.ServiceError{
+ Name: *body.Name,
+ ID: *body.ID,
+ Message: *body.Message,
+ Temporary: *body.Temporary,
+ Timeout: *body.Timeout,
+ Fault: *body.Fault,
+ }
+ return v
+}
+
+// ValidateListInvalidURLResponseBody runs the validations defined on
+// list_invalid_url_response_body
+func ValidateListInvalidURLResponseBody(body *ListInvalidURLResponseBody) (err error) {
+ if body.Name == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("name", "body"))
+ }
+ if body.ID == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("id", "body"))
+ }
+ if body.Message == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("message", "body"))
+ }
+ if body.Temporary == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("temporary", "body"))
+ }
+ if body.Timeout == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("timeout", "body"))
+ }
+ if body.Fault == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("fault", "body"))
+ }
+ return
+}
+
+// ValidateAddInvalidURLResponseBody runs the validations defined on
+// add_invalid_url_response_body
+func ValidateAddInvalidURLResponseBody(body *AddInvalidURLResponseBody) (err error) {
+ if body.Name == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("name", "body"))
+ }
+ if body.ID == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("id", "body"))
+ }
+ if body.Message == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("message", "body"))
+ }
+ if body.Temporary == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("temporary", "body"))
+ }
+ if body.Timeout == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("timeout", "body"))
+ }
+ if body.Fault == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("fault", "body"))
+ }
+ return
+}
+
+// ValidateRemoveInvalidURLResponseBody runs the validations defined on
+// remove_invalid_url_response_body
+func ValidateRemoveInvalidURLResponseBody(body *RemoveInvalidURLResponseBody) (err error) {
+ if body.Name == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("name", "body"))
+ }
+ if body.ID == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("id", "body"))
+ }
+ if body.Message == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("message", "body"))
+ }
+ if body.Temporary == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("temporary", "body"))
+ }
+ if body.Timeout == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("timeout", "body"))
+ }
+ if body.Fault == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("fault", "body"))
+ }
+ return
+}
diff --git a/gen/http/indexes/server/encode_decode.go b/gen/http/indexes/server/encode_decode.go
new file mode 100644
index 000000000..4172333cf
--- /dev/null
+++ b/gen/http/indexes/server/encode_decode.go
@@ -0,0 +1,173 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// indexes HTTP server encoders and decoders
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package server
+
+import (
+ "context"
+ "io"
+ "net/http"
+
+ indexesviews "github.com/arduino/arduino-create-agent/gen/indexes/views"
+ goa "goa.design/goa"
+ goahttp "goa.design/goa/http"
+)
+
+// EncodeListResponse returns an encoder for responses returned by the indexes
+// list endpoint.
+func EncodeListResponse(encoder func(context.Context, http.ResponseWriter) goahttp.Encoder) func(context.Context, http.ResponseWriter, interface{}) error {
+ return func(ctx context.Context, w http.ResponseWriter, v interface{}) error {
+ res := v.([]string)
+ enc := encoder(ctx, w)
+ body := res
+ w.WriteHeader(http.StatusOK)
+ return enc.Encode(body)
+ }
+}
+
+// EncodeListError returns an encoder for errors returned by the list indexes
+// endpoint.
+func EncodeListError(encoder func(context.Context, http.ResponseWriter) goahttp.Encoder) func(context.Context, http.ResponseWriter, error) error {
+ encodeError := goahttp.ErrorEncoder(encoder)
+ return func(ctx context.Context, w http.ResponseWriter, v error) error {
+ en, ok := v.(ErrorNamer)
+ if !ok {
+ return encodeError(ctx, w, v)
+ }
+ switch en.ErrorName() {
+ case "invalid_url":
+ res := v.(*goa.ServiceError)
+ enc := encoder(ctx, w)
+ body := NewListInvalidURLResponseBody(res)
+ w.Header().Set("goa-error", "invalid_url")
+ w.WriteHeader(http.StatusBadRequest)
+ return enc.Encode(body)
+ default:
+ return encodeError(ctx, w, v)
+ }
+ }
+}
+
+// EncodeAddResponse returns an encoder for responses returned by the indexes
+// add endpoint.
+func EncodeAddResponse(encoder func(context.Context, http.ResponseWriter) goahttp.Encoder) func(context.Context, http.ResponseWriter, interface{}) error {
+ return func(ctx context.Context, w http.ResponseWriter, v interface{}) error {
+ res := v.(*indexesviews.Operation)
+ enc := encoder(ctx, w)
+ body := NewAddResponseBody(res.Projected)
+ w.WriteHeader(http.StatusOK)
+ return enc.Encode(body)
+ }
+}
+
+// DecodeAddRequest returns a decoder for requests sent to the indexes add
+// endpoint.
+func DecodeAddRequest(mux goahttp.Muxer, decoder func(*http.Request) goahttp.Decoder) func(*http.Request) (interface{}, error) {
+ return func(r *http.Request) (interface{}, error) {
+ var (
+ body AddRequestBody
+ err error
+ )
+ err = decoder(r).Decode(&body)
+ if err != nil {
+ if err == io.EOF {
+ return nil, goa.MissingPayloadError()
+ }
+ return nil, goa.DecodePayloadError(err.Error())
+ }
+ err = ValidateAddRequestBody(&body)
+ if err != nil {
+ return nil, err
+ }
+ payload := NewAddIndexPayload(&body)
+
+ return payload, nil
+ }
+}
+
+// EncodeAddError returns an encoder for errors returned by the add indexes
+// endpoint.
+func EncodeAddError(encoder func(context.Context, http.ResponseWriter) goahttp.Encoder) func(context.Context, http.ResponseWriter, error) error {
+ encodeError := goahttp.ErrorEncoder(encoder)
+ return func(ctx context.Context, w http.ResponseWriter, v error) error {
+ en, ok := v.(ErrorNamer)
+ if !ok {
+ return encodeError(ctx, w, v)
+ }
+ switch en.ErrorName() {
+ case "invalid_url":
+ res := v.(*goa.ServiceError)
+ enc := encoder(ctx, w)
+ body := NewAddInvalidURLResponseBody(res)
+ w.Header().Set("goa-error", "invalid_url")
+ w.WriteHeader(http.StatusBadRequest)
+ return enc.Encode(body)
+ default:
+ return encodeError(ctx, w, v)
+ }
+ }
+}
+
+// EncodeRemoveResponse returns an encoder for responses returned by the
+// indexes remove endpoint.
+func EncodeRemoveResponse(encoder func(context.Context, http.ResponseWriter) goahttp.Encoder) func(context.Context, http.ResponseWriter, interface{}) error {
+ return func(ctx context.Context, w http.ResponseWriter, v interface{}) error {
+ res := v.(*indexesviews.Operation)
+ enc := encoder(ctx, w)
+ body := NewRemoveResponseBody(res.Projected)
+ w.WriteHeader(http.StatusOK)
+ return enc.Encode(body)
+ }
+}
+
+// DecodeRemoveRequest returns a decoder for requests sent to the indexes
+// remove endpoint.
+func DecodeRemoveRequest(mux goahttp.Muxer, decoder func(*http.Request) goahttp.Decoder) func(*http.Request) (interface{}, error) {
+ return func(r *http.Request) (interface{}, error) {
+ var (
+ body RemoveRequestBody
+ err error
+ )
+ err = decoder(r).Decode(&body)
+ if err != nil {
+ if err == io.EOF {
+ return nil, goa.MissingPayloadError()
+ }
+ return nil, goa.DecodePayloadError(err.Error())
+ }
+ err = ValidateRemoveRequestBody(&body)
+ if err != nil {
+ return nil, err
+ }
+ payload := NewRemoveIndexPayload(&body)
+
+ return payload, nil
+ }
+}
+
+// EncodeRemoveError returns an encoder for errors returned by the remove
+// indexes endpoint.
+func EncodeRemoveError(encoder func(context.Context, http.ResponseWriter) goahttp.Encoder) func(context.Context, http.ResponseWriter, error) error {
+ encodeError := goahttp.ErrorEncoder(encoder)
+ return func(ctx context.Context, w http.ResponseWriter, v error) error {
+ en, ok := v.(ErrorNamer)
+ if !ok {
+ return encodeError(ctx, w, v)
+ }
+ switch en.ErrorName() {
+ case "invalid_url":
+ res := v.(*goa.ServiceError)
+ enc := encoder(ctx, w)
+ body := NewRemoveInvalidURLResponseBody(res)
+ w.Header().Set("goa-error", "invalid_url")
+ w.WriteHeader(http.StatusBadRequest)
+ return enc.Encode(body)
+ default:
+ return encodeError(ctx, w, v)
+ }
+ }
+}
diff --git a/gen/http/indexes/server/paths.go b/gen/http/indexes/server/paths.go
new file mode 100644
index 000000000..f1ec8674d
--- /dev/null
+++ b/gen/http/indexes/server/paths.go
@@ -0,0 +1,23 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// HTTP request path constructors for the indexes service.
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package server
+
+// ListIndexesPath returns the URL path to the indexes service list HTTP endpoint.
+func ListIndexesPath() string {
+ return "/v2/pkgs/indexes"
+}
+
+// AddIndexesPath returns the URL path to the indexes service add HTTP endpoint.
+func AddIndexesPath() string {
+ return "/v2/pkgs/indexes/add"
+}
+
+// RemoveIndexesPath returns the URL path to the indexes service remove HTTP endpoint.
+func RemoveIndexesPath() string {
+ return "/v2/pkgs/indexes/delete"
+}
diff --git a/gen/http/indexes/server/server.go b/gen/http/indexes/server/server.go
new file mode 100644
index 000000000..99c912659
--- /dev/null
+++ b/gen/http/indexes/server/server.go
@@ -0,0 +1,227 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// indexes HTTP server
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package server
+
+import (
+ "context"
+ "net/http"
+
+ indexes "github.com/arduino/arduino-create-agent/gen/indexes"
+ goa "goa.design/goa"
+ goahttp "goa.design/goa/http"
+)
+
+// Server lists the indexes service endpoint HTTP handlers.
+type Server struct {
+ Mounts []*MountPoint
+ List http.Handler
+ Add http.Handler
+ Remove http.Handler
+}
+
+// ErrorNamer is an interface implemented by generated error structs that
+// exposes the name of the error as defined in the design.
+type ErrorNamer interface {
+ ErrorName() string
+}
+
+// MountPoint holds information about the mounted endpoints.
+type MountPoint struct {
+ // Method is the name of the service method served by the mounted HTTP handler.
+ Method string
+ // Verb is the HTTP method used to match requests to the mounted handler.
+ Verb string
+ // Pattern is the HTTP request path pattern used to match requests to the
+ // mounted handler.
+ Pattern string
+}
+
+// New instantiates HTTP handlers for all the indexes service endpoints.
+func New(
+ e *indexes.Endpoints,
+ mux goahttp.Muxer,
+ dec func(*http.Request) goahttp.Decoder,
+ enc func(context.Context, http.ResponseWriter) goahttp.Encoder,
+ eh func(context.Context, http.ResponseWriter, error),
+) *Server {
+ return &Server{
+ Mounts: []*MountPoint{
+ {"List", "GET", "/v2/pkgs/indexes"},
+ {"Add", "POST", "/v2/pkgs/indexes/add"},
+ {"Remove", "POST", "/v2/pkgs/indexes/delete"},
+ },
+ List: NewListHandler(e.List, mux, dec, enc, eh),
+ Add: NewAddHandler(e.Add, mux, dec, enc, eh),
+ Remove: NewRemoveHandler(e.Remove, mux, dec, enc, eh),
+ }
+}
+
+// Service returns the name of the service served.
+func (s *Server) Service() string { return "indexes" }
+
+// Use wraps the server handlers with the given middleware.
+func (s *Server) Use(m func(http.Handler) http.Handler) {
+ s.List = m(s.List)
+ s.Add = m(s.Add)
+ s.Remove = m(s.Remove)
+}
+
+// Mount configures the mux to serve the indexes endpoints.
+func Mount(mux goahttp.Muxer, h *Server) {
+ MountListHandler(mux, h.List)
+ MountAddHandler(mux, h.Add)
+ MountRemoveHandler(mux, h.Remove)
+}
+
+// MountListHandler configures the mux to serve the "indexes" service "list"
+// endpoint.
+func MountListHandler(mux goahttp.Muxer, h http.Handler) {
+ f, ok := h.(http.HandlerFunc)
+ if !ok {
+ f = func(w http.ResponseWriter, r *http.Request) {
+ h.ServeHTTP(w, r)
+ }
+ }
+ mux.Handle("GET", "/v2/pkgs/indexes", f)
+}
+
+// NewListHandler creates an HTTP handler which loads the HTTP request and calls
+// the "indexes" service "list" endpoint.
+func NewListHandler(
+ endpoint goa.Endpoint,
+ mux goahttp.Muxer,
+ dec func(*http.Request) goahttp.Decoder,
+ enc func(context.Context, http.ResponseWriter) goahttp.Encoder,
+ eh func(context.Context, http.ResponseWriter, error),
+) http.Handler {
+ var (
+ encodeResponse = EncodeListResponse(enc)
+ encodeError = EncodeListError(enc)
+ )
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := context.WithValue(r.Context(), goahttp.AcceptTypeKey, r.Header.Get("Accept"))
+ ctx = context.WithValue(ctx, goa.MethodKey, "list")
+ ctx = context.WithValue(ctx, goa.ServiceKey, "indexes")
+
+ res, err := endpoint(ctx, nil)
+
+ if err != nil {
+ if err := encodeError(ctx, w, err); err != nil {
+ eh(ctx, w, err)
+ }
+ return
+ }
+ if err := encodeResponse(ctx, w, res); err != nil {
+ eh(ctx, w, err)
+ }
+ })
+}
+
+// MountAddHandler configures the mux to serve the "indexes" service "add"
+// endpoint.
+func MountAddHandler(mux goahttp.Muxer, h http.Handler) {
+ f, ok := h.(http.HandlerFunc)
+ if !ok {
+ f = func(w http.ResponseWriter, r *http.Request) {
+ h.ServeHTTP(w, r)
+ }
+ }
+ mux.Handle("POST", "/v2/pkgs/indexes/add", f)
+}
+
+// NewAddHandler creates an HTTP handler which loads the HTTP request and calls
+// the "indexes" service "add" endpoint.
+func NewAddHandler(
+ endpoint goa.Endpoint,
+ mux goahttp.Muxer,
+ dec func(*http.Request) goahttp.Decoder,
+ enc func(context.Context, http.ResponseWriter) goahttp.Encoder,
+ eh func(context.Context, http.ResponseWriter, error),
+) http.Handler {
+ var (
+ decodeRequest = DecodeAddRequest(mux, dec)
+ encodeResponse = EncodeAddResponse(enc)
+ encodeError = EncodeAddError(enc)
+ )
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := context.WithValue(r.Context(), goahttp.AcceptTypeKey, r.Header.Get("Accept"))
+ ctx = context.WithValue(ctx, goa.MethodKey, "add")
+ ctx = context.WithValue(ctx, goa.ServiceKey, "indexes")
+ payload, err := decodeRequest(r)
+ if err != nil {
+ if err := encodeError(ctx, w, err); err != nil {
+ eh(ctx, w, err)
+ }
+ return
+ }
+
+ res, err := endpoint(ctx, payload)
+
+ if err != nil {
+ if err := encodeError(ctx, w, err); err != nil {
+ eh(ctx, w, err)
+ }
+ return
+ }
+ if err := encodeResponse(ctx, w, res); err != nil {
+ eh(ctx, w, err)
+ }
+ })
+}
+
+// MountRemoveHandler configures the mux to serve the "indexes" service
+// "remove" endpoint.
+func MountRemoveHandler(mux goahttp.Muxer, h http.Handler) {
+ f, ok := h.(http.HandlerFunc)
+ if !ok {
+ f = func(w http.ResponseWriter, r *http.Request) {
+ h.ServeHTTP(w, r)
+ }
+ }
+ mux.Handle("POST", "/v2/pkgs/indexes/delete", f)
+}
+
+// NewRemoveHandler creates an HTTP handler which loads the HTTP request and
+// calls the "indexes" service "remove" endpoint.
+func NewRemoveHandler(
+ endpoint goa.Endpoint,
+ mux goahttp.Muxer,
+ dec func(*http.Request) goahttp.Decoder,
+ enc func(context.Context, http.ResponseWriter) goahttp.Encoder,
+ eh func(context.Context, http.ResponseWriter, error),
+) http.Handler {
+ var (
+ decodeRequest = DecodeRemoveRequest(mux, dec)
+ encodeResponse = EncodeRemoveResponse(enc)
+ encodeError = EncodeRemoveError(enc)
+ )
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := context.WithValue(r.Context(), goahttp.AcceptTypeKey, r.Header.Get("Accept"))
+ ctx = context.WithValue(ctx, goa.MethodKey, "remove")
+ ctx = context.WithValue(ctx, goa.ServiceKey, "indexes")
+ payload, err := decodeRequest(r)
+ if err != nil {
+ if err := encodeError(ctx, w, err); err != nil {
+ eh(ctx, w, err)
+ }
+ return
+ }
+
+ res, err := endpoint(ctx, payload)
+
+ if err != nil {
+ if err := encodeError(ctx, w, err); err != nil {
+ eh(ctx, w, err)
+ }
+ return
+ }
+ if err := encodeResponse(ctx, w, res); err != nil {
+ eh(ctx, w, err)
+ }
+ })
+}
diff --git a/gen/http/indexes/server/types.go b/gen/http/indexes/server/types.go
new file mode 100644
index 000000000..4f6792278
--- /dev/null
+++ b/gen/http/indexes/server/types.go
@@ -0,0 +1,188 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// indexes HTTP server types
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package server
+
+import (
+ indexes "github.com/arduino/arduino-create-agent/gen/indexes"
+ indexesviews "github.com/arduino/arduino-create-agent/gen/indexes/views"
+ goa "goa.design/goa"
+)
+
+// AddRequestBody is the type of the "indexes" service "add" endpoint HTTP
+// request body.
+type AddRequestBody struct {
+ // The url of the index file
+ URL *string `form:"url,omitempty" json:"url,omitempty" xml:"url,omitempty"`
+}
+
+// RemoveRequestBody is the type of the "indexes" service "remove" endpoint
+// HTTP request body.
+type RemoveRequestBody struct {
+ // The url of the index file
+ URL *string `form:"url,omitempty" json:"url,omitempty" xml:"url,omitempty"`
+}
+
+// AddResponseBody is the type of the "indexes" service "add" endpoint HTTP
+// response body.
+type AddResponseBody struct {
+ // The status of the operation
+ Status string `form:"status" json:"status" xml:"status"`
+}
+
+// RemoveResponseBody is the type of the "indexes" service "remove" endpoint
+// HTTP response body.
+type RemoveResponseBody struct {
+ // The status of the operation
+ Status string `form:"status" json:"status" xml:"status"`
+}
+
+// ListInvalidURLResponseBody is the type of the "indexes" service "list"
+// endpoint HTTP response body for the "invalid_url" error.
+type ListInvalidURLResponseBody struct {
+ // Name is the name of this class of errors.
+ Name string `form:"name" json:"name" xml:"name"`
+ // ID is a unique identifier for this particular occurrence of the problem.
+ ID string `form:"id" json:"id" xml:"id"`
+ // Message is a human-readable explanation specific to this occurrence of the
+ // problem.
+ Message string `form:"message" json:"message" xml:"message"`
+ // Is the error temporary?
+ Temporary bool `form:"temporary" json:"temporary" xml:"temporary"`
+ // Is the error a timeout?
+ Timeout bool `form:"timeout" json:"timeout" xml:"timeout"`
+ // Is the error a server-side fault?
+ Fault bool `form:"fault" json:"fault" xml:"fault"`
+}
+
+// AddInvalidURLResponseBody is the type of the "indexes" service "add"
+// endpoint HTTP response body for the "invalid_url" error.
+type AddInvalidURLResponseBody struct {
+ // Name is the name of this class of errors.
+ Name string `form:"name" json:"name" xml:"name"`
+ // ID is a unique identifier for this particular occurrence of the problem.
+ ID string `form:"id" json:"id" xml:"id"`
+ // Message is a human-readable explanation specific to this occurrence of the
+ // problem.
+ Message string `form:"message" json:"message" xml:"message"`
+ // Is the error temporary?
+ Temporary bool `form:"temporary" json:"temporary" xml:"temporary"`
+ // Is the error a timeout?
+ Timeout bool `form:"timeout" json:"timeout" xml:"timeout"`
+ // Is the error a server-side fault?
+ Fault bool `form:"fault" json:"fault" xml:"fault"`
+}
+
+// RemoveInvalidURLResponseBody is the type of the "indexes" service "remove"
+// endpoint HTTP response body for the "invalid_url" error.
+type RemoveInvalidURLResponseBody struct {
+ // Name is the name of this class of errors.
+ Name string `form:"name" json:"name" xml:"name"`
+ // ID is a unique identifier for this particular occurrence of the problem.
+ ID string `form:"id" json:"id" xml:"id"`
+ // Message is a human-readable explanation specific to this occurrence of the
+ // problem.
+ Message string `form:"message" json:"message" xml:"message"`
+ // Is the error temporary?
+ Temporary bool `form:"temporary" json:"temporary" xml:"temporary"`
+ // Is the error a timeout?
+ Timeout bool `form:"timeout" json:"timeout" xml:"timeout"`
+ // Is the error a server-side fault?
+ Fault bool `form:"fault" json:"fault" xml:"fault"`
+}
+
+// NewAddResponseBody builds the HTTP response body from the result of the
+// "add" endpoint of the "indexes" service.
+func NewAddResponseBody(res *indexesviews.OperationView) *AddResponseBody {
+ body := &AddResponseBody{
+ Status: *res.Status,
+ }
+ return body
+}
+
+// NewRemoveResponseBody builds the HTTP response body from the result of the
+// "remove" endpoint of the "indexes" service.
+func NewRemoveResponseBody(res *indexesviews.OperationView) *RemoveResponseBody {
+ body := &RemoveResponseBody{
+ Status: *res.Status,
+ }
+ return body
+}
+
+// NewListInvalidURLResponseBody builds the HTTP response body from the result
+// of the "list" endpoint of the "indexes" service.
+func NewListInvalidURLResponseBody(res *goa.ServiceError) *ListInvalidURLResponseBody {
+ body := &ListInvalidURLResponseBody{
+ Name: res.Name,
+ ID: res.ID,
+ Message: res.Message,
+ Temporary: res.Temporary,
+ Timeout: res.Timeout,
+ Fault: res.Fault,
+ }
+ return body
+}
+
+// NewAddInvalidURLResponseBody builds the HTTP response body from the result
+// of the "add" endpoint of the "indexes" service.
+func NewAddInvalidURLResponseBody(res *goa.ServiceError) *AddInvalidURLResponseBody {
+ body := &AddInvalidURLResponseBody{
+ Name: res.Name,
+ ID: res.ID,
+ Message: res.Message,
+ Temporary: res.Temporary,
+ Timeout: res.Timeout,
+ Fault: res.Fault,
+ }
+ return body
+}
+
+// NewRemoveInvalidURLResponseBody builds the HTTP response body from the
+// result of the "remove" endpoint of the "indexes" service.
+func NewRemoveInvalidURLResponseBody(res *goa.ServiceError) *RemoveInvalidURLResponseBody {
+ body := &RemoveInvalidURLResponseBody{
+ Name: res.Name,
+ ID: res.ID,
+ Message: res.Message,
+ Temporary: res.Temporary,
+ Timeout: res.Timeout,
+ Fault: res.Fault,
+ }
+ return body
+}
+
+// NewAddIndexPayload builds an indexes service add endpoint payload.
+func NewAddIndexPayload(body *AddRequestBody) *indexes.IndexPayload {
+ v := &indexes.IndexPayload{
+ URL: *body.URL,
+ }
+ return v
+}
+
+// NewRemoveIndexPayload builds an indexes service remove endpoint payload.
+func NewRemoveIndexPayload(body *RemoveRequestBody) *indexes.IndexPayload {
+ v := &indexes.IndexPayload{
+ URL: *body.URL,
+ }
+ return v
+}
+
+// ValidateAddRequestBody runs the validations defined on AddRequestBody
+func ValidateAddRequestBody(body *AddRequestBody) (err error) {
+ if body.URL == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("url", "body"))
+ }
+ return
+}
+
+// ValidateRemoveRequestBody runs the validations defined on RemoveRequestBody
+func ValidateRemoveRequestBody(body *RemoveRequestBody) (err error) {
+ if body.URL == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("url", "body"))
+ }
+ return
+}
diff --git a/gen/http/openapi.json b/gen/http/openapi.json
new file mode 100644
index 000000000..231102e1a
--- /dev/null
+++ b/gen/http/openapi.json
@@ -0,0 +1 @@
+{"swagger":"2.0","info":{"title":"Arduino Create Agent","description":"A companion of Arduino Create. \n\tAllows the website to perform operations on the user computer, \n\tsuch as detecting which boards are connected and upload sketches on them.","version":""},"host":"localhost:80","consumes":["application/json","application/xml","application/gob"],"produces":["application/json","application/xml","application/gob"],"paths":{"/v2/docs/pkgs":{"get":{"summary":"Download docs/pkgs.html","operationId":"docs#/v2/docs/pkgs","responses":{"200":{"description":"File downloaded","schema":{"type":"file"}}},"schemes":["http"]}},"/v2/pkgs/indexes":{"get":{"tags":["indexes"],"summary":"list indexes","operationId":"indexes#list","responses":{"200":{"description":"OK response.","schema":{"type":"array","items":{"type":"string","example":"Harum nostrum qui ipsa minima quia dolorem."}}},"400":{"description":"Bad Request response.","schema":{"$ref":"#/definitions/Indexeslist_invalid_url_response_body"}}},"schemes":["http"]}},"/v2/pkgs/indexes/add":{"post":{"tags":["indexes"],"summary":"add indexes","operationId":"indexes#add","parameters":[{"name":"AddRequestBody","in":"body","required":true,"schema":{"$ref":"#/definitions/IndexesAddRequestBody","required":["url"]}}],"responses":{"200":{"description":"OK response.","schema":{"$ref":"#/definitions/IndexesAddResponseBody"}},"400":{"description":"Bad Request response.","schema":{"$ref":"#/definitions/Indexesadd_invalid_url_response_body"}}},"schemes":["http"]}},"/v2/pkgs/indexes/delete":{"post":{"tags":["indexes"],"summary":"remove indexes","operationId":"indexes#remove","parameters":[{"name":"RemoveRequestBody","in":"body","required":true,"schema":{"$ref":"#/definitions/IndexesRemoveRequestBody","required":["url"]}}],"responses":{"200":{"description":"OK response.","schema":{"$ref":"#/definitions/IndexesRemoveResponseBody"}},"400":{"description":"Bad Request 
response.","schema":{"$ref":"#/definitions/Indexesremove_invalid_url_response_body"}}},"schemes":["http"]}},"/v2/pkgs/tools/available":{"get":{"tags":["tools"],"summary":"available tools","operationId":"tools#available","responses":{"200":{"description":"OK response.","schema":{"$ref":"#/definitions/ToolsToolResponseCollection"}}},"schemes":["http"]}},"/v2/pkgs/tools/installed":{"get":{"tags":["tools"],"summary":"installed tools","operationId":"tools#installed","responses":{"200":{"description":"OK response.","schema":{"$ref":"#/definitions/ToolsToolResponseCollection"}}},"schemes":["http"]},"put":{"tags":["tools"],"summary":"install tools","operationId":"tools#install","parameters":[{"name":"InstallRequestBody","in":"body","required":true,"schema":{"$ref":"#/definitions/ToolsInstallRequestBody","required":["name","version","packager"]}}],"responses":{"200":{"description":"OK response.","schema":{"$ref":"#/definitions/ToolsInstallResponseBody"}}},"schemes":["http"]}},"/v2/pkgs/tools/installed/{packager}/{name}/{version}":{"delete":{"tags":["tools"],"summary":"remove tools","operationId":"tools#remove","parameters":[{"name":"packager","in":"path","description":"The packager of the tool","required":true,"type":"string"},{"name":"name","in":"path","description":"The name of the tool","required":true,"type":"string"},{"name":"version","in":"path","description":"The version of the tool","required":true,"type":"string"},{"name":"RemoveRequestBody","in":"body","required":true,"schema":{"$ref":"#/definitions/ToolsRemoveRequestBody"}}],"responses":{"200":{"description":"OK response.","schema":{"$ref":"#/definitions/ToolsRemoveResponseBody"}}},"schemes":["http"]}}},"definitions":{"IndexesAddRequestBody":{"title":"IndexesAddRequestBody","type":"object","properties":{"url":{"type":"string","description":"The url of the index 
file","example":"http://downloads.arduino.cc/packages/package_index.json"}},"example":{"url":"http://downloads.arduino.cc/packages/package_index.json"},"required":["url"]},"IndexesAddResponseBody":{"title":"Mediatype identifier: application/vnd.arduino.operation; view=default","type":"object","properties":{"status":{"type":"string","description":"The status of the operation","example":"ok"}},"description":"AddResponseBody result type (default view)","example":{"status":"ok"},"required":["status"]},"IndexesRemoveRequestBody":{"title":"IndexesRemoveRequestBody","type":"object","properties":{"url":{"type":"string","description":"The url of the index file","example":"http://downloads.arduino.cc/packages/package_index.json"}},"example":{"url":"http://downloads.arduino.cc/packages/package_index.json"},"required":["url"]},"IndexesRemoveResponseBody":{"title":"Mediatype identifier: application/vnd.arduino.operation; view=default","type":"object","properties":{"status":{"type":"string","description":"The status of the operation","example":"ok"}},"description":"RemoveResponseBody result type (default view)","example":{"status":"ok"},"required":["status"]},"Indexesadd_invalid_url_response_body":{"title":"Mediatype identifier: application/vnd.goa.error; view=default","type":"object","properties":{"fault":{"type":"boolean","description":"Is the error a server-side fault?","example":false},"id":{"type":"string","description":"ID is a unique identifier for this particular occurrence of the problem.","example":"123abc"},"message":{"type":"string","description":"Message is a human-readable explanation specific to this occurrence of the problem.","example":"parameter 'p' must be an integer"},"name":{"type":"string","description":"Name is the name of this class of errors.","example":"bad_request"},"temporary":{"type":"boolean","description":"Is the error temporary?","example":false},"timeout":{"type":"boolean","description":"Is the error a 
timeout?","example":false}},"description":"url invalid (default view)","example":{"fault":true,"id":"123abc","message":"parameter 'p' must be an integer","name":"bad_request","temporary":true,"timeout":true},"required":["name","id","message","temporary","timeout","fault"]},"Indexeslist_invalid_url_response_body":{"title":"Mediatype identifier: application/vnd.goa.error; view=default","type":"object","properties":{"fault":{"type":"boolean","description":"Is the error a server-side fault?","example":true},"id":{"type":"string","description":"ID is a unique identifier for this particular occurrence of the problem.","example":"123abc"},"message":{"type":"string","description":"Message is a human-readable explanation specific to this occurrence of the problem.","example":"parameter 'p' must be an integer"},"name":{"type":"string","description":"Name is the name of this class of errors.","example":"bad_request"},"temporary":{"type":"boolean","description":"Is the error temporary?","example":false},"timeout":{"type":"boolean","description":"Is the error a timeout?","example":false}},"description":"url invalid (default view)","example":{"fault":false,"id":"123abc","message":"parameter 'p' must be an integer","name":"bad_request","temporary":false,"timeout":false},"required":["name","id","message","temporary","timeout","fault"]},"Indexesremove_invalid_url_response_body":{"title":"Mediatype identifier: application/vnd.goa.error; view=default","type":"object","properties":{"fault":{"type":"boolean","description":"Is the error a server-side fault?","example":false},"id":{"type":"string","description":"ID is a unique identifier for this particular occurrence of the problem.","example":"123abc"},"message":{"type":"string","description":"Message is a human-readable explanation specific to this occurrence of the problem.","example":"parameter 'p' must be an integer"},"name":{"type":"string","description":"Name is the name of this class of 
errors.","example":"bad_request"},"temporary":{"type":"boolean","description":"Is the error temporary?","example":true},"timeout":{"type":"boolean","description":"Is the error a timeout?","example":true}},"description":"url invalid (default view)","example":{"fault":false,"id":"123abc","message":"parameter 'p' must be an integer","name":"bad_request","temporary":true,"timeout":false},"required":["name","id","message","temporary","timeout","fault"]},"ToolResponse":{"title":"Mediatype identifier: application/vnd.arduino.tool; view=default","type":"object","properties":{"name":{"type":"string","description":"The name of the tool","example":"avrdude"},"packager":{"type":"string","description":"The packager of the tool","example":"arduino"},"version":{"type":"string","description":"The version of the tool","example":"6.3.0-arduino9"}},"description":"A tool is an executable program that can upload sketches. (default view)","example":{"name":"avrdude","packager":"arduino","version":"6.3.0-arduino9"},"required":["name","version","packager"]},"ToolsInstallRequestBody":{"title":"ToolsInstallRequestBody","type":"object","properties":{"checksum":{"type":"string","description":"A checksum of the archive. Mandatory when url is present. \n\tThis ensures that the package is downloaded correcly.","example":"Occaecati eum."},"name":{"type":"string","description":"The name of the tool","example":"avrdude"},"packager":{"type":"string","description":"The packager of the tool","example":"arduino"},"url":{"type":"string","description":"The url where the package can be found. Optional. 
\n\tIf present checksum must also be present.","example":"Sit quod dolor repellat."},"version":{"type":"string","description":"The version of the tool","example":"6.3.0-arduino9"}},"example":{"checksum":"Id ut totam.","name":"avrdude","packager":"arduino","url":"Vero ipsum corporis nihil.","version":"6.3.0-arduino9"},"required":["name","version","packager"]},"ToolsInstallResponseBody":{"title":"Mediatype identifier: application/vnd.arduino.operation; view=default","type":"object","properties":{"status":{"type":"string","description":"The status of the operation","example":"ok"}},"description":"InstallResponseBody result type (default view)","example":{"status":"ok"},"required":["status"]},"ToolsRemoveRequestBody":{"title":"ToolsRemoveRequestBody","type":"object","properties":{"checksum":{"type":"string","description":"A checksum of the archive. Mandatory when url is present. \n\tThis ensures that the package is downloaded correcly.","example":"Nobis officia optio inventore."},"url":{"type":"string","description":"The url where the package can be found. Optional. 
\n\tIf present checksum must also be present.","example":"Inventore exercitationem."}},"example":{"checksum":"Qui modi dolorem.","url":"In voluptatibus."}},"ToolsRemoveResponseBody":{"title":"Mediatype identifier: application/vnd.arduino.operation; view=default","type":"object","properties":{"status":{"type":"string","description":"The status of the operation","example":"ok"}},"description":"RemoveResponseBody result type (default view)","example":{"status":"ok"},"required":["status"]},"ToolsToolResponseCollection":{"title":"Mediatype identifier: application/vnd.arduino.tool; type=collection; view=default","type":"array","items":{"$ref":"#/definitions/ToolResponse"},"description":"AvailableResponseBody is the result type for an array of ToolResponse (default view)","example":[{"name":"avrdude","packager":"arduino","version":"6.3.0-arduino9"},{"name":"avrdude","packager":"arduino","version":"6.3.0-arduino9"},{"name":"avrdude","packager":"arduino","version":"6.3.0-arduino9"},{"name":"avrdude","packager":"arduino","version":"6.3.0-arduino9"}]}}}
\ No newline at end of file
diff --git a/gen/http/openapi.yaml b/gen/http/openapi.yaml
new file mode 100644
index 000000000..00cf0a760
--- /dev/null
+++ b/gen/http/openapi.yaml
@@ -0,0 +1,491 @@
+swagger: "2.0"
+info:
+ title: Arduino Create Agent
+ description: "A companion of Arduino Create. \n\tAllows the website to perform operations
+ on the user computer, \n\tsuch as detecting which boards are connected and upload
+ sketches on them."
+ version: ""
+host: localhost:80
+consumes:
+- application/json
+- application/xml
+- application/gob
+produces:
+- application/json
+- application/xml
+- application/gob
+paths:
+ /v2/docs/pkgs:
+ get:
+ summary: Download docs/pkgs.html
+ operationId: docs#/v2/docs/pkgs
+ responses:
+ "200":
+ description: File downloaded
+ schema:
+ type: file
+ schemes:
+ - http
+ /v2/pkgs/indexes:
+ get:
+ tags:
+ - indexes
+ summary: list indexes
+ operationId: indexes#list
+ responses:
+ "200":
+ description: OK response.
+ schema:
+ type: array
+ items:
+ type: string
+ example: Harum nostrum qui ipsa minima quia dolorem.
+ "400":
+ description: Bad Request response.
+ schema:
+ $ref: '#/definitions/Indexeslist_invalid_url_response_body'
+ schemes:
+ - http
+ /v2/pkgs/indexes/add:
+ post:
+ tags:
+ - indexes
+ summary: add indexes
+ operationId: indexes#add
+ parameters:
+ - name: AddRequestBody
+ in: body
+ required: true
+ schema:
+ $ref: '#/definitions/IndexesAddRequestBody'
+ required:
+ - url
+ responses:
+ "200":
+ description: OK response.
+ schema:
+ $ref: '#/definitions/IndexesAddResponseBody'
+ "400":
+ description: Bad Request response.
+ schema:
+ $ref: '#/definitions/Indexesadd_invalid_url_response_body'
+ schemes:
+ - http
+ /v2/pkgs/indexes/delete:
+ post:
+ tags:
+ - indexes
+ summary: remove indexes
+ operationId: indexes#remove
+ parameters:
+ - name: RemoveRequestBody
+ in: body
+ required: true
+ schema:
+ $ref: '#/definitions/IndexesRemoveRequestBody'
+ required:
+ - url
+ responses:
+ "200":
+ description: OK response.
+ schema:
+ $ref: '#/definitions/IndexesRemoveResponseBody'
+ "400":
+ description: Bad Request response.
+ schema:
+ $ref: '#/definitions/Indexesremove_invalid_url_response_body'
+ schemes:
+ - http
+ /v2/pkgs/tools/available:
+ get:
+ tags:
+ - tools
+ summary: available tools
+ operationId: tools#available
+ responses:
+ "200":
+ description: OK response.
+ schema:
+ $ref: '#/definitions/ToolsToolResponseCollection'
+ schemes:
+ - http
+ /v2/pkgs/tools/installed:
+ get:
+ tags:
+ - tools
+ summary: installed tools
+ operationId: tools#installed
+ responses:
+ "200":
+ description: OK response.
+ schema:
+ $ref: '#/definitions/ToolsToolResponseCollection'
+ schemes:
+ - http
+ put:
+ tags:
+ - tools
+ summary: install tools
+ operationId: tools#install
+ parameters:
+ - name: InstallRequestBody
+ in: body
+ required: true
+ schema:
+ $ref: '#/definitions/ToolsInstallRequestBody'
+ required:
+ - name
+ - version
+ - packager
+ responses:
+ "200":
+ description: OK response.
+ schema:
+ $ref: '#/definitions/ToolsInstallResponseBody'
+ schemes:
+ - http
+ /v2/pkgs/tools/installed/{packager}/{name}/{version}:
+ delete:
+ tags:
+ - tools
+ summary: remove tools
+ operationId: tools#remove
+ parameters:
+ - name: packager
+ in: path
+ description: The packager of the tool
+ required: true
+ type: string
+ - name: name
+ in: path
+ description: The name of the tool
+ required: true
+ type: string
+ - name: version
+ in: path
+ description: The version of the tool
+ required: true
+ type: string
+ - name: RemoveRequestBody
+ in: body
+ required: true
+ schema:
+ $ref: '#/definitions/ToolsRemoveRequestBody'
+ responses:
+ "200":
+ description: OK response.
+ schema:
+ $ref: '#/definitions/ToolsRemoveResponseBody'
+ schemes:
+ - http
+definitions:
+ IndexesAddRequestBody:
+ title: IndexesAddRequestBody
+ type: object
+ properties:
+ url:
+ type: string
+ description: The url of the index file
+ example: http://downloads.arduino.cc/packages/package_index.json
+ example:
+ url: http://downloads.arduino.cc/packages/package_index.json
+ required:
+ - url
+ IndexesAddResponseBody:
+ title: 'Mediatype identifier: application/vnd.arduino.operation; view=default'
+ type: object
+ properties:
+ status:
+ type: string
+ description: The status of the operation
+ example: ok
+ description: AddResponseBody result type (default view)
+ example:
+ status: ok
+ required:
+ - status
+ IndexesRemoveRequestBody:
+ title: IndexesRemoveRequestBody
+ type: object
+ properties:
+ url:
+ type: string
+ description: The url of the index file
+ example: http://downloads.arduino.cc/packages/package_index.json
+ example:
+ url: http://downloads.arduino.cc/packages/package_index.json
+ required:
+ - url
+ IndexesRemoveResponseBody:
+ title: 'Mediatype identifier: application/vnd.arduino.operation; view=default'
+ type: object
+ properties:
+ status:
+ type: string
+ description: The status of the operation
+ example: ok
+ description: RemoveResponseBody result type (default view)
+ example:
+ status: ok
+ required:
+ - status
+ Indexesadd_invalid_url_response_body:
+ title: 'Mediatype identifier: application/vnd.goa.error; view=default'
+ type: object
+ properties:
+ fault:
+ type: boolean
+ description: Is the error a server-side fault?
+ example: false
+ id:
+ type: string
+ description: ID is a unique identifier for this particular occurrence of the
+ problem.
+ example: 123abc
+ message:
+ type: string
+ description: Message is a human-readable explanation specific to this occurrence
+ of the problem.
+ example: parameter 'p' must be an integer
+ name:
+ type: string
+ description: Name is the name of this class of errors.
+ example: bad_request
+ temporary:
+ type: boolean
+ description: Is the error temporary?
+ example: false
+ timeout:
+ type: boolean
+ description: Is the error a timeout?
+ example: false
+ description: url invalid (default view)
+ example:
+ fault: true
+ id: 123abc
+ message: parameter 'p' must be an integer
+ name: bad_request
+ temporary: true
+ timeout: true
+ required:
+ - name
+ - id
+ - message
+ - temporary
+ - timeout
+ - fault
+ Indexeslist_invalid_url_response_body:
+ title: 'Mediatype identifier: application/vnd.goa.error; view=default'
+ type: object
+ properties:
+ fault:
+ type: boolean
+ description: Is the error a server-side fault?
+ example: true
+ id:
+ type: string
+ description: ID is a unique identifier for this particular occurrence of the
+ problem.
+ example: 123abc
+ message:
+ type: string
+ description: Message is a human-readable explanation specific to this occurrence
+ of the problem.
+ example: parameter 'p' must be an integer
+ name:
+ type: string
+ description: Name is the name of this class of errors.
+ example: bad_request
+ temporary:
+ type: boolean
+ description: Is the error temporary?
+ example: false
+ timeout:
+ type: boolean
+ description: Is the error a timeout?
+ example: false
+ description: url invalid (default view)
+ example:
+ fault: false
+ id: 123abc
+ message: parameter 'p' must be an integer
+ name: bad_request
+ temporary: false
+ timeout: false
+ required:
+ - name
+ - id
+ - message
+ - temporary
+ - timeout
+ - fault
+ Indexesremove_invalid_url_response_body:
+ title: 'Mediatype identifier: application/vnd.goa.error; view=default'
+ type: object
+ properties:
+ fault:
+ type: boolean
+ description: Is the error a server-side fault?
+ example: false
+ id:
+ type: string
+ description: ID is a unique identifier for this particular occurrence of the
+ problem.
+ example: 123abc
+ message:
+ type: string
+ description: Message is a human-readable explanation specific to this occurrence
+ of the problem.
+ example: parameter 'p' must be an integer
+ name:
+ type: string
+ description: Name is the name of this class of errors.
+ example: bad_request
+ temporary:
+ type: boolean
+ description: Is the error temporary?
+ example: true
+ timeout:
+ type: boolean
+ description: Is the error a timeout?
+ example: true
+ description: url invalid (default view)
+ example:
+ fault: false
+ id: 123abc
+ message: parameter 'p' must be an integer
+ name: bad_request
+ temporary: true
+ timeout: false
+ required:
+ - name
+ - id
+ - message
+ - temporary
+ - timeout
+ - fault
+ ToolResponse:
+ title: 'Mediatype identifier: application/vnd.arduino.tool; view=default'
+ type: object
+ properties:
+ name:
+ type: string
+ description: The name of the tool
+ example: avrdude
+ packager:
+ type: string
+ description: The packager of the tool
+ example: arduino
+ version:
+ type: string
+ description: The version of the tool
+ example: 6.3.0-arduino9
+ description: A tool is an executable program that can upload sketches. (default
+ view)
+ example:
+ name: avrdude
+ packager: arduino
+ version: 6.3.0-arduino9
+ required:
+ - name
+ - version
+ - packager
+ ToolsInstallRequestBody:
+ title: ToolsInstallRequestBody
+ type: object
+ properties:
+ checksum:
+ type: string
+ description: "A checksum of the archive. Mandatory when url is present. \n\tThis
+ ensures that the package is downloaded correctly."
+ example: Occaecati eum.
+ name:
+ type: string
+ description: The name of the tool
+ example: avrdude
+ packager:
+ type: string
+ description: The packager of the tool
+ example: arduino
+ url:
+ type: string
+ description: "The url where the package can be found. Optional. \n\tIf present
+ checksum must also be present."
+ example: Sit quod dolor repellat.
+ version:
+ type: string
+ description: The version of the tool
+ example: 6.3.0-arduino9
+ example:
+ checksum: Id ut totam.
+ name: avrdude
+ packager: arduino
+ url: Vero ipsum corporis nihil.
+ version: 6.3.0-arduino9
+ required:
+ - name
+ - version
+ - packager
+ ToolsInstallResponseBody:
+ title: 'Mediatype identifier: application/vnd.arduino.operation; view=default'
+ type: object
+ properties:
+ status:
+ type: string
+ description: The status of the operation
+ example: ok
+ description: InstallResponseBody result type (default view)
+ example:
+ status: ok
+ required:
+ - status
+ ToolsRemoveRequestBody:
+ title: ToolsRemoveRequestBody
+ type: object
+ properties:
+ checksum:
+ type: string
+ description: "A checksum of the archive. Mandatory when url is present. \n\tThis
+ ensures that the package is downloaded correctly."
+ example: Nobis officia optio inventore.
+ url:
+ type: string
+ description: "The url where the package can be found. Optional. \n\tIf present
+ checksum must also be present."
+ example: Inventore exercitationem.
+ example:
+ checksum: Qui modi dolorem.
+ url: In voluptatibus.
+ ToolsRemoveResponseBody:
+ title: 'Mediatype identifier: application/vnd.arduino.operation; view=default'
+ type: object
+ properties:
+ status:
+ type: string
+ description: The status of the operation
+ example: ok
+ description: RemoveResponseBody result type (default view)
+ example:
+ status: ok
+ required:
+ - status
+ ToolsToolResponseCollection:
+ title: 'Mediatype identifier: application/vnd.arduino.tool; type=collection; view=default'
+ type: array
+ items:
+ $ref: '#/definitions/ToolResponse'
+ description: AvailableResponseBody is the result type for an array of ToolResponse
+ (default view)
+ example:
+ - name: avrdude
+ packager: arduino
+ version: 6.3.0-arduino9
+ - name: avrdude
+ packager: arduino
+ version: 6.3.0-arduino9
+ - name: avrdude
+ packager: arduino
+ version: 6.3.0-arduino9
+ - name: avrdude
+ packager: arduino
+ version: 6.3.0-arduino9
diff --git a/gen/http/tools/client/cli.go b/gen/http/tools/client/cli.go
new file mode 100644
index 000000000..d708a242e
--- /dev/null
+++ b/gen/http/tools/client/cli.go
@@ -0,0 +1,75 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// tools HTTP client CLI support package
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
+
+import (
+ "encoding/json"
+ "fmt"
+
+ tools "github.com/arduino/arduino-create-agent/gen/tools"
+)
+
+// BuildInstallPayload builds the payload for the tools install endpoint from
+// CLI flags.
+func BuildInstallPayload(toolsInstallBody string) (*tools.ToolPayload, error) {
+ var err error
+ var body InstallRequestBody
+ {
+ err = json.Unmarshal([]byte(toolsInstallBody), &body)
+ if err != nil {
+ return nil, fmt.Errorf("invalid JSON for body, example of valid JSON:\n%s", "'{\n \"checksum\": \"Quam voluptas voluptates expedita rem ipsum.\",\n \"name\": \"avrdude\",\n \"packager\": \"arduino\",\n \"url\": \"Iusto libero explicabo beatae dolor adipisci nulla.\",\n \"version\": \"6.3.0-arduino9\"\n }'")
+ }
+ }
+ if err != nil {
+ return nil, err
+ }
+ v := &tools.ToolPayload{
+ Name: body.Name,
+ Version: body.Version,
+ Packager: body.Packager,
+ URL: body.URL,
+ Checksum: body.Checksum,
+ }
+ return v, nil
+}
+
+// BuildRemovePayload builds the payload for the tools remove endpoint from CLI
+// flags.
+func BuildRemovePayload(toolsRemoveBody string, toolsRemovePackager string, toolsRemoveName string, toolsRemoveVersion string) (*tools.ToolPayload, error) {
+ var err error
+ var body RemoveRequestBody
+ {
+ err = json.Unmarshal([]byte(toolsRemoveBody), &body)
+ if err != nil {
+ return nil, fmt.Errorf("invalid JSON for body, example of valid JSON:\n%s", "'{\n \"url\": \"http://downloads.arduino.cc/packages/package_index.json\"\n }'")
+ }
+ }
+ var packager string
+ {
+ packager = toolsRemovePackager
+ }
+ var name string
+ {
+ name = toolsRemoveName
+ }
+ var version string
+ {
+ version = toolsRemoveVersion
+ }
+ if err != nil {
+ return nil, err
+ }
+ v := &tools.ToolPayload{
+ URL: body.URL,
+ Checksum: body.Checksum,
+ }
+ v.Packager = packager
+ v.Name = name
+ v.Version = version
+ return v, nil
+}
diff --git a/gen/http/tools/client/client.go b/gen/http/tools/client/client.go
new file mode 100644
index 000000000..b2bb68414
--- /dev/null
+++ b/gen/http/tools/client/client.go
@@ -0,0 +1,155 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// tools client HTTP transport
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
+
+import (
+ "context"
+ "net/http"
+
+ goa "goa.design/goa"
+ goahttp "goa.design/goa/http"
+)
+
+// Client lists the tools service endpoint HTTP clients.
+type Client struct {
+ // Available Doer is the HTTP client used to make requests to the available
+ // endpoint.
+ AvailableDoer goahttp.Doer
+
+ // Installed Doer is the HTTP client used to make requests to the installed
+ // endpoint.
+ InstalledDoer goahttp.Doer
+
+ // Install Doer is the HTTP client used to make requests to the install
+ // endpoint.
+ InstallDoer goahttp.Doer
+
+ // Remove Doer is the HTTP client used to make requests to the remove endpoint.
+ RemoveDoer goahttp.Doer
+
+ // RestoreResponseBody controls whether the response bodies are reset after
+ // decoding so they can be read again.
+ RestoreResponseBody bool
+
+ scheme string
+ host string
+ encoder func(*http.Request) goahttp.Encoder
+ decoder func(*http.Response) goahttp.Decoder
+}
+
+// NewClient instantiates HTTP clients for all the tools service servers.
+func NewClient(
+ scheme string,
+ host string,
+ doer goahttp.Doer,
+ enc func(*http.Request) goahttp.Encoder,
+ dec func(*http.Response) goahttp.Decoder,
+ restoreBody bool,
+) *Client {
+ return &Client{
+ AvailableDoer: doer,
+ InstalledDoer: doer,
+ InstallDoer: doer,
+ RemoveDoer: doer,
+ RestoreResponseBody: restoreBody,
+ scheme: scheme,
+ host: host,
+ decoder: dec,
+ encoder: enc,
+ }
+}
+
+// Available returns an endpoint that makes HTTP requests to the tools service
+// available server.
+func (c *Client) Available() goa.Endpoint {
+ var (
+ decodeResponse = DecodeAvailableResponse(c.decoder, c.RestoreResponseBody)
+ )
+ return func(ctx context.Context, v interface{}) (interface{}, error) {
+ req, err := c.BuildAvailableRequest(ctx, v)
+ if err != nil {
+ return nil, err
+ }
+ resp, err := c.AvailableDoer.Do(req)
+
+ if err != nil {
+ return nil, goahttp.ErrRequestError("tools", "available", err)
+ }
+ return decodeResponse(resp)
+ }
+}
+
+// Installed returns an endpoint that makes HTTP requests to the tools service
+// installed server.
+func (c *Client) Installed() goa.Endpoint {
+ var (
+ decodeResponse = DecodeInstalledResponse(c.decoder, c.RestoreResponseBody)
+ )
+ return func(ctx context.Context, v interface{}) (interface{}, error) {
+ req, err := c.BuildInstalledRequest(ctx, v)
+ if err != nil {
+ return nil, err
+ }
+ resp, err := c.InstalledDoer.Do(req)
+
+ if err != nil {
+ return nil, goahttp.ErrRequestError("tools", "installed", err)
+ }
+ return decodeResponse(resp)
+ }
+}
+
+// Install returns an endpoint that makes HTTP requests to the tools service
+// install server.
+func (c *Client) Install() goa.Endpoint {
+ var (
+ encodeRequest = EncodeInstallRequest(c.encoder)
+ decodeResponse = DecodeInstallResponse(c.decoder, c.RestoreResponseBody)
+ )
+ return func(ctx context.Context, v interface{}) (interface{}, error) {
+ req, err := c.BuildInstallRequest(ctx, v)
+ if err != nil {
+ return nil, err
+ }
+ err = encodeRequest(req, v)
+ if err != nil {
+ return nil, err
+ }
+ resp, err := c.InstallDoer.Do(req)
+
+ if err != nil {
+ return nil, goahttp.ErrRequestError("tools", "install", err)
+ }
+ return decodeResponse(resp)
+ }
+}
+
+// Remove returns an endpoint that makes HTTP requests to the tools service
+// remove server.
+func (c *Client) Remove() goa.Endpoint {
+ var (
+ encodeRequest = EncodeRemoveRequest(c.encoder)
+ decodeResponse = DecodeRemoveResponse(c.decoder, c.RestoreResponseBody)
+ )
+ return func(ctx context.Context, v interface{}) (interface{}, error) {
+ req, err := c.BuildRemoveRequest(ctx, v)
+ if err != nil {
+ return nil, err
+ }
+ err = encodeRequest(req, v)
+ if err != nil {
+ return nil, err
+ }
+ resp, err := c.RemoveDoer.Do(req)
+
+ if err != nil {
+ return nil, goahttp.ErrRequestError("tools", "remove", err)
+ }
+ return decodeResponse(resp)
+ }
+}
diff --git a/gen/http/tools/client/encode_decode.go b/gen/http/tools/client/encode_decode.go
new file mode 100644
index 000000000..beda9d7a8
--- /dev/null
+++ b/gen/http/tools/client/encode_decode.go
@@ -0,0 +1,294 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// tools HTTP client encoders and decoders
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
+
+import (
+ "bytes"
+ "context"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+
+ tools "github.com/arduino/arduino-create-agent/gen/tools"
+ toolsviews "github.com/arduino/arduino-create-agent/gen/tools/views"
+ goahttp "goa.design/goa/http"
+)
+
+// BuildAvailableRequest instantiates an HTTP request object with method and
+// path set to call the "tools" service "available" endpoint
+func (c *Client) BuildAvailableRequest(ctx context.Context, v interface{}) (*http.Request, error) {
+ u := &url.URL{Scheme: c.scheme, Host: c.host, Path: AvailableToolsPath()}
+ req, err := http.NewRequest("GET", u.String(), nil)
+ if err != nil {
+ return nil, goahttp.ErrInvalidURL("tools", "available", u.String(), err)
+ }
+ if ctx != nil {
+ req = req.WithContext(ctx)
+ }
+
+ return req, nil
+}
+
+// DecodeAvailableResponse returns a decoder for responses returned by the
+// tools available endpoint. restoreBody controls whether the response body
+// should be restored after having been read.
+func DecodeAvailableResponse(decoder func(*http.Response) goahttp.Decoder, restoreBody bool) func(*http.Response) (interface{}, error) {
+ return func(resp *http.Response) (interface{}, error) {
+ if restoreBody {
+ b, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return nil, err
+ }
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ defer func() {
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ }()
+ } else {
+ defer resp.Body.Close()
+ }
+ switch resp.StatusCode {
+ case http.StatusOK:
+ var (
+ body AvailableResponseBody
+ err error
+ )
+ err = decoder(resp).Decode(&body)
+ if err != nil {
+ return nil, goahttp.ErrDecodingError("tools", "available", err)
+ }
+ p := NewAvailableToolCollectionOK(body)
+ view := "default"
+ vres := toolsviews.ToolCollection{p, view}
+ if err = toolsviews.ValidateToolCollection(vres); err != nil {
+ return nil, goahttp.ErrValidationError("tools", "available", err)
+ }
+ res := tools.NewToolCollection(vres)
+ return res, nil
+ default:
+ body, _ := ioutil.ReadAll(resp.Body)
+ return nil, goahttp.ErrInvalidResponse("tools", "available", resp.StatusCode, string(body))
+ }
+ }
+}
+
+// BuildInstalledRequest instantiates an HTTP request object with method and
+// path set to call the "tools" service "installed" endpoint
+func (c *Client) BuildInstalledRequest(ctx context.Context, v interface{}) (*http.Request, error) {
+ u := &url.URL{Scheme: c.scheme, Host: c.host, Path: InstalledToolsPath()}
+ req, err := http.NewRequest("GET", u.String(), nil)
+ if err != nil {
+ return nil, goahttp.ErrInvalidURL("tools", "installed", u.String(), err)
+ }
+ if ctx != nil {
+ req = req.WithContext(ctx)
+ }
+
+ return req, nil
+}
+
+// DecodeInstalledResponse returns a decoder for responses returned by the
+// tools installed endpoint. restoreBody controls whether the response body
+// should be restored after having been read.
+func DecodeInstalledResponse(decoder func(*http.Response) goahttp.Decoder, restoreBody bool) func(*http.Response) (interface{}, error) {
+ return func(resp *http.Response) (interface{}, error) {
+ if restoreBody {
+ b, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return nil, err
+ }
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ defer func() {
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ }()
+ } else {
+ defer resp.Body.Close()
+ }
+ switch resp.StatusCode {
+ case http.StatusOK:
+ var (
+ body InstalledResponseBody
+ err error
+ )
+ err = decoder(resp).Decode(&body)
+ if err != nil {
+ return nil, goahttp.ErrDecodingError("tools", "installed", err)
+ }
+ p := NewInstalledToolCollectionOK(body)
+ view := "default"
+ vres := toolsviews.ToolCollection{p, view}
+ if err = toolsviews.ValidateToolCollection(vres); err != nil {
+ return nil, goahttp.ErrValidationError("tools", "installed", err)
+ }
+ res := tools.NewToolCollection(vres)
+ return res, nil
+ default:
+ body, _ := ioutil.ReadAll(resp.Body)
+ return nil, goahttp.ErrInvalidResponse("tools", "installed", resp.StatusCode, string(body))
+ }
+ }
+}
+
+// BuildInstallRequest instantiates an HTTP request object with method and path
+// set to call the "tools" service "install" endpoint
+func (c *Client) BuildInstallRequest(ctx context.Context, v interface{}) (*http.Request, error) {
+ u := &url.URL{Scheme: c.scheme, Host: c.host, Path: InstallToolsPath()}
+ req, err := http.NewRequest("PUT", u.String(), nil)
+ if err != nil {
+ return nil, goahttp.ErrInvalidURL("tools", "install", u.String(), err)
+ }
+ if ctx != nil {
+ req = req.WithContext(ctx)
+ }
+
+ return req, nil
+}
+
+// EncodeInstallRequest returns an encoder for requests sent to the tools
+// install server.
+func EncodeInstallRequest(encoder func(*http.Request) goahttp.Encoder) func(*http.Request, interface{}) error {
+ return func(req *http.Request, v interface{}) error {
+ p, ok := v.(*tools.ToolPayload)
+ if !ok {
+ return goahttp.ErrInvalidType("tools", "install", "*tools.ToolPayload", v)
+ }
+ body := NewInstallRequestBody(p)
+ if err := encoder(req).Encode(&body); err != nil {
+ return goahttp.ErrEncodingError("tools", "install", err)
+ }
+ return nil
+ }
+}
+
+// DecodeInstallResponse returns a decoder for responses returned by the tools
+// install endpoint. restoreBody controls whether the response body should be
+// restored after having been read.
+func DecodeInstallResponse(decoder func(*http.Response) goahttp.Decoder, restoreBody bool) func(*http.Response) (interface{}, error) {
+ return func(resp *http.Response) (interface{}, error) {
+ if restoreBody {
+ b, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return nil, err
+ }
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ defer func() {
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ }()
+ } else {
+ defer resp.Body.Close()
+ }
+ switch resp.StatusCode {
+ case http.StatusOK:
+ var (
+ body InstallResponseBody
+ err error
+ )
+ err = decoder(resp).Decode(&body)
+ if err != nil {
+ return nil, goahttp.ErrDecodingError("tools", "install", err)
+ }
+ p := NewInstallOperationOK(&body)
+ view := "default"
+ vres := &toolsviews.Operation{p, view}
+ if err = toolsviews.ValidateOperation(vres); err != nil {
+ return nil, goahttp.ErrValidationError("tools", "install", err)
+ }
+ res := tools.NewOperation(vres)
+ return res, nil
+ default:
+ body, _ := ioutil.ReadAll(resp.Body)
+ return nil, goahttp.ErrInvalidResponse("tools", "install", resp.StatusCode, string(body))
+ }
+ }
+}
+
+// BuildRemoveRequest instantiates an HTTP request object with method and path
+// set to call the "tools" service "remove" endpoint
+func (c *Client) BuildRemoveRequest(ctx context.Context, v interface{}) (*http.Request, error) {
+ var (
+ packager string
+ name string
+ version string
+ )
+ {
+ p, ok := v.(*tools.ToolPayload)
+ if !ok {
+ return nil, goahttp.ErrInvalidType("tools", "remove", "*tools.ToolPayload", v)
+ }
+ packager = p.Packager
+ name = p.Name
+ version = p.Version
+ }
+ u := &url.URL{Scheme: c.scheme, Host: c.host, Path: RemoveToolsPath(packager, name, version)}
+ req, err := http.NewRequest("DELETE", u.String(), nil)
+ if err != nil {
+ return nil, goahttp.ErrInvalidURL("tools", "remove", u.String(), err)
+ }
+ if ctx != nil {
+ req = req.WithContext(ctx)
+ }
+
+ return req, nil
+}
+
+// EncodeRemoveRequest returns an encoder for requests sent to the tools remove
+// server.
+func EncodeRemoveRequest(encoder func(*http.Request) goahttp.Encoder) func(*http.Request, interface{}) error {
+ return func(req *http.Request, v interface{}) error {
+ p, ok := v.(*tools.ToolPayload)
+ if !ok {
+ return goahttp.ErrInvalidType("tools", "remove", "*tools.ToolPayload", v)
+ }
+ body := NewRemoveRequestBody(p)
+ if err := encoder(req).Encode(&body); err != nil {
+ return goahttp.ErrEncodingError("tools", "remove", err)
+ }
+ return nil
+ }
+}
+
+// DecodeRemoveResponse returns a decoder for responses returned by the tools
+// remove endpoint. restoreBody controls whether the response body should be
+// restored after having been read.
+func DecodeRemoveResponse(decoder func(*http.Response) goahttp.Decoder, restoreBody bool) func(*http.Response) (interface{}, error) {
+ return func(resp *http.Response) (interface{}, error) {
+ if restoreBody {
+ b, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return nil, err
+ }
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ defer func() {
+ resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+ }()
+ } else {
+ defer resp.Body.Close()
+ }
+ switch resp.StatusCode {
+ case http.StatusOK:
+ var (
+ body RemoveResponseBody
+ err error
+ )
+ err = decoder(resp).Decode(&body)
+ if err != nil {
+ return nil, goahttp.ErrDecodingError("tools", "remove", err)
+ }
+ p := NewRemoveOperationOK(&body)
+ view := "default"
+ vres := &toolsviews.Operation{p, view}
+ if err = toolsviews.ValidateOperation(vres); err != nil {
+ return nil, goahttp.ErrValidationError("tools", "remove", err)
+ }
+ res := tools.NewOperation(vres)
+ return res, nil
+ default:
+ body, _ := ioutil.ReadAll(resp.Body)
+ return nil, goahttp.ErrInvalidResponse("tools", "remove", resp.StatusCode, string(body))
+ }
+ }
+}
diff --git a/gen/http/tools/client/paths.go b/gen/http/tools/client/paths.go
new file mode 100644
index 000000000..ab66d03f4
--- /dev/null
+++ b/gen/http/tools/client/paths.go
@@ -0,0 +1,32 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// HTTP request path constructors for the tools service.
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
+
+import (
+ "fmt"
+)
+
+// AvailableToolsPath returns the URL path to the tools service available HTTP endpoint.
+func AvailableToolsPath() string {
+ return "/v2/pkgs/tools/available"
+}
+
+// InstalledToolsPath returns the URL path to the tools service installed HTTP endpoint.
+func InstalledToolsPath() string {
+ return "/v2/pkgs/tools/installed"
+}
+
+// InstallToolsPath returns the URL path to the tools service install HTTP endpoint.
+func InstallToolsPath() string {
+ return "/v2/pkgs/tools/installed"
+}
+
+// RemoveToolsPath returns the URL path to the tools service remove HTTP endpoint.
+func RemoveToolsPath(packager string, name string, version string) string {
+ return fmt.Sprintf("/v2/pkgs/tools/installed/%v/%v/%v", packager, name, version)
+}
diff --git a/gen/http/tools/client/types.go b/gen/http/tools/client/types.go
new file mode 100644
index 000000000..255a61504
--- /dev/null
+++ b/gen/http/tools/client/types.go
@@ -0,0 +1,157 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// tools HTTP client types
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package client
+
+import (
+ tools "github.com/arduino/arduino-create-agent/gen/tools"
+ toolsviews "github.com/arduino/arduino-create-agent/gen/tools/views"
+ goa "goa.design/goa"
+)
+
+// InstallRequestBody is the type of the "tools" service "install" endpoint
+// HTTP request body.
+type InstallRequestBody struct {
+ // The name of the tool
+ Name string `form:"name" json:"name" xml:"name"`
+ // The version of the tool
+ Version string `form:"version" json:"version" xml:"version"`
+ // The packager of the tool
+ Packager string `form:"packager" json:"packager" xml:"packager"`
+ // The url where the package can be found. Optional.
+ // If present checksum must also be present.
+ URL *string `form:"url,omitempty" json:"url,omitempty" xml:"url,omitempty"`
+ // A checksum of the archive. Mandatory when url is present.
+	// This ensures that the package is downloaded correctly.
+ Checksum *string `form:"checksum,omitempty" json:"checksum,omitempty" xml:"checksum,omitempty"`
+}
+
+// RemoveRequestBody is the type of the "tools" service "remove" endpoint HTTP
+// request body.
+type RemoveRequestBody struct {
+ // The url where the package can be found. Optional.
+ // If present checksum must also be present.
+ URL *string `form:"url,omitempty" json:"url,omitempty" xml:"url,omitempty"`
+ // A checksum of the archive. Mandatory when url is present.
+	// This ensures that the package is downloaded correctly.
+ Checksum *string `form:"checksum,omitempty" json:"checksum,omitempty" xml:"checksum,omitempty"`
+}
+
+// AvailableResponseBody is the type of the "tools" service "available"
+// endpoint HTTP response body.
+type AvailableResponseBody []*ToolResponse
+
+// InstalledResponseBody is the type of the "tools" service "installed"
+// endpoint HTTP response body.
+type InstalledResponseBody []*ToolResponse
+
+// InstallResponseBody is the type of the "tools" service "install" endpoint
+// HTTP response body.
+type InstallResponseBody struct {
+ // The status of the operation
+ Status *string `form:"status,omitempty" json:"status,omitempty" xml:"status,omitempty"`
+}
+
+// RemoveResponseBody is the type of the "tools" service "remove" endpoint HTTP
+// response body.
+type RemoveResponseBody struct {
+ // The status of the operation
+ Status *string `form:"status,omitempty" json:"status,omitempty" xml:"status,omitempty"`
+}
+
+// ToolResponse is used to define fields on response body types.
+type ToolResponse struct {
+ // The name of the tool
+ Name *string `form:"name,omitempty" json:"name,omitempty" xml:"name,omitempty"`
+ // The version of the tool
+ Version *string `form:"version,omitempty" json:"version,omitempty" xml:"version,omitempty"`
+ // The packager of the tool
+ Packager *string `form:"packager,omitempty" json:"packager,omitempty" xml:"packager,omitempty"`
+}
+
+// NewInstallRequestBody builds the HTTP request body from the payload of the
+// "install" endpoint of the "tools" service.
+func NewInstallRequestBody(p *tools.ToolPayload) *InstallRequestBody {
+ body := &InstallRequestBody{
+ Name: p.Name,
+ Version: p.Version,
+ Packager: p.Packager,
+ URL: p.URL,
+ Checksum: p.Checksum,
+ }
+ return body
+}
+
+// NewRemoveRequestBody builds the HTTP request body from the payload of the
+// "remove" endpoint of the "tools" service.
+func NewRemoveRequestBody(p *tools.ToolPayload) *RemoveRequestBody {
+ body := &RemoveRequestBody{
+ URL: p.URL,
+ Checksum: p.Checksum,
+ }
+ return body
+}
+
+// NewAvailableToolCollectionOK builds a "tools" service "available" endpoint
+// result from a HTTP "OK" response.
+func NewAvailableToolCollectionOK(body AvailableResponseBody) toolsviews.ToolCollectionView {
+ v := make([]*toolsviews.ToolView, len(body))
+ for i, val := range body {
+ v[i] = &toolsviews.ToolView{
+ Name: val.Name,
+ Version: val.Version,
+ Packager: val.Packager,
+ }
+ }
+ return v
+}
+
+// NewInstalledToolCollectionOK builds a "tools" service "installed" endpoint
+// result from a HTTP "OK" response.
+func NewInstalledToolCollectionOK(body InstalledResponseBody) toolsviews.ToolCollectionView {
+ v := make([]*toolsviews.ToolView, len(body))
+ for i, val := range body {
+ v[i] = &toolsviews.ToolView{
+ Name: val.Name,
+ Version: val.Version,
+ Packager: val.Packager,
+ }
+ }
+ return v
+}
+
+// NewInstallOperationOK builds a "tools" service "install" endpoint result
+// from a HTTP "OK" response.
+func NewInstallOperationOK(body *InstallResponseBody) *toolsviews.OperationView {
+ v := &toolsviews.OperationView{
+ Status: body.Status,
+ }
+ return v
+}
+
+// NewRemoveOperationOK builds a "tools" service "remove" endpoint result from
+// a HTTP "OK" response.
+func NewRemoveOperationOK(body *RemoveResponseBody) *toolsviews.OperationView {
+ v := &toolsviews.OperationView{
+ Status: body.Status,
+ }
+ return v
+}
+
+// ValidateToolResponse runs the validations defined on ToolResponse
+func ValidateToolResponse(body *ToolResponse) (err error) {
+ if body.Name == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("name", "body"))
+ }
+ if body.Version == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("version", "body"))
+ }
+ if body.Packager == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("packager", "body"))
+ }
+ return
+}
diff --git a/gen/http/tools/server/encode_decode.go b/gen/http/tools/server/encode_decode.go
new file mode 100644
index 000000000..5746d12e4
--- /dev/null
+++ b/gen/http/tools/server/encode_decode.go
@@ -0,0 +1,123 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// tools HTTP server encoders and decoders
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package server
+
+import (
+ "context"
+ "io"
+ "net/http"
+
+ toolsviews "github.com/arduino/arduino-create-agent/gen/tools/views"
+ goa "goa.design/goa"
+ goahttp "goa.design/goa/http"
+)
+
+// EncodeAvailableResponse returns an encoder for responses returned by the
+// tools available endpoint.
+func EncodeAvailableResponse(encoder func(context.Context, http.ResponseWriter) goahttp.Encoder) func(context.Context, http.ResponseWriter, interface{}) error {
+ return func(ctx context.Context, w http.ResponseWriter, v interface{}) error {
+ res := v.(toolsviews.ToolCollection)
+ enc := encoder(ctx, w)
+ body := NewToolResponseCollection(res.Projected)
+ w.WriteHeader(http.StatusOK)
+ return enc.Encode(body)
+ }
+}
+
+// EncodeInstalledResponse returns an encoder for responses returned by the
+// tools installed endpoint.
+func EncodeInstalledResponse(encoder func(context.Context, http.ResponseWriter) goahttp.Encoder) func(context.Context, http.ResponseWriter, interface{}) error {
+ return func(ctx context.Context, w http.ResponseWriter, v interface{}) error {
+ res := v.(toolsviews.ToolCollection)
+ enc := encoder(ctx, w)
+ body := NewToolResponseCollection(res.Projected)
+ w.WriteHeader(http.StatusOK)
+ return enc.Encode(body)
+ }
+}
+
+// EncodeInstallResponse returns an encoder for responses returned by the tools
+// install endpoint.
+func EncodeInstallResponse(encoder func(context.Context, http.ResponseWriter) goahttp.Encoder) func(context.Context, http.ResponseWriter, interface{}) error {
+ return func(ctx context.Context, w http.ResponseWriter, v interface{}) error {
+ res := v.(*toolsviews.Operation)
+ enc := encoder(ctx, w)
+ body := NewInstallResponseBody(res.Projected)
+ w.WriteHeader(http.StatusOK)
+ return enc.Encode(body)
+ }
+}
+
+// DecodeInstallRequest returns a decoder for requests sent to the tools
+// install endpoint.
+func DecodeInstallRequest(mux goahttp.Muxer, decoder func(*http.Request) goahttp.Decoder) func(*http.Request) (interface{}, error) {
+ return func(r *http.Request) (interface{}, error) {
+ var (
+ body InstallRequestBody
+ err error
+ )
+ err = decoder(r).Decode(&body)
+ if err != nil {
+ if err == io.EOF {
+ return nil, goa.MissingPayloadError()
+ }
+ return nil, goa.DecodePayloadError(err.Error())
+ }
+ err = ValidateInstallRequestBody(&body)
+ if err != nil {
+ return nil, err
+ }
+ payload := NewInstallToolPayload(&body)
+
+ return payload, nil
+ }
+}
+
+// EncodeRemoveResponse returns an encoder for responses returned by the tools
+// remove endpoint.
+func EncodeRemoveResponse(encoder func(context.Context, http.ResponseWriter) goahttp.Encoder) func(context.Context, http.ResponseWriter, interface{}) error {
+ return func(ctx context.Context, w http.ResponseWriter, v interface{}) error {
+ res := v.(*toolsviews.Operation)
+ enc := encoder(ctx, w)
+ body := NewRemoveResponseBody(res.Projected)
+ w.WriteHeader(http.StatusOK)
+ return enc.Encode(body)
+ }
+}
+
+// DecodeRemoveRequest returns a decoder for requests sent to the tools remove
+// endpoint.
+func DecodeRemoveRequest(mux goahttp.Muxer, decoder func(*http.Request) goahttp.Decoder) func(*http.Request) (interface{}, error) {
+ return func(r *http.Request) (interface{}, error) {
+ var (
+ body RemoveRequestBody
+ err error
+ )
+ err = decoder(r).Decode(&body)
+ if err != nil {
+ if err == io.EOF {
+ return nil, goa.MissingPayloadError()
+ }
+ return nil, goa.DecodePayloadError(err.Error())
+ }
+
+ var (
+ packager string
+ name string
+ version string
+
+ params = mux.Vars(r)
+ )
+ packager = params["packager"]
+ name = params["name"]
+ version = params["version"]
+ payload := NewRemoveToolPayload(&body, packager, name, version)
+
+ return payload, nil
+ }
+}
diff --git a/gen/http/tools/server/paths.go b/gen/http/tools/server/paths.go
new file mode 100644
index 000000000..49cffd067
--- /dev/null
+++ b/gen/http/tools/server/paths.go
@@ -0,0 +1,32 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// HTTP request path constructors for the tools service.
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package server
+
+import (
+ "fmt"
+)
+
+// AvailableToolsPath returns the URL path to the tools service available HTTP endpoint.
+func AvailableToolsPath() string {
+ return "/v2/pkgs/tools/available"
+}
+
+// InstalledToolsPath returns the URL path to the tools service installed HTTP endpoint.
+func InstalledToolsPath() string {
+ return "/v2/pkgs/tools/installed"
+}
+
+// InstallToolsPath returns the URL path to the tools service install HTTP endpoint.
+func InstallToolsPath() string {
+ return "/v2/pkgs/tools/installed"
+}
+
+// RemoveToolsPath returns the URL path to the tools service remove HTTP endpoint.
+func RemoveToolsPath(packager string, name string, version string) string {
+ return fmt.Sprintf("/v2/pkgs/tools/installed/%v/%v/%v", packager, name, version)
+}
diff --git a/gen/http/tools/server/server.go b/gen/http/tools/server/server.go
new file mode 100644
index 000000000..a25ff092d
--- /dev/null
+++ b/gen/http/tools/server/server.go
@@ -0,0 +1,276 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// tools HTTP server
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package server
+
+import (
+ "context"
+ "net/http"
+
+ tools "github.com/arduino/arduino-create-agent/gen/tools"
+ goa "goa.design/goa"
+ goahttp "goa.design/goa/http"
+)
+
+// Server lists the tools service endpoint HTTP handlers.
+type Server struct {
+ Mounts []*MountPoint
+ Available http.Handler
+ Installed http.Handler
+ Install http.Handler
+ Remove http.Handler
+}
+
+// ErrorNamer is an interface implemented by generated error structs that
+// exposes the name of the error as defined in the design.
+type ErrorNamer interface {
+ ErrorName() string
+}
+
+// MountPoint holds information about the mounted endpoints.
+type MountPoint struct {
+ // Method is the name of the service method served by the mounted HTTP handler.
+ Method string
+ // Verb is the HTTP method used to match requests to the mounted handler.
+ Verb string
+ // Pattern is the HTTP request path pattern used to match requests to the
+ // mounted handler.
+ Pattern string
+}
+
+// New instantiates HTTP handlers for all the tools service endpoints.
+func New(
+ e *tools.Endpoints,
+ mux goahttp.Muxer,
+ dec func(*http.Request) goahttp.Decoder,
+ enc func(context.Context, http.ResponseWriter) goahttp.Encoder,
+ eh func(context.Context, http.ResponseWriter, error),
+) *Server {
+ return &Server{
+ Mounts: []*MountPoint{
+ {"Available", "GET", "/v2/pkgs/tools/available"},
+ {"Installed", "GET", "/v2/pkgs/tools/installed"},
+ {"Install", "PUT", "/v2/pkgs/tools/installed"},
+ {"Remove", "DELETE", "/v2/pkgs/tools/installed/{packager}/{name}/{version}"},
+ },
+ Available: NewAvailableHandler(e.Available, mux, dec, enc, eh),
+ Installed: NewInstalledHandler(e.Installed, mux, dec, enc, eh),
+ Install: NewInstallHandler(e.Install, mux, dec, enc, eh),
+ Remove: NewRemoveHandler(e.Remove, mux, dec, enc, eh),
+ }
+}
+
+// Service returns the name of the service served.
+func (s *Server) Service() string { return "tools" }
+
+// Use wraps the server handlers with the given middleware.
+func (s *Server) Use(m func(http.Handler) http.Handler) {
+ s.Available = m(s.Available)
+ s.Installed = m(s.Installed)
+ s.Install = m(s.Install)
+ s.Remove = m(s.Remove)
+}
+
+// Mount configures the mux to serve the tools endpoints.
+func Mount(mux goahttp.Muxer, h *Server) {
+ MountAvailableHandler(mux, h.Available)
+ MountInstalledHandler(mux, h.Installed)
+ MountInstallHandler(mux, h.Install)
+ MountRemoveHandler(mux, h.Remove)
+}
+
+// MountAvailableHandler configures the mux to serve the "tools" service
+// "available" endpoint.
+func MountAvailableHandler(mux goahttp.Muxer, h http.Handler) {
+ f, ok := h.(http.HandlerFunc)
+ if !ok {
+ f = func(w http.ResponseWriter, r *http.Request) {
+ h.ServeHTTP(w, r)
+ }
+ }
+ mux.Handle("GET", "/v2/pkgs/tools/available", f)
+}
+
+// NewAvailableHandler creates a HTTP handler which loads the HTTP request and
+// calls the "tools" service "available" endpoint.
+func NewAvailableHandler(
+ endpoint goa.Endpoint,
+ mux goahttp.Muxer,
+ dec func(*http.Request) goahttp.Decoder,
+ enc func(context.Context, http.ResponseWriter) goahttp.Encoder,
+ eh func(context.Context, http.ResponseWriter, error),
+) http.Handler {
+ var (
+ encodeResponse = EncodeAvailableResponse(enc)
+ encodeError = goahttp.ErrorEncoder(enc)
+ )
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := context.WithValue(r.Context(), goahttp.AcceptTypeKey, r.Header.Get("Accept"))
+ ctx = context.WithValue(ctx, goa.MethodKey, "available")
+ ctx = context.WithValue(ctx, goa.ServiceKey, "tools")
+
+ res, err := endpoint(ctx, nil)
+
+ if err != nil {
+ if err := encodeError(ctx, w, err); err != nil {
+ eh(ctx, w, err)
+ }
+ return
+ }
+ if err := encodeResponse(ctx, w, res); err != nil {
+ eh(ctx, w, err)
+ }
+ })
+}
+
+// MountInstalledHandler configures the mux to serve the "tools" service
+// "installed" endpoint.
+func MountInstalledHandler(mux goahttp.Muxer, h http.Handler) {
+ f, ok := h.(http.HandlerFunc)
+ if !ok {
+ f = func(w http.ResponseWriter, r *http.Request) {
+ h.ServeHTTP(w, r)
+ }
+ }
+ mux.Handle("GET", "/v2/pkgs/tools/installed", f)
+}
+
+// NewInstalledHandler creates a HTTP handler which loads the HTTP request and
+// calls the "tools" service "installed" endpoint.
+func NewInstalledHandler(
+ endpoint goa.Endpoint,
+ mux goahttp.Muxer,
+ dec func(*http.Request) goahttp.Decoder,
+ enc func(context.Context, http.ResponseWriter) goahttp.Encoder,
+ eh func(context.Context, http.ResponseWriter, error),
+) http.Handler {
+ var (
+ encodeResponse = EncodeInstalledResponse(enc)
+ encodeError = goahttp.ErrorEncoder(enc)
+ )
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := context.WithValue(r.Context(), goahttp.AcceptTypeKey, r.Header.Get("Accept"))
+ ctx = context.WithValue(ctx, goa.MethodKey, "installed")
+ ctx = context.WithValue(ctx, goa.ServiceKey, "tools")
+
+ res, err := endpoint(ctx, nil)
+
+ if err != nil {
+ if err := encodeError(ctx, w, err); err != nil {
+ eh(ctx, w, err)
+ }
+ return
+ }
+ if err := encodeResponse(ctx, w, res); err != nil {
+ eh(ctx, w, err)
+ }
+ })
+}
+
+// MountInstallHandler configures the mux to serve the "tools" service
+// "install" endpoint.
+func MountInstallHandler(mux goahttp.Muxer, h http.Handler) {
+ f, ok := h.(http.HandlerFunc)
+ if !ok {
+ f = func(w http.ResponseWriter, r *http.Request) {
+ h.ServeHTTP(w, r)
+ }
+ }
+ mux.Handle("PUT", "/v2/pkgs/tools/installed", f)
+}
+
+// NewInstallHandler creates a HTTP handler which loads the HTTP request and
+// calls the "tools" service "install" endpoint.
+func NewInstallHandler(
+ endpoint goa.Endpoint,
+ mux goahttp.Muxer,
+ dec func(*http.Request) goahttp.Decoder,
+ enc func(context.Context, http.ResponseWriter) goahttp.Encoder,
+ eh func(context.Context, http.ResponseWriter, error),
+) http.Handler {
+ var (
+ decodeRequest = DecodeInstallRequest(mux, dec)
+ encodeResponse = EncodeInstallResponse(enc)
+ encodeError = goahttp.ErrorEncoder(enc)
+ )
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := context.WithValue(r.Context(), goahttp.AcceptTypeKey, r.Header.Get("Accept"))
+ ctx = context.WithValue(ctx, goa.MethodKey, "install")
+ ctx = context.WithValue(ctx, goa.ServiceKey, "tools")
+ payload, err := decodeRequest(r)
+ if err != nil {
+ if err := encodeError(ctx, w, err); err != nil {
+ eh(ctx, w, err)
+ }
+ return
+ }
+
+ res, err := endpoint(ctx, payload)
+
+ if err != nil {
+ if err := encodeError(ctx, w, err); err != nil {
+ eh(ctx, w, err)
+ }
+ return
+ }
+ if err := encodeResponse(ctx, w, res); err != nil {
+ eh(ctx, w, err)
+ }
+ })
+}
+
+// MountRemoveHandler configures the mux to serve the "tools" service "remove"
+// endpoint.
+func MountRemoveHandler(mux goahttp.Muxer, h http.Handler) {
+ f, ok := h.(http.HandlerFunc)
+ if !ok {
+ f = func(w http.ResponseWriter, r *http.Request) {
+ h.ServeHTTP(w, r)
+ }
+ }
+ mux.Handle("DELETE", "/v2/pkgs/tools/installed/{packager}/{name}/{version}", f)
+}
+
+// NewRemoveHandler creates a HTTP handler which loads the HTTP request and
+// calls the "tools" service "remove" endpoint.
+func NewRemoveHandler(
+ endpoint goa.Endpoint,
+ mux goahttp.Muxer,
+ dec func(*http.Request) goahttp.Decoder,
+ enc func(context.Context, http.ResponseWriter) goahttp.Encoder,
+ eh func(context.Context, http.ResponseWriter, error),
+) http.Handler {
+ var (
+ decodeRequest = DecodeRemoveRequest(mux, dec)
+ encodeResponse = EncodeRemoveResponse(enc)
+ encodeError = goahttp.ErrorEncoder(enc)
+ )
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := context.WithValue(r.Context(), goahttp.AcceptTypeKey, r.Header.Get("Accept"))
+ ctx = context.WithValue(ctx, goa.MethodKey, "remove")
+ ctx = context.WithValue(ctx, goa.ServiceKey, "tools")
+ payload, err := decodeRequest(r)
+ if err != nil {
+ if err := encodeError(ctx, w, err); err != nil {
+ eh(ctx, w, err)
+ }
+ return
+ }
+
+ res, err := endpoint(ctx, payload)
+
+ if err != nil {
+ if err := encodeError(ctx, w, err); err != nil {
+ eh(ctx, w, err)
+ }
+ return
+ }
+ if err := encodeResponse(ctx, w, res); err != nil {
+ eh(ctx, w, err)
+ }
+ })
+}
diff --git a/gen/http/tools/server/types.go b/gen/http/tools/server/types.go
new file mode 100644
index 000000000..49d37011d
--- /dev/null
+++ b/gen/http/tools/server/types.go
@@ -0,0 +1,140 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// tools HTTP server types
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package server
+
+import (
+ tools "github.com/arduino/arduino-create-agent/gen/tools"
+ toolsviews "github.com/arduino/arduino-create-agent/gen/tools/views"
+ goa "goa.design/goa"
+)
+
+// InstallRequestBody is the type of the "tools" service "install" endpoint
+// HTTP request body.
+type InstallRequestBody struct {
+ // The name of the tool
+ Name *string `form:"name,omitempty" json:"name,omitempty" xml:"name,omitempty"`
+ // The version of the tool
+ Version *string `form:"version,omitempty" json:"version,omitempty" xml:"version,omitempty"`
+ // The packager of the tool
+ Packager *string `form:"packager,omitempty" json:"packager,omitempty" xml:"packager,omitempty"`
+ // The url where the package can be found. Optional.
+ // If present checksum must also be present.
+ URL *string `form:"url,omitempty" json:"url,omitempty" xml:"url,omitempty"`
+ // A checksum of the archive. Mandatory when url is present.
+ // This ensures that the package is downloaded correctly.
+ Checksum *string `form:"checksum,omitempty" json:"checksum,omitempty" xml:"checksum,omitempty"`
+}
+
+// RemoveRequestBody is the type of the "tools" service "remove" endpoint HTTP
+// request body.
+type RemoveRequestBody struct {
+ // The url where the package can be found. Optional.
+ // If present checksum must also be present.
+ URL *string `form:"url,omitempty" json:"url,omitempty" xml:"url,omitempty"`
+ // A checksum of the archive. Mandatory when url is present.
+ // This ensures that the package is downloaded correctly.
+ Checksum *string `form:"checksum,omitempty" json:"checksum,omitempty" xml:"checksum,omitempty"`
+}
+
+// ToolResponseCollection is the type of the "tools" service "available"
+// endpoint HTTP response body.
+type ToolResponseCollection []*ToolResponse
+
+// InstallResponseBody is the type of the "tools" service "install" endpoint
+// HTTP response body.
+type InstallResponseBody struct {
+ // The status of the operation
+ Status string `form:"status" json:"status" xml:"status"`
+}
+
+// RemoveResponseBody is the type of the "tools" service "remove" endpoint HTTP
+// response body.
+type RemoveResponseBody struct {
+ // The status of the operation
+ Status string `form:"status" json:"status" xml:"status"`
+}
+
+// ToolResponse is used to define fields on response body types.
+type ToolResponse struct {
+ // The name of the tool
+ Name string `form:"name" json:"name" xml:"name"`
+ // The version of the tool
+ Version string `form:"version" json:"version" xml:"version"`
+ // The packager of the tool
+ Packager string `form:"packager" json:"packager" xml:"packager"`
+}
+
+// NewToolResponseCollection builds the HTTP response body from the result of
+// the "available" endpoint of the "tools" service.
+func NewToolResponseCollection(res toolsviews.ToolCollectionView) ToolResponseCollection {
+ body := make([]*ToolResponse, len(res))
+ for i, val := range res {
+ body[i] = &ToolResponse{
+ Name: *val.Name,
+ Version: *val.Version,
+ Packager: *val.Packager,
+ }
+ }
+ return body
+}
+
+// NewInstallResponseBody builds the HTTP response body from the result of the
+// "install" endpoint of the "tools" service.
+func NewInstallResponseBody(res *toolsviews.OperationView) *InstallResponseBody {
+ body := &InstallResponseBody{
+ Status: *res.Status,
+ }
+ return body
+}
+
+// NewRemoveResponseBody builds the HTTP response body from the result of the
+// "remove" endpoint of the "tools" service.
+func NewRemoveResponseBody(res *toolsviews.OperationView) *RemoveResponseBody {
+ body := &RemoveResponseBody{
+ Status: *res.Status,
+ }
+ return body
+}
+
+// NewInstallToolPayload builds a tools service install endpoint payload.
+func NewInstallToolPayload(body *InstallRequestBody) *tools.ToolPayload {
+ v := &tools.ToolPayload{
+ Name: *body.Name,
+ Version: *body.Version,
+ Packager: *body.Packager,
+ URL: body.URL,
+ Checksum: body.Checksum,
+ }
+ return v
+}
+
+// NewRemoveToolPayload builds a tools service remove endpoint payload.
+func NewRemoveToolPayload(body *RemoveRequestBody, packager string, name string, version string) *tools.ToolPayload {
+ v := &tools.ToolPayload{
+ URL: body.URL,
+ Checksum: body.Checksum,
+ }
+ v.Packager = packager
+ v.Name = name
+ v.Version = version
+ return v
+}
+
+// ValidateInstallRequestBody runs the validations defined on InstallRequestBody
+func ValidateInstallRequestBody(body *InstallRequestBody) (err error) {
+ if body.Name == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("name", "body"))
+ }
+ if body.Version == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("version", "body"))
+ }
+ if body.Packager == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("packager", "body"))
+ }
+ return
+}
diff --git a/gen/indexes/client.go b/gen/indexes/client.go
new file mode 100644
index 000000000..a95c0f640
--- /dev/null
+++ b/gen/indexes/client.go
@@ -0,0 +1,60 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// indexes client
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package indexes
+
+import (
+ "context"
+
+ goa "goa.design/goa"
+)
+
+// Client is the "indexes" service client.
+type Client struct {
+ ListEndpoint goa.Endpoint
+ AddEndpoint goa.Endpoint
+ RemoveEndpoint goa.Endpoint
+}
+
+// NewClient initializes a "indexes" service client given the endpoints.
+func NewClient(list, add, remove goa.Endpoint) *Client {
+ return &Client{
+ ListEndpoint: list,
+ AddEndpoint: add,
+ RemoveEndpoint: remove,
+ }
+}
+
+// List calls the "list" endpoint of the "indexes" service.
+func (c *Client) List(ctx context.Context) (res []string, err error) {
+ var ires interface{}
+ ires, err = c.ListEndpoint(ctx, nil)
+ if err != nil {
+ return
+ }
+ return ires.([]string), nil
+}
+
+// Add calls the "add" endpoint of the "indexes" service.
+func (c *Client) Add(ctx context.Context, p *IndexPayload) (res *Operation, err error) {
+ var ires interface{}
+ ires, err = c.AddEndpoint(ctx, p)
+ if err != nil {
+ return
+ }
+ return ires.(*Operation), nil
+}
+
+// Remove calls the "remove" endpoint of the "indexes" service.
+func (c *Client) Remove(ctx context.Context, p *IndexPayload) (res *Operation, err error) {
+ var ires interface{}
+ ires, err = c.RemoveEndpoint(ctx, p)
+ if err != nil {
+ return
+ }
+ return ires.(*Operation), nil
+}
diff --git a/gen/indexes/endpoints.go b/gen/indexes/endpoints.go
new file mode 100644
index 000000000..052938e1e
--- /dev/null
+++ b/gen/indexes/endpoints.go
@@ -0,0 +1,73 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// indexes endpoints
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package indexes
+
+import (
+ "context"
+
+ goa "goa.design/goa"
+)
+
+// Endpoints wraps the "indexes" service endpoints.
+type Endpoints struct {
+ List goa.Endpoint
+ Add goa.Endpoint
+ Remove goa.Endpoint
+}
+
+// NewEndpoints wraps the methods of the "indexes" service with endpoints.
+func NewEndpoints(s Service) *Endpoints {
+ return &Endpoints{
+ List: NewListEndpoint(s),
+ Add: NewAddEndpoint(s),
+ Remove: NewRemoveEndpoint(s),
+ }
+}
+
+// Use applies the given middleware to all the "indexes" service endpoints.
+func (e *Endpoints) Use(m func(goa.Endpoint) goa.Endpoint) {
+ e.List = m(e.List)
+ e.Add = m(e.Add)
+ e.Remove = m(e.Remove)
+}
+
+// NewListEndpoint returns an endpoint function that calls the method "list" of
+// service "indexes".
+func NewListEndpoint(s Service) goa.Endpoint {
+ return func(ctx context.Context, req interface{}) (interface{}, error) {
+ return s.List(ctx)
+ }
+}
+
+// NewAddEndpoint returns an endpoint function that calls the method "add" of
+// service "indexes".
+func NewAddEndpoint(s Service) goa.Endpoint {
+ return func(ctx context.Context, req interface{}) (interface{}, error) {
+ p := req.(*IndexPayload)
+ res, err := s.Add(ctx, p)
+ if err != nil {
+ return nil, err
+ }
+ vres := NewViewedOperation(res, "default")
+ return vres, nil
+ }
+}
+
+// NewRemoveEndpoint returns an endpoint function that calls the method
+// "remove" of service "indexes".
+func NewRemoveEndpoint(s Service) goa.Endpoint {
+ return func(ctx context.Context, req interface{}) (interface{}, error) {
+ p := req.(*IndexPayload)
+ res, err := s.Remove(ctx, p)
+ if err != nil {
+ return nil, err
+ }
+ vres := NewViewedOperation(res, "default")
+ return vres, nil
+ }
+}
diff --git a/gen/indexes/service.go b/gen/indexes/service.go
new file mode 100644
index 000000000..ad57b8f88
--- /dev/null
+++ b/gen/indexes/service.go
@@ -0,0 +1,97 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// indexes service
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package indexes
+
+import (
+ "context"
+
+ indexesviews "github.com/arduino/arduino-create-agent/gen/indexes/views"
+ "goa.design/goa"
+)
+
+// The indexes service manages the package_index files
+type Service interface {
+ // List implements list.
+ List(context.Context) (res []string, err error)
+ // Add implements add.
+ Add(context.Context, *IndexPayload) (res *Operation, err error)
+ // Remove implements remove.
+ Remove(context.Context, *IndexPayload) (res *Operation, err error)
+}
+
+// ServiceName is the name of the service as defined in the design. This is the
+// same value that is set in the endpoint request contexts under the ServiceKey
+// key.
+const ServiceName = "indexes"
+
+// MethodNames lists the service method names as defined in the design. These
+// are the same values that are set in the endpoint request contexts under the
+// MethodKey key.
+var MethodNames = [3]string{"list", "add", "remove"}
+
+// IndexPayload is the payload type of the indexes service add method.
+type IndexPayload struct {
+ // The url of the index file
+ URL string
+}
+
+// Operation is the result type of the indexes service add method.
+type Operation struct {
+ // The status of the operation
+ Status string
+}
+
+// MakeInvalidURL builds a goa.ServiceError from an error.
+func MakeInvalidURL(err error) *goa.ServiceError {
+ return &goa.ServiceError{
+ Name: "invalid_url",
+ ID: goa.NewErrorID(),
+ Message: err.Error(),
+ }
+}
+
+// NewOperation initializes result type Operation from viewed result type
+// Operation.
+func NewOperation(vres *indexesviews.Operation) *Operation {
+ var res *Operation
+ switch vres.View {
+ case "default", "":
+ res = newOperation(vres.Projected)
+ }
+ return res
+}
+
+// NewViewedOperation initializes viewed result type Operation from result type
+// Operation using the given view.
+func NewViewedOperation(res *Operation, view string) *indexesviews.Operation {
+ var vres *indexesviews.Operation
+ switch view {
+ case "default", "":
+ p := newOperationView(res)
+ vres = &indexesviews.Operation{p, "default"}
+ }
+ return vres
+}
+
+// newOperation converts projected type Operation to service type Operation.
+func newOperation(vres *indexesviews.OperationView) *Operation {
+ res := &Operation{}
+ if vres.Status != nil {
+ res.Status = *vres.Status
+ }
+ return res
+}
+
+// newOperationView projects result type Operation into projected type
+// OperationView using the "default" view.
+func newOperationView(res *Operation) *indexesviews.OperationView {
+ vres := &indexesviews.OperationView{
+ Status: &res.Status,
+ }
+ return vres
+}
diff --git a/gen/indexes/views/view.go b/gen/indexes/views/view.go
new file mode 100644
index 000000000..15af1da02
--- /dev/null
+++ b/gen/indexes/views/view.go
@@ -0,0 +1,47 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// indexes views
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package views
+
+import (
+ goa "goa.design/goa"
+)
+
+// Operation is the viewed result type that is projected based on a view.
+type Operation struct {
+ // Type to project
+ Projected *OperationView
+ // View to render
+ View string
+}
+
+// OperationView is a type that runs validations on a projected type.
+type OperationView struct {
+ // The status of the operation
+ Status *string
+}
+
+// ValidateOperation runs the validations defined on the viewed result type
+// Operation.
+func ValidateOperation(result *Operation) (err error) {
+ switch result.View {
+ case "default", "":
+ err = ValidateOperationView(result.Projected)
+ default:
+ err = goa.InvalidEnumValueError("view", result.View, []interface{}{"default"})
+ }
+ return
+}
+
+// ValidateOperationView runs the validations defined on OperationView using
+// the "default" view.
+func ValidateOperationView(result *OperationView) (err error) {
+ if result.Status == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("status", "result"))
+ }
+ return
+}
diff --git a/gen/tools/client.go b/gen/tools/client.go
new file mode 100644
index 000000000..8c3cc70f1
--- /dev/null
+++ b/gen/tools/client.go
@@ -0,0 +1,75 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// tools client
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package tools
+
+import (
+ "context"
+
+ goa "goa.design/goa"
+)
+
+// Client is the "tools" service client.
+type Client struct {
+ AvailableEndpoint goa.Endpoint
+ InstalledEndpoint goa.Endpoint
+ InstallEndpoint goa.Endpoint
+ RemoveEndpoint goa.Endpoint
+}
+
+// NewClient initializes a "tools" service client given the endpoints.
+func NewClient(available, installed, install, remove goa.Endpoint) *Client {
+ return &Client{
+ AvailableEndpoint: available,
+ InstalledEndpoint: installed,
+ InstallEndpoint: install,
+ RemoveEndpoint: remove,
+ }
+}
+
+// Available calls the "available" endpoint of the "tools" service.
+func (c *Client) Available(ctx context.Context) (res ToolCollection, err error) {
+ var ires interface{}
+ ires, err = c.AvailableEndpoint(ctx, nil)
+ if err != nil {
+ return
+ }
+ return ires.(ToolCollection), nil
+}
+
+// Installed calls the "installed" endpoint of the "tools" service.
+func (c *Client) Installed(ctx context.Context) (res ToolCollection, err error) {
+ var ires interface{}
+ ires, err = c.InstalledEndpoint(ctx, nil)
+ if err != nil {
+ return
+ }
+ return ires.(ToolCollection), nil
+}
+
+// Install calls the "install" endpoint of the "tools" service.
+// Install may return the following errors:
+// - "not_found" (type *goa.ServiceError): tool not found
+// - error: internal error
+func (c *Client) Install(ctx context.Context, p *ToolPayload) (res *Operation, err error) {
+ var ires interface{}
+ ires, err = c.InstallEndpoint(ctx, p)
+ if err != nil {
+ return
+ }
+ return ires.(*Operation), nil
+}
+
+// Remove calls the "remove" endpoint of the "tools" service.
+func (c *Client) Remove(ctx context.Context, p *ToolPayload) (res *Operation, err error) {
+ var ires interface{}
+ ires, err = c.RemoveEndpoint(ctx, p)
+ if err != nil {
+ return
+ }
+ return ires.(*Operation), nil
+}
diff --git a/gen/tools/endpoints.go b/gen/tools/endpoints.go
new file mode 100644
index 000000000..c2caa0d0a
--- /dev/null
+++ b/gen/tools/endpoints.go
@@ -0,0 +1,94 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// tools endpoints
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package tools
+
+import (
+ "context"
+
+ goa "goa.design/goa"
+)
+
+// Endpoints wraps the "tools" service endpoints.
+type Endpoints struct {
+ Available goa.Endpoint
+ Installed goa.Endpoint
+ Install goa.Endpoint
+ Remove goa.Endpoint
+}
+
+// NewEndpoints wraps the methods of the "tools" service with endpoints.
+func NewEndpoints(s Service) *Endpoints {
+ return &Endpoints{
+ Available: NewAvailableEndpoint(s),
+ Installed: NewInstalledEndpoint(s),
+ Install: NewInstallEndpoint(s),
+ Remove: NewRemoveEndpoint(s),
+ }
+}
+
+// Use applies the given middleware to all the "tools" service endpoints.
+func (e *Endpoints) Use(m func(goa.Endpoint) goa.Endpoint) {
+ e.Available = m(e.Available)
+ e.Installed = m(e.Installed)
+ e.Install = m(e.Install)
+ e.Remove = m(e.Remove)
+}
+
+// NewAvailableEndpoint returns an endpoint function that calls the method
+// "available" of service "tools".
+func NewAvailableEndpoint(s Service) goa.Endpoint {
+ return func(ctx context.Context, req interface{}) (interface{}, error) {
+ res, err := s.Available(ctx)
+ if err != nil {
+ return nil, err
+ }
+ vres := NewViewedToolCollection(res, "default")
+ return vres, nil
+ }
+}
+
+// NewInstalledEndpoint returns an endpoint function that calls the method
+// "installed" of service "tools".
+func NewInstalledEndpoint(s Service) goa.Endpoint {
+ return func(ctx context.Context, req interface{}) (interface{}, error) {
+ res, err := s.Installed(ctx)
+ if err != nil {
+ return nil, err
+ }
+ vres := NewViewedToolCollection(res, "default")
+ return vres, nil
+ }
+}
+
+// NewInstallEndpoint returns an endpoint function that calls the method
+// "install" of service "tools".
+func NewInstallEndpoint(s Service) goa.Endpoint {
+ return func(ctx context.Context, req interface{}) (interface{}, error) {
+ p := req.(*ToolPayload)
+ res, err := s.Install(ctx, p)
+ if err != nil {
+ return nil, err
+ }
+ vres := NewViewedOperation(res, "default")
+ return vres, nil
+ }
+}
+
+// NewRemoveEndpoint returns an endpoint function that calls the method
+// "remove" of service "tools".
+func NewRemoveEndpoint(s Service) goa.Endpoint {
+ return func(ctx context.Context, req interface{}) (interface{}, error) {
+ p := req.(*ToolPayload)
+ res, err := s.Remove(ctx, p)
+ if err != nil {
+ return nil, err
+ }
+ vres := NewViewedOperation(res, "default")
+ return vres, nil
+ }
+}
diff --git a/gen/tools/service.go b/gen/tools/service.go
new file mode 100644
index 000000000..12d90c902
--- /dev/null
+++ b/gen/tools/service.go
@@ -0,0 +1,191 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// tools service
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package tools
+
+import (
+ "context"
+
+ toolsviews "github.com/arduino/arduino-create-agent/gen/tools/views"
+ "goa.design/goa"
+)
+
+// The tools service manages the available and installed tools
+type Service interface {
+ // Available implements available.
+ Available(context.Context) (res ToolCollection, err error)
+ // Installed implements installed.
+ Installed(context.Context) (res ToolCollection, err error)
+ // Install implements install.
+ Install(context.Context, *ToolPayload) (res *Operation, err error)
+ // Remove implements remove.
+ Remove(context.Context, *ToolPayload) (res *Operation, err error)
+}
+
+// ServiceName is the name of the service as defined in the design. This is the
+// same value that is set in the endpoint request contexts under the ServiceKey
+// key.
+const ServiceName = "tools"
+
+// MethodNames lists the service method names as defined in the design. These
+// are the same values that are set in the endpoint request contexts under the
+// MethodKey key.
+var MethodNames = [4]string{"available", "installed", "install", "remove"}
+
+// ToolCollection is the result type of the tools service available method.
+type ToolCollection []*Tool
+
+// ToolPayload is the payload type of the tools service install method.
+type ToolPayload struct {
+ // The name of the tool
+ Name string
+ // The version of the tool
+ Version string
+ // The packager of the tool
+ Packager string
+ // The url where the package can be found. Optional.
+ // If present checksum must also be present.
+ URL *string
+ // A checksum of the archive. Mandatory when url is present.
+ // This ensures that the package is downloaded correctly.
+ Checksum *string
+}
+
+// Operation is the result type of the tools service install method.
+type Operation struct {
+ // The status of the operation
+ Status string
+}
+
+// A tool is an executable program that can upload sketches.
+type Tool struct {
+ // The name of the tool
+ Name string
+ // The version of the tool
+ Version string
+ // The packager of the tool
+ Packager string
+}
+
+// MakeNotFound builds a goa.ServiceError from an error.
+func MakeNotFound(err error) *goa.ServiceError {
+ return &goa.ServiceError{
+ Name: "not_found",
+ ID: goa.NewErrorID(),
+ Message: err.Error(),
+ }
+}
+
+// NewToolCollection initializes result type ToolCollection from viewed result
+// type ToolCollection.
+func NewToolCollection(vres toolsviews.ToolCollection) ToolCollection {
+ var res ToolCollection
+ switch vres.View {
+ case "default", "":
+ res = newToolCollection(vres.Projected)
+ }
+ return res
+}
+
+// NewViewedToolCollection initializes viewed result type ToolCollection from
+// result type ToolCollection using the given view.
+func NewViewedToolCollection(res ToolCollection, view string) toolsviews.ToolCollection {
+ var vres toolsviews.ToolCollection
+ switch view {
+ case "default", "":
+ p := newToolCollectionView(res)
+ vres = toolsviews.ToolCollection{p, "default"}
+ }
+ return vres
+}
+
+// NewOperation initializes result type Operation from viewed result type
+// Operation.
+func NewOperation(vres *toolsviews.Operation) *Operation {
+ var res *Operation
+ switch vres.View {
+ case "default", "":
+ res = newOperation(vres.Projected)
+ }
+ return res
+}
+
+// NewViewedOperation initializes viewed result type Operation from result type
+// Operation using the given view.
+func NewViewedOperation(res *Operation, view string) *toolsviews.Operation {
+ var vres *toolsviews.Operation
+ switch view {
+ case "default", "":
+ p := newOperationView(res)
+ vres = &toolsviews.Operation{p, "default"}
+ }
+ return vres
+}
+
+// newToolCollection converts projected type ToolCollection to service type
+// ToolCollection.
+func newToolCollection(vres toolsviews.ToolCollectionView) ToolCollection {
+ res := make(ToolCollection, len(vres))
+ for i, n := range vres {
+ res[i] = newTool(n)
+ }
+ return res
+}
+
+// newToolCollectionView projects result type ToolCollection into projected
+// type ToolCollectionView using the "default" view.
+func newToolCollectionView(res ToolCollection) toolsviews.ToolCollectionView {
+ vres := make(toolsviews.ToolCollectionView, len(res))
+ for i, n := range res {
+ vres[i] = newToolView(n)
+ }
+ return vres
+}
+
+// newTool converts projected type Tool to service type Tool.
+func newTool(vres *toolsviews.ToolView) *Tool {
+ res := &Tool{}
+ if vres.Name != nil {
+ res.Name = *vres.Name
+ }
+ if vres.Version != nil {
+ res.Version = *vres.Version
+ }
+ if vres.Packager != nil {
+ res.Packager = *vres.Packager
+ }
+ return res
+}
+
+// newToolView projects result type Tool into projected type ToolView using the
+// "default" view.
+func newToolView(res *Tool) *toolsviews.ToolView {
+ vres := &toolsviews.ToolView{
+ Name: &res.Name,
+ Version: &res.Version,
+ Packager: &res.Packager,
+ }
+ return vres
+}
+
+// newOperation converts projected type Operation to service type Operation.
+func newOperation(vres *toolsviews.OperationView) *Operation {
+ res := &Operation{}
+ if vres.Status != nil {
+ res.Status = *vres.Status
+ }
+ return res
+}
+
+// newOperationView projects result type Operation into projected type
+// OperationView using the "default" view.
+func newOperationView(res *Operation) *toolsviews.OperationView {
+ vres := &toolsviews.OperationView{
+ Status: &res.Status,
+ }
+ return vres
+}
diff --git a/gen/tools/views/view.go b/gen/tools/views/view.go
new file mode 100644
index 000000000..10bd4e8ea
--- /dev/null
+++ b/gen/tools/views/view.go
@@ -0,0 +1,106 @@
+// Code generated by goa v2.0.0-wip, DO NOT EDIT.
+//
+// tools views
+//
+// Command:
+// $ goa gen github.com/arduino/arduino-create-agent/design
+
+package views
+
+import (
+ goa "goa.design/goa"
+)
+
+// ToolCollection is the viewed result type that is projected based on a view.
+type ToolCollection struct {
+ // Type to project
+ Projected ToolCollectionView
+ // View to render
+ View string
+}
+
+// Operation is the viewed result type that is projected based on a view.
+type Operation struct {
+ // Type to project
+ Projected *OperationView
+ // View to render
+ View string
+}
+
+// ToolCollectionView is a type that runs validations on a projected type.
+type ToolCollectionView []*ToolView
+
+// ToolView is a type that runs validations on a projected type.
+type ToolView struct {
+ // The name of the tool
+ Name *string
+ // The version of the tool
+ Version *string
+ // The packager of the tool
+ Packager *string
+}
+
+// OperationView is a type that runs validations on a projected type.
+type OperationView struct {
+ // The status of the operation
+ Status *string
+}
+
+// ValidateToolCollection runs the validations defined on the viewed result
+// type ToolCollection.
+func ValidateToolCollection(result ToolCollection) (err error) {
+ switch result.View {
+ case "default", "":
+ err = ValidateToolCollectionView(result.Projected)
+ default:
+ err = goa.InvalidEnumValueError("view", result.View, []interface{}{"default"})
+ }
+ return
+}
+
+// ValidateOperation runs the validations defined on the viewed result type
+// Operation.
+func ValidateOperation(result *Operation) (err error) {
+ switch result.View {
+ case "default", "":
+ err = ValidateOperationView(result.Projected)
+ default:
+ err = goa.InvalidEnumValueError("view", result.View, []interface{}{"default"})
+ }
+ return
+}
+
+// ValidateToolCollectionView runs the validations defined on
+// ToolCollectionView using the "default" view.
+func ValidateToolCollectionView(result ToolCollectionView) (err error) {
+ for _, item := range result {
+ if err2 := ValidateToolView(item); err2 != nil {
+ err = goa.MergeErrors(err, err2)
+ }
+ }
+ return
+}
+
+// ValidateToolView runs the validations defined on ToolView using the
+// "default" view.
+func ValidateToolView(result *ToolView) (err error) {
+ if result.Name == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("name", "result"))
+ }
+ if result.Version == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("version", "result"))
+ }
+ if result.Packager == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("packager", "result"))
+ }
+ return
+}
+
+// ValidateOperationView runs the validations defined on OperationView using
+// the "default" view.
+func ValidateOperationView(result *OperationView) (err error) {
+ if result.Status == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("status", "result"))
+ }
+ return
+}
diff --git a/hub.go b/hub.go
index 9b0744970..160fd6b7e 100755
--- a/hub.go
+++ b/hub.go
@@ -12,9 +12,9 @@ import (
"strconv"
"strings"
- log "github.com/Sirupsen/logrus"
"github.com/arduino/arduino-create-agent/upload"
"github.com/kardianos/osext"
+ log "github.com/sirupsen/logrus"
)
type hub struct {
diff --git a/main.go b/main.go
index 6b0012dfd..d25604795 100755
--- a/main.go
+++ b/main.go
@@ -15,14 +15,14 @@ import (
"text/template"
"time"
- "github.com/go-ini/ini"
-
- log "github.com/Sirupsen/logrus"
"github.com/arduino/arduino-create-agent/tools"
"github.com/arduino/arduino-create-agent/utilities"
+ "github.com/arduino/arduino-create-agent/v2"
"github.com/gin-gonic/gin"
+ "github.com/go-ini/ini"
cors "github.com/itsjamie/gin-cors"
"github.com/kardianos/osext"
+ log "github.com/sirupsen/logrus"
//"github.com/sanbornm/go-selfupdate/selfupdate" #included in update.go to change heavily
)
@@ -288,6 +288,10 @@ func loop() {
r.POST("/pause", pauseHandler)
r.POST("/update", updateHandler)
+ // Mount goa handlers
+ goa := v2.Server(directory)
+ r.Any("/v2/*path", gin.WrapH(goa))
+
go func() {
// check if certificates exist; if not, use plain http
if _, err := os.Stat(filepath.Join(dest, "cert.pem")); os.IsNotExist(err) {
diff --git a/seriallist.go b/seriallist.go
index 2c6c6dd77..c948c23fb 100755
--- a/seriallist.go
+++ b/seriallist.go
@@ -7,7 +7,7 @@ import (
"regexp"
"strings"
- log "github.com/Sirupsen/logrus"
+ log "github.com/sirupsen/logrus"
"go.bug.st/serial.v1/enumerator"
)
diff --git a/serialport.go b/serialport.go
index b9a7dd46c..7b2952a29 100755
--- a/serialport.go
+++ b/serialport.go
@@ -8,7 +8,7 @@ import (
"time"
"unicode/utf8"
- log "github.com/Sirupsen/logrus"
+ log "github.com/sirupsen/logrus"
serial "go.bug.st/serial.v1"
)
diff --git a/tools/download.go b/tools/download.go
index 57ac0419d..c5fa4f31f 100644
--- a/tools/download.go
+++ b/tools/download.go
@@ -227,7 +227,7 @@ func (t *Tools) Download(pack, name, version, behaviour string) error {
// Decompress
t.Logger("Unpacking tool " + name)
- location := path.Join( dir(), pack, correctTool.Name, correctTool.Version)
+ location := path.Join(dir(), pack, correctTool.Name, correctTool.Version)
err = os.RemoveAll(location)
if err != nil {
@@ -393,7 +393,7 @@ func findBaseDir(dirList []string) string {
return commonBaseDir
}
-func extractZip(log func(msg string) , body []byte, location string) (string, error) {
+func extractZip(log func(msg string), body []byte, location string) (string, error) {
path, err := utilities.SaveFileonTempDir("tooldownloaded.zip", bytes.NewReader(body))
r, err := zip.OpenReader(path)
if err != nil {
@@ -442,7 +442,7 @@ func extractZip(log func(msg string) , body []byte, location string) (string, e
return location, nil
}
-func extractTarGz(log func(msg string),body []byte, location string) (string, error) {
+func extractTarGz(log func(msg string), body []byte, location string) (string, error) {
bodyCopy := make([]byte, len(body))
copy(bodyCopy, body)
tarFile, _ := gzip.NewReader(bytes.NewReader(body))
@@ -506,8 +506,7 @@ func extractTarGz(log func(msg string),body []byte, location string) (string, er
return location, nil
}
-
-func extractBz2(log func(msg string),body []byte, location string) (string, error) {
+func extractBz2(log func(msg string), body []byte, location string) (string, error) {
bodyCopy := make([]byte, len(body))
copy(bodyCopy, body)
tarFile := bzip2.NewReader(bytes.NewReader(body))
@@ -573,7 +572,6 @@ func extractBz2(log func(msg string),body []byte, location string) (string, err
return location, nil
}
-
func (t *Tools) installDrivers(location string) error {
OK_PRESSED := 6
extension := ".bat"
diff --git a/tools/tools.go b/tools/tools.go
index d9f18f8ae..615cabf8e 100644
--- a/tools/tools.go
+++ b/tools/tools.go
@@ -2,6 +2,7 @@ package tools
import (
"encoding/json"
+ "fmt"
"io/ioutil"
"os"
"os/user"
@@ -60,6 +61,16 @@ func (t *Tools) GetLocation(command string) (string, error) {
var location string
var ok bool
+ // Load installed
+ fmt.Println(t.installed)
+
+ err := t.readMap()
+ if err != nil {
+ return "", err
+ }
+
+ fmt.Println(t.installed)
+
// use string similarity to resolve a runtime var with a "similar" map element
if location, ok = t.installed[command]; !ok {
maxSimilarity := 0.0
diff --git a/trayicon.go b/trayicon.go
index aa8638447..c7b1d671d 100644
--- a/trayicon.go
+++ b/trayicon.go
@@ -35,11 +35,11 @@ import (
"path/filepath"
"runtime"
- log "github.com/Sirupsen/logrus"
"github.com/arduino/arduino-create-agent/icon"
"github.com/facchinm/systray"
"github.com/go-ini/ini"
"github.com/kardianos/osext"
+ log "github.com/sirupsen/logrus"
"github.com/skratchdot/open-golang/open"
"go.bug.st/serial.v1"
)
diff --git a/updater/updater.go b/updater/updater.go
index 524d2a895..d80d6ae5b 100644
--- a/updater/updater.go
+++ b/updater/updater.go
@@ -14,9 +14,9 @@ import (
"runtime"
"time"
- log "github.com/Sirupsen/logrus"
- "gopkg.in/inconshreveable/go-update.v0"
"github.com/kr/binarydist"
+ log "github.com/sirupsen/logrus"
+ "gopkg.in/inconshreveable/go-update.v0"
"github.com/kardianos/osext"
)
diff --git a/upload/upload_test.go b/upload/upload_test.go
index a6c01de2d..adf495917 100644
--- a/upload/upload_test.go
+++ b/upload/upload_test.go
@@ -5,9 +5,9 @@ import (
"strings"
"testing"
- "github.com/Sirupsen/logrus"
"github.com/arduino/arduino-create-agent/upload"
homedir "github.com/mitchellh/go-homedir"
+ "github.com/sirupsen/logrus"
)
type mockTools struct{}
diff --git a/v2/http.go b/v2/http.go
new file mode 100644
index 000000000..535faf72e
--- /dev/null
+++ b/v2/http.go
@@ -0,0 +1,66 @@
+package v2
+
+import (
+ "context"
+ "net/http"
+ "path/filepath"
+
+ docssvr "github.com/arduino/arduino-create-agent/gen/http/docs/server"
+ indexessvr "github.com/arduino/arduino-create-agent/gen/http/indexes/server"
+ toolssvr "github.com/arduino/arduino-create-agent/gen/http/tools/server"
+ indexessvc "github.com/arduino/arduino-create-agent/gen/indexes"
+ toolssvc "github.com/arduino/arduino-create-agent/gen/tools"
+ "github.com/arduino/arduino-create-agent/v2/pkgs"
+ "github.com/sirupsen/logrus"
+ goahttp "goa.design/goa/http"
+ "goa.design/goa/http/middleware"
+)
+
+func Server(home string) http.Handler {
+ mux := goahttp.NewMuxer()
+
+ // Instantiate logger
+ logger := logrus.New()
+ logger.SetLevel(logrus.DebugLevel)
+ logAdapter := LogAdapter{Logger: logger}
+
+ // Mount indexes
+ indexesSvc := pkgs.Indexes{
+ Log: logger,
+ Folder: filepath.Join(home, "indexes"),
+ }
+ indexesEndpoints := indexessvc.NewEndpoints(&indexesSvc)
+ indexesServer := indexessvr.New(indexesEndpoints, mux, goahttp.RequestDecoder,
+ goahttp.ResponseEncoder, errorHandler(logger))
+ indexessvr.Mount(mux, indexesServer)
+
+ // Mount tools
+ toolsSvc := pkgs.Tools{
+ Folder: home,
+ Indexes: &indexesSvc,
+ }
+ toolsEndpoints := toolssvc.NewEndpoints(&toolsSvc)
+ toolsServer := toolssvr.New(toolsEndpoints, mux, goahttp.RequestDecoder, goahttp.ResponseEncoder, errorHandler(logger))
+ toolssvr.Mount(mux, toolsServer)
+
+ // Mount docs
+ docssvr.New(nil, mux, goahttp.RequestDecoder, goahttp.ResponseEncoder, errorHandler(logger))
+ docssvr.Mount(mux)
+
+ // Mount middlewares
+ handler := middleware.Log(logAdapter)(mux)
+ handler = middleware.RequestID()(handler)
+
+ return handler
+}
+
+// errorHandler returns a function that writes and logs the given error.
+// The function also writes and logs the error unique ID so that it's possible
+// to correlate.
+func errorHandler(logger *logrus.Logger) func(context.Context, http.ResponseWriter, error) {
+ return func(ctx context.Context, w http.ResponseWriter, err error) {
+ id := ctx.Value(middleware.RequestIDKey).(string)
+ w.Write([]byte("[" + id + "] encoding: " + err.Error()))
+ logger.Printf("[%s] ERROR: %s", id, err.Error())
+ }
+}
diff --git a/v2/log.go b/v2/log.go
new file mode 100644
index 000000000..4f22b20e0
--- /dev/null
+++ b/v2/log.go
@@ -0,0 +1,29 @@
+package v2
+
+import (
+ "bytes"
+ "fmt"
+
+ "github.com/sirupsen/logrus"
+)
+
+type LogAdapter struct {
+ *logrus.Logger
+}
+
+func (a LogAdapter) Log(keyvals ...interface{}) error {
+ n := (len(keyvals) + 1) / 2
+ if len(keyvals)%2 != 0 {
+ keyvals = append(keyvals, "MISSING")
+ }
+ var fm bytes.Buffer
+ vals := make([]interface{}, n)
+ for i := 0; i < len(keyvals); i += 2 {
+ k := keyvals[i]
+ v := keyvals[i+1]
+ vals[i/2] = v
+ fm.WriteString(fmt.Sprintf(" %s=%%+v", k))
+ }
+ a.Logger.Printf(fm.String(), vals...)
+ return nil
+}
diff --git a/v2/pkgs/indexes.go b/v2/pkgs/indexes.go
new file mode 100644
index 000000000..032d16614
--- /dev/null
+++ b/v2/pkgs/indexes.go
@@ -0,0 +1,100 @@
+package pkgs
+
+import (
+ "context"
+ "encoding/json"
+ "io/ioutil"
+ "net/url"
+ "os"
+ "path/filepath"
+
+ "github.com/arduino/arduino-create-agent/gen/indexes"
+ "github.com/sirupsen/logrus"
+ "go.bug.st/downloader"
+)
+
+// Indexes is a client that implements github.com/arduino/arduino-create-agent/gen/indexes.Service interface
+type Indexes struct {
+ Log *logrus.Logger
+ Folder string
+}
+
+// Add downloads the index file found at the url contained in the payload, and saves it in the Indexes Folder.
+// If called with an already existing index, it overwrites the file.
+// It can fail if the payload is not defined, if it contains an invalid url.
+func (c *Indexes) Add(ctx context.Context, payload *indexes.IndexPayload) (*indexes.Operation, error) {
+ // Parse url
+ indexURL, err := url.Parse(payload.URL)
+ if err != nil {
+ return nil, indexes.MakeInvalidURL(err)
+ }
+
+ // Download tmp file
+ filename := url.PathEscape(payload.URL)
+ path := filepath.Join(c.Folder, filename+".tmp")
+ d, err := downloader.Download(path, indexURL.String())
+ if err != nil {
+ return nil, err
+ }
+ err = d.Run()
+ if err != nil {
+ return nil, err
+ }
+
+ // Move tmp file
+ err = os.Rename(path, filepath.Join(c.Folder, filename))
+ if err != nil {
+ return nil, err
+ }
+
+ return &indexes.Operation{Status: "ok"}, nil
+}
+
+// Get reads the index file from the Indexes Folder, unmarshaling it
+func (c *Indexes) Get(ctx context.Context, uri string) (index Index, err error) {
+ filename := url.PathEscape(uri)
+ path := filepath.Join(c.Folder, filename)
+ data, err := ioutil.ReadFile(path)
+ if err != nil {
+ return index, err
+ }
+
+ err = json.Unmarshal(data, &index)
+ if err != nil {
+ return index, err
+ }
+
+ return index, nil
+}
+
+// List reads from the Indexes Folder and returns the indexes that have been downloaded
+func (c *Indexes) List(context.Context) ([]string, error) {
+ // Create folder if it doesn't exist
+ _ = os.MkdirAll(c.Folder, 0755)
+ // Read files
+ files, err := ioutil.ReadDir(c.Folder)
+ if err != nil {
+ return nil, err
+ }
+
+ res := make([]string, len(files))
+ for i, file := range files {
+ path, err := url.PathUnescape(file.Name())
+ if err != nil {
+ c.Log.Warn(err)
+ }
+ res[i] = path
+ }
+
+ return res, nil
+}
+
+// Remove deletes the index file from the Indexes Folder
+func (c *Indexes) Remove(ctx context.Context, payload *indexes.IndexPayload) (*indexes.Operation, error) {
+ filename := url.PathEscape(payload.URL)
+ err := os.RemoveAll(filepath.Join(c.Folder, filename))
+ if err != nil {
+ return nil, err
+ }
+ return &indexes.Operation{Status: "ok"}, nil
+}
diff --git a/v2/pkgs/indexes_test.go b/v2/pkgs/indexes_test.go
new file mode 100644
index 000000000..ee7625216
--- /dev/null
+++ b/v2/pkgs/indexes_test.go
@@ -0,0 +1,84 @@
+package pkgs_test
+
+import (
+ "context"
+ "io/ioutil"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/arduino/arduino-create-agent/gen/indexes"
+ "github.com/arduino/arduino-create-agent/v2/pkgs"
+)
+
+// TestIndexes performs a series of operations about indexes, ensuring it behaves as expected.
+func TestIndexes(t *testing.T) {
+ // Use local file as index
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ http.ServeFile(w, r, "testdata/package_index.json")
+ }))
+ defer ts.Close()
+
+ // Initialize indexes with a temp folder
+ tmp, err := ioutil.TempDir("", "")
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer os.RemoveAll(tmp)
+
+ service := pkgs.Indexes{
+ Folder: tmp,
+ }
+
+ ctx := context.Background()
+
+ // List indexes, they should be 0
+ list, err := service.List(ctx)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(list) != 0 {
+ t.Fatalf("expected %d == %d (%s)", len(list), 0, "len(list)")
+ }
+
+ // Add a faulty index
+ _, err = service.Add(ctx, &indexes.IndexPayload{URL: ":"})
+ if err == nil || !strings.Contains(err.Error(), "parse :: missing protocol scheme") {
+ t.Fatalf("expected '%v' == '%v' (%s)", err, "parse :: missing protocol scheme", "err")
+ }
+
+ // Add a new index
+ _, err = service.Add(ctx, &indexes.IndexPayload{URL: ts.URL})
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // List indexes, they should be 1
+ list, err = service.List(ctx)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(list) != 1 {
+ t.Fatalf("expected %d == %d (%s)", len(list), 1, "len(list)")
+ }
+ if list[0] != ts.URL {
+ t.Fatalf("expected %s == %s (%s)", list[0], "downloads.arduino.cc/packages/package_index.json", "list[0]")
+ }
+
+ // Remove the index
+ _, err = service.Remove(ctx, &indexes.IndexPayload{URL: ts.URL})
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // List indexes, they should be 0
+ list, err = service.List(ctx)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(list) != 0 {
+ t.Fatalf("expected %d == %d (%s)", len(list), 0, "len(list)")
+ }
+}
diff --git a/v2/pkgs/pkgs.go b/v2/pkgs/pkgs.go
new file mode 100644
index 000000000..271264bf8
--- /dev/null
+++ b/v2/pkgs/pkgs.go
@@ -0,0 +1,29 @@
+// Package pkgs implements the functions from
+// github.com/arduino-create-agent/gen/indexes
+// and github.com/arduino-create-agent/gen/tools.
+//
+// It allows to manage package indexes from arduino
+// cores, and to download tools used for upload.
+package pkgs
+
+// Index is the go representation of a typical
+// package-index file, stripped from every non-used field.
+type Index struct {
+ Packages []struct {
+ Name string `json:"name"`
+ Tools []Tool `json:"tools"`
+ } `json:"packages"`
+}
+
+// Tool is the go representation of the info about a
+//tool contained in a package-index file, stripped from
+//every non-used field.
+type Tool struct {
+ Name string `json:"name"`
+ Version string `json:"version"`
+ Systems []struct {
+ Host string `json:"host"`
+ URL string `json:"url"`
+ Checksum string `json:"checksum"`
+ } `json:"systems"`
+}
diff --git a/v2/pkgs/testdata/package_index.json b/v2/pkgs/testdata/package_index.json
new file mode 100644
index 000000000..d756ca513
--- /dev/null
+++ b/v2/pkgs/testdata/package_index.json
@@ -0,0 +1,5910 @@
+{
+ "packages": [
+ {
+ "name": "arduino",
+ "maintainer": "Arduino",
+ "websiteURL": "http://www.arduino.cc/",
+ "email": "packages@arduino.cc",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "platforms": [
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.2",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.2.tar.bz2",
+ "archiveFileName": "avr-1.6.2.tar.bz2",
+ "checksum": "SHA-256:2909a4c6dd6d7497e7e1b5fcaa2f66a100271417510f3a68593b65af8ff78c1c",
+ "size": "4877442",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.0.1-arduino2"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.3",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.3.tar.bz2",
+ "archiveFileName": "avr-1.6.3.tar.bz2",
+ "checksum": "SHA-256:c30033ba70cbb2d46ee0901a331b0f83be082f9110eda0464b624fdbb51b3c7b",
+ "size": "4876816",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino3"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.0.1-arduino3"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.4",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.4.tar.bz2",
+ "archiveFileName": "avr-1.6.4.tar.bz2",
+ "checksum": "SHA-256:8a243410aeded6dbcbc4b134ba10be5c2562d137bfcf3ac97abdc5844933b363",
+ "size": "4780884",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino5"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.0.1-arduino5"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.5",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.5.tar.bz2",
+ "archiveFileName": "avr-1.6.5.tar.bz2",
+ "checksum": "SHA-256:c72d890aa605add677634c6b25ebc3b2ed9e44c38805b95c47eab17a1ca72db6",
+ "size": "4876957",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino5"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.0.1-arduino5"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.6",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.6.tar.bz2",
+ "archiveFileName": "avr-1.6.6.tar.bz2",
+ "checksum": "SHA-256:08ad5db4978ebea22344edc5d77dce0923d8a644da7a14dc8072e883c76058d8",
+ "size": "4876916",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino5"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.0.1-arduino5"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.7",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.7.tar.bz2",
+ "archiveFileName": "avr-1.6.7.tar.bz2",
+ "checksum": "SHA-256:e112992c59c0d826db9a63967679854a96b3c7b970f788a03834e211f6390caa",
+ "size": "4904283",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino5"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.0.1-arduino5"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.8",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.8.tar.bz2",
+ "archiveFileName": "avr-1.6.8.tar.bz2",
+ "checksum": "SHA-256:0e0775347baf0d93ef344bcd851e7d8d0af2bda7916c119fa3a4ff9b984c575b",
+ "size": "4885536",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino5"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.0.1-arduino5"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.9",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.9.tar.bz2",
+ "archiveFileName": "avr-1.6.9.tar.bz2",
+ "checksum": "SHA-256:eb8bc661162bc689b3ed02c4d24f6ff964e91ace890b7d3db2e316c84ba235f0",
+ "size": "4890702",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino5"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.0.1-arduino5"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.10",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.10.tar.bz2",
+ "archiveFileName": "avr-1.6.10.tar.bz2",
+ "checksum": "SHA-256:92cad999335198f875bdaec5de0169991bee19e0058c623d2cdd0835bb26eeaa",
+ "size": "4991331",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino5"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.0.1-arduino5"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.11",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.11.tar.bz2",
+ "archiveFileName": "avr-1.6.11.tar.bz2",
+ "checksum": "SHA-256:5292d3559de74eb990ff570df43887b44d4e3c6b80ab4ab0f945e94dd68d8210",
+ "size": "4991634",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino5"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.0.1-arduino5"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.12",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.12.tar.bz2",
+ "archiveFileName": "avr-1.6.12.tar.bz2",
+ "checksum": "SHA-256:4a296a3304a51d820e680e2022fb9b9f5706144abf1e5d60c7876a0f9fc7e6d9",
+ "size": "4993586",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.9.2-atmel3.5.3-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino2"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.13",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.13.tar.bz2",
+ "archiveFileName": "avr-1.6.13.tar.bz2",
+ "checksum": "SHA-256:617f458dd3507072b9a6f9fdc78888c66aa420a2fb081c4c1556598a2d69d643",
+ "size": "4993644",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.9.2-atmel3.5.3-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.0.1-arduino5"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.14",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.14.tar.bz2",
+ "archiveFileName": "avr-1.6.14.tar.bz2",
+ "checksum": "SHA-256:a5f7d66c83e3d4722f4c8dab42bdbcf5af98c611ffca1802ef3d1957b894bd92",
+ "size": "4993455",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.9.2-atmel3.5.3-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino6"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.15",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.15.tar.bz2",
+ "archiveFileName": "avr-1.6.15.tar.bz2",
+ "checksum": "SHA-256:c222efcabbdf6fa63ba3d64afb8d149f83fa73693c413aa05a2ccd9f986ed2fa",
+ "size": "4993959",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.9.2-atmel3.5.3-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino6"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.16",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.16.tar.bz2",
+ "archiveFileName": "avr-1.6.16.tar.bz2",
+ "checksum": "SHA-256:2e2e7a7b8ec321b62c54237e8cb03384fc434f3692c47a524e23e61df868ed9a",
+ "size": "5011829",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Uno WiFi"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Leonardo Ethernet"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"},
+ {"name": "Adafruit Circuit Playground"},
+ {"name": "Arduino Yún Mini"},
+ {"name": "Arduino Industrial 101"},
+ {"name": "Linino One"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.9.2-atmel3.5.3-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino8"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.0.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.17",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.17.tar.bz2",
+ "archiveFileName": "avr-1.6.17.tar.bz2",
+ "checksum": "SHA-256:ecf63bb02d8f647f720be64b4bf235e0ca37b3180b90d816cbff1886f6744833",
+ "size": "5011912",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Uno WiFi"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Leonardo Ethernet"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"},
+ {"name": "Adafruit Circuit Playground"},
+ {"name": "Arduino Yún Mini"},
+ {"name": "Arduino Industrial 101"},
+ {"name": "Linino One"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.9.2-atmel3.5.3-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino8"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.0.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.18",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.18.tar.bz2",
+ "archiveFileName": "avr-1.6.18.tar.bz2",
+ "checksum": "SHA-256:7c56e381602b779e8e81ec46deb5c8b82e1c84c134c3c1b672d0b43f5e45ee13",
+ "size": "4897659",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Uno WiFi"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Leonardo Ethernet"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"},
+ {"name": "Adafruit Circuit Playground"},
+ {"name": "Arduino Yún Mini"},
+ {"name": "Arduino Industrial 101"},
+ {"name": "Linino One"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.9.2-atmel3.5.4-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino9"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.1.1"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.19",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.19.tar.bz2",
+ "archiveFileName": "avr-1.6.19.tar.bz2",
+ "checksum": "SHA-256:1c544db39f36a8468f585d6338343718c2892526227fa04805339084dd0ab4ff",
+ "size": "4959420",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Uno WiFi"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Leonardo Ethernet"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"},
+ {"name": "Adafruit Circuit Playground"},
+ {"name": "Arduino Yún Mini"},
+ {"name": "Arduino Industrial 101"},
+ {"name": "Linino One"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.9.2-atmel3.5.4-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino9"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.1.1"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.20",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.20.tar.bz2",
+ "archiveFileName": "avr-1.6.20.tar.bz2",
+ "checksum": "SHA-256:61f3d59a2ab2e9191230e91e79ee91c05f32b32c33129d34d76ef87e56d257e1",
+ "size": "4897949",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Uno WiFi"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Leonardo Ethernet"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"},
+ {"name": "Adafruit Circuit Playground"},
+ {"name": "Arduino Yún Mini"},
+ {"name": "Arduino Industrial 101"},
+ {"name": "Linino One"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.9.2-atmel3.5.4-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino9"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.1.1"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.21",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.21.tar.bz2",
+ "archiveFileName": "avr-1.6.21.tar.bz2",
+ "checksum": "SHA-256:7e5440b0902733f82956c89e554106f25c2ad540ac36286ccba3ceac785bcbba",
+ "size": "4897460",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Uno WiFi"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Leonardo Ethernet"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"},
+ {"name": "Adafruit Circuit Playground"},
+ {"name": "Arduino Yún Mini"},
+ {"name": "Arduino Industrial 101"},
+ {"name": "Linino One"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.9.2-atmel3.5.4-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino9"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.1.1"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.22",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.22.tar.bz2",
+ "archiveFileName": "avr-1.6.22.tar.bz2",
+ "checksum": "SHA-256:e6399b2a687dd8485cbbfcbfbbd4f8e2e15fe535bbcfd1d81bf93d01e31f7d76",
+ "size": "4907422",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Uno WiFi"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Leonardo Ethernet"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"},
+ {"name": "Adafruit Circuit Playground"},
+ {"name": "Arduino Yún Mini"},
+ {"name": "Arduino Industrial 101"},
+ {"name": "Linino One"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "5.4.0-atmel3.6.1-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino14"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.1"
+ }
+ ]
+ },
+ {
+ "name": "Arduino AVR Boards",
+ "architecture": "avr",
+ "version": "1.6.23",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/avr-1.6.23.tar.bz2",
+ "archiveFileName": "avr-1.6.23.tar.bz2",
+ "checksum": "SHA-256:18618d7f256f26cd77c35f4c888d5d1b2334f07925094fdc99ac3188722284aa",
+ "size": "5001988",
+ "boards": [
+ {"name": "Arduino Yún"},
+ {"name": "Arduino/Genuino Uno"},
+ {"name": "Arduino Uno WiFi"},
+ {"name": "Arduino Diecimila"},
+ {"name": "Arduino Nano"},
+ {"name": "Arduino/Genuino Mega"},
+ {"name": "Arduino MegaADK"},
+ {"name": "Arduino Leonardo"},
+ {"name": "Arduino Leonardo Ethernet"},
+ {"name": "Arduino/Genuino Micro"},
+ {"name": "Arduino Esplora"},
+ {"name": "Arduino Mini"},
+ {"name": "Arduino Ethernet"},
+ {"name": "Arduino Fio"},
+ {"name": "Arduino BT"},
+ {"name": "Arduino LilyPadUSB"},
+ {"name": "Arduino Lilypad"},
+ {"name": "Arduino Pro"},
+ {"name": "Arduino ATMegaNG"},
+ {"name": "Arduino Robot Control"},
+ {"name": "Arduino Robot Motor"},
+ {"name": "Arduino Gemma"},
+ {"name": "Adafruit Circuit Playground"},
+ {"name": "Arduino Yún Mini"},
+ {"name": "Arduino Industrial 101"},
+ {"name": "Linino One"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "5.4.0-atmel3.6.1-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino14"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.1"
+ }
+ ]
+ },
+ {
+ "name": "Arduino megaAVR Boards",
+ "architecture": "megaavr",
+ "version": "1.6.23",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/megaavr-1.6.23.tar.bz2",
+ "archiveFileName": "megaavr-1.6.23.tar.bz2",
+ "checksum": "SHA-256:fd19ed3a52f6de289f46c9b5f65e19bc5c3df95358f18693cfaf25bdfb86f43c",
+ "size": "717681",
+ "boards": [
+ {
+ "name": "Arduino Uno WiFi Rev2"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "5.4.0-atmel3.6.1-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino14"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.1"
+ }
+ ]
+ },
+ {
+ "name": "Arduino megaAVR Boards",
+ "architecture": "megaavr",
+ "version": "1.6.24",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/megaavr-1.6.24.tar.bz2",
+ "archiveFileName": "megaavr-1.6.24.tar.bz2",
+ "checksum": "SHA-256:0910af1d6eb44b65fee77c81a7a83ee420e6a7c1622f90754e91a5127d317a1d",
+ "size": "709516",
+ "boards": [
+ {
+ "name": "Arduino Uno WiFi Rev2"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "5.4.0-atmel3.6.1-arduino2"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino14"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.1"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAM Boards (32-bits ARM Cortex-M3)",
+ "architecture": "sam",
+ "version": "1.6.2",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/sam-1.6.2.tar.bz2",
+ "archiveFileName": "sam-1.6.2.tar.bz2",
+ "checksum": "SHA-256:2d3c8a90bc214947cff1b816d0c2706441398efc78af7984d5250f2e50eddd5f",
+ "size": "16174730",
+ "boards": [
+ {"name": "Arduino Due"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.3-arduino"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAM Boards (32-bits ARM Cortex-M3)",
+ "architecture": "sam",
+ "version": "1.6.3",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/sam-1.6.3.tar.bz2",
+ "archiveFileName": "sam-1.6.3.tar.bz2",
+ "checksum": "SHA-256:0a6e1d5542790e38ba454c796aabbd0e48b07635a5b4d8adc044a4eba959ca27",
+ "size": "16174017",
+ "boards": [
+ {"name": "Arduino Due"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.3-arduino"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAM Boards (32-bits ARM Cortex-M3)",
+ "architecture": "sam",
+ "version": "1.6.4",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/sam-1.6.4.tar.bz2",
+ "archiveFileName": "sam-1.6.4.tar.bz2",
+ "checksum": "SHA-256:e0dc94d8ad0756b79838e99ad7409b08b07e40ed667ebe86eae11644ef7bec0d",
+ "size": "16174992",
+ "boards": [
+ {"name": "Arduino Due"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.3-arduino"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAM Boards (32-bits ARM Cortex-M3)",
+ "architecture": "sam",
+ "version": "1.6.5",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/sam-1.6.5.tar.bz2",
+ "archiveFileName": "sam-1.6.5.tar.bz2",
+ "checksum": "SHA-256:1bde19e3af3887dbc3afda46754e1136fb5a3bc535b8de9862ad7753d93902b7",
+ "size": "16529130",
+ "boards": [
+ {"name": "Arduino Due"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.6.1-arduino"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAM Boards (32-bits ARM Cortex-M3)",
+ "architecture": "sam",
+ "version": "1.6.6",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/sam-1.6.6.tar.bz2",
+ "archiveFileName": "sam-1.6.6.tar.bz2",
+ "checksum": "SHA-256:7694b698bb24cd87d7d0b2b4caa09dc6039591868e735b0bf385299aea24c3e4",
+ "size": "16528101",
+ "boards": [
+ {"name": "Arduino Due"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.6.1-arduino"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAM Boards (32-bits ARM Cortex-M3)",
+ "architecture": "sam",
+ "version": "1.6.7",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/sam-1.6.7.tar.bz2",
+ "archiveFileName": "sam-1.6.7.tar.bz2",
+ "checksum": "SHA-256:62d8bde9bf92502aeeaaf5031ed841e4e4c241fe66fc8cb23625349e6129a1ea",
+ "size": "16473479",
+ "boards": [
+ {"name": "Arduino Due"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.6.1-arduino"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAM Boards (32-bits ARM Cortex-M3)",
+ "architecture": "sam",
+ "version": "1.6.8",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/sam-1.6.8.tar.bz2",
+ "archiveFileName": "sam-1.6.8.tar.bz2",
+ "checksum": "SHA-256:cc95b49b7dfe4138687a559d963db09b9b85352051173d93ad8c272aa23c2337",
+ "size": "16471779",
+ "boards": [
+ {"name": "Arduino Due"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.6.1-arduino"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAM Boards (32-bits ARM Cortex-M3)",
+ "architecture": "sam",
+ "version": "1.6.9",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/sam-1.6.9.tar.bz2",
+ "archiveFileName": "sam-1.6.9.tar.bz2",
+ "checksum": "SHA-256:a5eaf60b5845599c24229456d6f5cd2254a275c8b34cac9d5330e4cfab6b0aa7",
+ "size": "16473503",
+ "boards": [
+ {"name": "Arduino Due"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.6.1-arduino"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAM Boards (32-bits ARM Cortex-M3)",
+ "architecture": "sam",
+ "version": "1.6.10",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/sam-1.6.10.tar.bz2",
+ "archiveFileName": "sam-1.6.10.tar.bz2",
+ "checksum": "SHA-256:c53afc342c4017a4f67b96826ace41653f795f4a82e648eb9a190ad995388906",
+ "size": "16474738",
+ "boards": [
+ {"name": "Arduino Due"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.6.1-arduino"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.0",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.0.tar.bz2",
+ "archiveFileName": "samd-1.6.0.tar.bz2",
+ "checksum": "SHA-256:c1d5118ea1bf03c71203de4eafa90dedf265ceb44c0e3712f5a1eba1b12321b0",
+ "size": "177135",
+ "boards": [
+ {
+ "name": "Arduino Zero"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.5-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.0.0-atmel"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAM Boards (32-bits ARM Cortex-M3)",
+ "architecture": "sam",
+ "version": "1.6.11",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/sam-1.6.11.tar.bz2",
+ "archiveFileName": "sam-1.6.11.tar.bz2",
+ "checksum": "SHA-256:fb8e275f39622a5574a11cef85be3ed36a6995c38a19b20de6fb48e9c7f88b70",
+ "size": "16474757",
+ "boards": [
+ {"name": "Arduino Due"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.6.1-arduino"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.0",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.0.tar.bz2",
+ "archiveFileName": "samd-1.6.0.tar.bz2",
+ "checksum": "SHA-256:c1d5118ea1bf03c71203de4eafa90dedf265ceb44c0e3712f5a1eba1b12321b0",
+ "size": "177135",
+ "boards": [
+ {
+ "name": "Arduino Zero"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.5-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.0.0-atmel"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.1",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.1.tar.bz2",
+ "archiveFileName": "samd-1.6.1.tar.bz2",
+ "checksum": "SHA-256:dddd0f906a7c470a1dc7ef5b024e34c24b915020fabb5903b7313246955514a2",
+ "size": "178354",
+ "boards": [
+ {
+ "name": "Arduino Zero"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.6-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.0.0-atmel"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.2",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.2.tar.bz2",
+ "archiveFileName": "samd-1.6.2.tar.bz2",
+ "checksum": "SHA-256:fad804d8fbd2f2b5f0e876708f953b9afd28513d794c6a973825582f1d6bacde",
+ "size": "179578",
+ "boards": [
+ {
+ "name": "Arduino/Genuino Zero"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.6.1-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.0.0-atmel"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.3",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.3.tar.bz2",
+ "archiveFileName": "samd-1.6.3.tar.bz2",
+ "checksum": "SHA-256:f3fb17d593cdf5986c201fe3639d71cc1149b43e996a3afa5533c3b4b0c5a532",
+ "size": "978163",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.6.1-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.0.0-atmel"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.4",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.4.tar.bz2",
+ "archiveFileName": "samd-1.6.4.tar.bz2",
+ "checksum": "SHA-256:02c788e0ae786cabf31c5c27b64d60906d4038f56ee2df689022e32525be7b70",
+ "size": "977481",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.6.1-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.0.0-atmel"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.5",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.5.tar.bz2",
+ "archiveFileName": "samd-1.6.5.tar.bz2",
+ "checksum": "SHA-256:3f3d70b4081b11ab07fc209702604f2ac3a52bb193b7f0d9fa5bb31071b2b34c",
+ "size": "979548",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.6.1-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.0.0-atmel"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.6",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.6.tar.bz2",
+ "archiveFileName": "samd-1.6.6.tar.bz2",
+ "checksum": "SHA-256:08e97127acf41ac28c195803be1d22a5cbf720d4cff58db9458af9255d6ee7b3",
+ "size": "997331",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.6.1-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.0.0-atmel"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.7",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.7.tar.bz2",
+ "archiveFileName": "samd-1.6.7.tar.bz2",
+ "checksum": "SHA-256:7342d0d127b1facdfbe8801759368bb0e6939b8e9a7db86eefb23135e3c701a8",
+ "size": "841503",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.0.0-atmel"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.8",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.8.tar.bz2",
+ "archiveFileName": "samd-1.6.8.tar.bz2",
+ "checksum": "SHA-256:05bff70561e5f10e86d1d890f647614aac989c4b2c384fc1b568e7d8c96b8267",
+ "size": "841801",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.0.0-atmel"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.9",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.9.tar.bz2",
+ "archiveFileName": "samd-1.6.9.tar.bz2",
+ "checksum": "SHA-256:404f64575182e3a21fbf38a729d56d50782754c1d0c5bfe17d4c9155dd84d3d2",
+ "size": "855221",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" },
+ { "name": "Arduino MKRZero" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.0.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.10",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.10.tar.bz2",
+ "archiveFileName": "samd-1.6.10.tar.bz2",
+ "checksum": "SHA-256:b2b47e12ae032e7929d33b13b42033fca15de7ef0d5ea90dab68ebf65e6b4f6b",
+ "size": "2765368",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" },
+ { "name": "Arduino MKRZero" },
+ { "name": "Arduino M0" },
+ { "name": "Arduino M0 Pro" },
+ { "name": "Arduino Tian" },
+ { "name": "Adafruit Circuit Playground Express" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino8"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.0.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.11",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.11.tar.bz2",
+ "archiveFileName": "samd-1.6.11.tar.bz2",
+ "checksum": "SHA-256:3ace3a7850e9052f51b819dbf5c09ea487f8bbbdf3c28d5d4709137b18f12bb7",
+ "size": "2764312",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" },
+ { "name": "Arduino MKRZero" },
+ { "name": "Arduino M0" },
+ { "name": "Arduino M0 Pro" },
+ { "name": "Adafruit Circuit Playground Express" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino8"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.0.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.12",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.12.tar.bz2",
+ "archiveFileName": "samd-1.6.12.tar.bz2",
+ "checksum": "SHA-256:18803709c1024c2fb122430949df06ee316d7971552ef81c430cba14be673f24",
+ "size": "2754767",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" },
+ { "name": "Arduino MKRZero" },
+ { "name": "Arduino M0 Pro" },
+ { "name": "Arduino M0" },
+ { "name": "Arduino Tian" },
+        { "name": "Adafruit Circuit Playground Express" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino5-static"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.1.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.13",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.13.tar.bz2",
+ "archiveFileName": "samd-1.6.13.tar.bz2",
+ "checksum": "SHA-256:217f4e0f3c5f2c1a0d74d1b78559aa6f598aeae33ad7fd06ec79eb7294552db2",
+ "size": "2777384",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" },
+ { "name": "Arduino MKRZero" },
+ { "name": "Arduino MKRFox1200" },
+ { "name": "Arduino M0 Pro" },
+ { "name": "Arduino M0" },
+ { "name": "Arduino Tian" },
+ { "name": "Adafruit Circuit Playground Express" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino6-static"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.1.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.14",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.14.tar.bz2",
+ "archiveFileName": "samd-1.6.14.tar.bz2",
+ "checksum": "SHA-256:bb5f9c69530906cba51641a94e3e195deafafe9d6673c90eda544535f02bb7bf",
+ "size": "2746416",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" },
+ { "name": "Arduino MKRZero" },
+ { "name": "Arduino MKRFox1200" },
+ { "name": "Arduino M0 Pro" },
+ { "name": "Arduino M0" },
+ { "name": "Arduino Tian" },
+ { "name": "Adafruit Circuit Playground Express" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino6-static"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.1.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.15",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.15.tar.bz2",
+ "archiveFileName": "samd-1.6.15.tar.bz2",
+ "checksum": "SHA-256:938b0528dddb66ebb3564dc0b6d01206498d2a1abea90a269ce59ee9dae2753d",
+ "size": "2795099",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" },
+ { "name": "Arduino MKRZero" },
+ { "name": "Arduino MKRFox1200" },
+ { "name": "Arduino M0 Pro" },
+ { "name": "Arduino M0" },
+ { "name": "Arduino Tian" },
+ { "name": "Adafruit Circuit Playground Express" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino6-static"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.1.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.16",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.16.tar.bz2",
+ "archiveFileName": "samd-1.6.16.tar.bz2",
+ "checksum": "SHA-256:d44e32b07642f6dc5e480068f52966113baa912f3b5fa6d39cfa4112c4a25e53",
+ "size": "2796651",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" },
+ { "name": "Arduino MKRZero" },
+ { "name": "Arduino MKRFox1200" },
+ { "name": "Arduino M0 Pro" },
+ { "name": "Arduino M0" },
+ { "name": "Arduino Tian" },
+ { "name": "Adafruit Circuit Playground Express" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino6-static"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.1.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.17",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.17.tar.bz2",
+ "archiveFileName": "samd-1.6.17.tar.bz2",
+ "checksum": "SHA-256:2c691c23cdf9974526ff33f19b7b060b5c03b7cb4708f61f8eb2629666c3c278",
+ "size": "2849852",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" },
+ { "name": "Arduino MKRZERO" },
+ { "name": "Arduino MKR FOX 1200" },
+ { "name": "Arduino MKR WAN 1300" },
+ { "name": "Arduino MKR GSM 1400" },
+ { "name": "Arduino M0 Pro" },
+ { "name": "Arduino M0" },
+ { "name": "Arduino Tian" },
+ { "name": "Adafruit Circuit Playground Express" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino6-static"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.1.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.18",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.18.tar.bz2",
+ "archiveFileName": "samd-1.6.18.tar.bz2",
+ "checksum": "SHA-256:adbc464106abcfb031aeaa8c73ce09340f5aec8805e83bbe6f7a7da974321fa7",
+ "size": "2808624",
+ "boards": [
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" },
+ { "name": "Arduino MKRZERO" },
+ { "name": "Arduino MKR FOX 1200" },
+ { "name": "Arduino MKR WAN 1300" },
+ { "name": "Arduino MKR GSM 1400" },
+ { "name": "Arduino M0 Pro" },
+ { "name": "Arduino M0" },
+ { "name": "Arduino Tian" },
+ { "name": "Adafruit Circuit Playground Express" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino6-static"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.1.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.19",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.19.tar.bz2",
+ "archiveFileName": "samd-1.6.19.tar.bz2",
+ "checksum": "SHA-256:959c10af4413455edf914ad426e18d7f2fbf0a38ce5e9d44232cc95b3d0a8a02",
+ "size": "2844050",
+ "boards": [
+ { "name": "Arduino MKR WiFi 1010" },
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" },
+ { "name": "Arduino MKRZERO" },
+ { "name": "Arduino MKR FOX 1200" },
+ { "name": "Arduino MKR WAN 1300" },
+ { "name": "Arduino MKR GSM 1400" },
+ { "name": "Arduino M0 Pro" },
+ { "name": "Arduino M0" },
+ { "name": "Arduino Tian" },
+ { "name": "Adafruit Circuit Playground Express" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino6-static"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.1.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.6.20",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/cores/samd-1.6.20.tar.bz2",
+ "archiveFileName": "samd-1.6.20.tar.bz2",
+ "checksum": "SHA-256:65a07751d858c7096d4ed9e842768530d41f84c30e26baa398c82e6ad92649bf",
+ "size": "2854887",
+ "boards": [
+ { "name": "Arduino MKR WiFi 1010" },
+ { "name": "Arduino/Genuino Zero" },
+ { "name": "Arduino/Genuino MKR1000" },
+ { "name": "Arduino MKRZERO" },
+ { "name": "Arduino MKR FOX 1200" },
+ { "name": "Arduino MKR WAN 1300" },
+ { "name": "Arduino MKR GSM 1400" },
+ { "name": "Arduino MKR NB 1500" },
+ { "name": "Arduino M0 Pro" },
+ { "name": "Arduino M0" },
+ { "name": "Arduino Tian" },
+ { "name": "Adafruit Circuit Playground Express" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino6-static"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.1.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Beta Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd_beta",
+ "version": "1.6.22",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/samd-beta-1.6.22.tar.bz2",
+ "archiveFileName": "samd-beta-1.6.22.tar.bz2",
+ "checksum": "SHA-256:f9be59deae12889c79900774b6b236da0aa3b8ecc615f91d1cf061b3c63c4531",
+ "size": "2880064",
+ "boards": [
+ {
+ "name": "Arduino MKR Vidor 4000"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "7-2017q4"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0-arduino3"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.10.0-arduino7"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.2.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Beta Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd_beta",
+ "version": "1.6.23",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/samd-beta-1.6.23.tar.gz",
+ "archiveFileName": "samd-beta-1.6.23.tar.gz",
+ "checksum": "SHA-256:5ff9b77ef8044613604d5e915e8e2e69362241b489c1c2bf06f008a5150e1162",
+ "size": "3634633",
+ "boards": [
+ {
+ "name": "Arduino MKR Vidor 4000"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "7-2017q4"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0-arduino3"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.10.0-arduino7"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.2.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Beta Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd_beta",
+ "version": "1.6.24",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/samd-beta-1.6.24.tar.bz2",
+ "archiveFileName": "samd-beta-1.6.24.tar.bz2",
+ "checksum": "SHA-256:922c42decf1088aaaef473f26b9630ebda63cf2ec4db52a39c3eadef45cb7e4a",
+ "size": "2885903",
+ "boards": [
+ {
+ "name": "Arduino MKR Vidor 4000"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "7-2017q4"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0-arduino3"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.10.0-arduino7"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.2.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.1"
+ }
+ ]
+ },
+ {
+ "name": "Arduino SAMD Beta Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd_beta",
+ "version": "1.6.25",
+ "category": "Arduino",
+ "help": {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "url": "http://downloads.arduino.cc/cores/samd-beta-1.6.25.tar.bz2",
+ "archiveFileName": "samd-beta-1.6.25.tar.bz2",
+ "checksum": "SHA-256:95e22b75d2ff1e3a1c9baabad205fbd48964b195390951943035bfbd88286961",
+ "size": "2890759",
+ "boards": [
+ {
+ "name": "Arduino MKR Vidor 4000"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "7-2017q4"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.7.0-arduino3"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.10.0-arduino7"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.2.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoOTA",
+ "version": "1.2.1"
+ }
+ ]
+ },
+ {
+ "name": "Arduino STM32F4 Boards",
+ "architecture": "stm32f4",
+ "version": "1.0.0",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/arduino.org/package_arduino_stm32f4_1.0.0.tar.bz2",
+ "archiveFileName": "package_arduino_stm32f4_1.0.0.tar.bz2",
+ "size": "20499005",
+ "checksum": "SHA-256:79a67167bff45c34a4addf29a4c84904d0dc114c19220bfd204a65e51688b3d1",
+ "boards": [
+ {
+ "name": "Arduino Star OTTO"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "version": "0.8.0-stm32-arduino1",
+ "name": "dfu-util"
+ }
+ ]
+ },
+ {
+ "name": "Arduino STM32F4 Boards",
+ "architecture": "stm32f4",
+ "version": "1.0.1",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/arduino.org/package_arduino_stm32f4_1.0.1.tar.bz2",
+ "archiveFileName": "package_arduino_stm32f4_1.0.1.tar.bz2",
+ "checksum": "SHA-256:e66dd40f8b9faeb6c7400b48745c023aacb65a6d5c700395edc9dd89d2d150c0",
+ "size": "20495225",
+ "boards": [
+ {
+ "name": "Arduino Star OTTO"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "dfu-util",
+ "version": "0.9.0-arduino1"
+ },
+ {
+ "packager": "arduino",
+ "name": "arduinoSTM32load",
+ "version": "2.0.0"
+ }
+ ]
+ },
+ {
+ "name": "Arduino NRF52 Boards",
+ "architecture": "nrf52",
+ "version": "1.0.0",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/arduino.org/package_arduino_nrf52_1.0.0.tar.bz2",
+ "archiveFileName": "package_arduino_nrf52_1.0.0.tar.bz2",
+ "checksum": "SHA-256:d95cf5a1a3794512fd428f00f71c1e334d2fcf6e261bb22e4bbaa64143205de6",
+ "size": "2438905",
+ "boards": [
+ {
+ "name": "Arduino Primo"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.10.0-arduino1-static"
+ },
+ {
+ "packager": "arduino",
+ "name": "nrf5x-cl-tools",
+ "version": "9.3.1"
+ }
+ ]
+ },
+ {
+ "name": "Arduino nRF52 Boards",
+ "architecture": "nrf52",
+ "version": "1.0.1",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/arduino.org/package_arduino_nrf52_1.0.1.tar.bz2",
+ "archiveFileName": "package_arduino_nrf52_1.0.1.tar.bz2",
+ "checksum": "SHA-256:f6c5b66a1257188a2e6cab3df8ec373777f62c3abb29f7196d571d221a5c75ff",
+ "size": "2262233",
+ "boards": [
+ {
+ "name": "Arduino Primo"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.10.0-arduino1-static"
+ },
+ {
+ "packager": "arduino",
+ "name": "nrf5x-cl-tools",
+ "version": "9.3.1"
+ }
+ ]
+ },
+ {
+ "name": "Arduino nRF52 Boards",
+ "architecture": "nrf52",
+ "version": "1.0.2",
+ "category": "Arduino",
+ "url": "http://downloads.arduino.cc/arduino.org/package_arduino_nrf52_1.0.2.tar.bz2",
+ "archiveFileName": "package_arduino_nrf52_1.0.2.tar.bz2",
+ "checksum": "SHA-256:3a96c9476e567b7bca6f0201fcd6a4dbc97bddfbce70bc2d96f5f33a59265163",
+ "size": "2308695",
+ "boards": [
+ { "name": "Arduino Primo" },
+ { "name": "Arduino Primo Core" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.10.0-arduino1-static"
+ },
+ {
+ "packager": "arduino",
+ "name": "nrf5x-cl-tools",
+ "version": "9.3.1"
+ }
+ ]
+ }
+ ],
+ "tools": [
+ {
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1",
+ "systems": [
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/gcc-arm-none-eabi-4.8.3-2014q1-arm.tar.bz2",
+ "archiveFileName": "gcc-arm-none-eabi-4.8.3-2014q1-arm.tar.bz2",
+ "checksum": "SHA-256:ebe96b34c4f434667cab0187b881ed585e7c7eb990fe6b69be3c81ec7e11e845",
+ "size": "44423906"
+ },
+ {
+ "host": "i686-mingw32",
+ "archiveFileName": "gcc-arm-none-eabi-4.8.3-2014q1-windows.tar.gz",
+ "url": "http://downloads.arduino.cc/gcc-arm-none-eabi-4.8.3-2014q1-windows.tar.gz",
+ "checksum": "SHA-256:fd8c111c861144f932728e00abd3f7d1107e186eb9cd6083a54c7236ea78b7c2",
+ "size": "84537449"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/gcc-arm-none-eabi-4.8.3-2014q1-mac.tar.gz",
+ "archiveFileName": "gcc-arm-none-eabi-4.8.3-2014q1-mac.tar.gz",
+ "checksum": "SHA-256:3598acf21600f17a8e4a4e8e193dc422b894dc09384759b270b2ece5facb59c2",
+ "size": "52518522"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/gcc-arm-none-eabi-4.8.3-2014q1-linux64.tar.gz",
+ "archiveFileName": "gcc-arm-none-eabi-4.8.3-2014q1-linux64.tar.gz",
+ "checksum": "SHA-256:d23f6626148396d6ec42a5b4d928955a703e0757829195fa71a939e5b86eecf6",
+ "size": "51395093"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/gcc-arm-none-eabi-4.8.3-2014q1-linux32.tar.gz",
+ "archiveFileName": "gcc-arm-none-eabi-4.8.3-2014q1-linux32.tar.gz",
+ "checksum": "SHA-256:ba1994235f69c526c564f65343f22ddbc9822b2ea8c5ee07dd79d89f6ace2498",
+ "size": "51029223"
+ }
+ ]
+ },
+ {
+ "name": "arm-none-eabi-gcc",
+ "version": "7-2017q4",
+ "systems": [
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/tools/gcc-arm-none-eabi-4.8.3-2014q1-arm.tar.bz2",
+ "archiveFileName": "gcc-arm-none-eabi-4.8.3-2014q1-arm.tar.bz2",
+ "checksum": "SHA-256:ebe96b34c4f434667cab0187b881ed585e7c7eb990fe6b69be3c81ec7e11e845",
+ "size": "44423906"
+ },
+ {
+ "host": "aarch64-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/gcc-arm-none-eabi-7-2018-q2-update-linuxarm64.tar.bz2",
+ "archiveFileName": "gcc-arm-none-eabi-7-2018-q2-update-linuxarm64.tar.bz2",
+ "checksum": "SHA-256:6fb5752fb4d11012bd0a1ceb93a19d0641ff7cf29d289b3e6b86b99768e66f76",
+ "size": "99558726"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/gcc-arm-none-eabi-7-2017-q4-major-win32-arduino1.zip",
+ "archiveFileName": "gcc-arm-none-eabi-7-2017-q4-major-win32-arduino1.zip",
+ "checksum": "SHA-256:96dd0091856f4d2eb21046eba571321feecf7d50b9c156f708b2a8b683903382",
+ "size": "131761924"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/tools/gcc-arm-none-eabi-7-2017-q4-major-mac.tar.bz2",
+ "archiveFileName": "gcc-arm-none-eabi-7-2017-q4-major-mac.tar.bz2",
+ "checksum": "SHA-256:89b776c7cf0591c810b5b60067e4dc113b5b71bc50084a536e71b894a97fdccb",
+ "size": "104550003"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/gcc-arm-none-eabi-7-2017-q4-major-linux64.tar.bz2",
+ "archiveFileName": "gcc-arm-none-eabi-7-2017-q4-major-linux64.tar.bz2",
+ "checksum": "SHA-256:96a029e2ae130a1210eaa69e309ea40463028eab18ba19c1086e4c2dafe69a6a",
+ "size": "99857645"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/gcc-arm-none-eabi-7-2018-q2-update-linux32.tar.bz2",
+ "archiveFileName": "gcc-arm-none-eabi-7-2018-q2-update-linux32.tar.bz2",
+ "checksum": "SHA-256:090a0bc2b1956bc49392dff924a6c30fa57c88130097b1972204d67a45ce3cf3",
+ "size": "97427309"
+ }
+ ]
+ },
+ {
+ "name": "bossac",
+ "version": "1.3-arduino",
+ "systems": [
+ {
+ "host": "i686-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.3a-arduino-i686-linux-gnu.tar.bz2",
+ "archiveFileName": "bossac-1.3a-arduino-i686-linux-gnu.tar.bz2",
+ "checksum": "SHA-256:d6d10362f40729a7877e43474fcf02ad82cf83321cc64ca931f5c82b2d25d24f",
+ "size": "147359"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.3a-arduino-x86_64-pc-linux-gnu.tar.bz2",
+ "archiveFileName": "bossac-1.3a-arduino-x86_64-pc-linux-gnu.tar.bz2",
+ "checksum": "SHA-256:c1daed033251296768fa8b63ad283e053da93427c0f3cd476a71a9188e18442c",
+ "size": "26179"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.3a-arduino-i686-mingw32.tar.bz2",
+ "archiveFileName": "bossac-1.3a-arduino-i686-mingw32.tar.bz2",
+ "checksum": "SHA-256:a37727622e0f86cb4f2856ad0209568a5d804234dba3dc0778829730d61a5ec7",
+ "size": "265647"
+ },
+ {
+ "host": "i386-apple-darwin11",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.3a-arduino-i386-apple-darwin11.tar.bz2",
+ "archiveFileName": "bossac-1.3a-arduino-i386-apple-darwin11.tar.bz2",
+ "checksum": "SHA-256:40770b225753e7a52bb165e8f37e6b760364f5c5e96048168d0178945bd96ad6",
+ "size": "39475"
+ }
+ ]
+ },
+ {
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino2",
+ "systems": [
+ {
+ "size": "24443285",
+ "checksum": "SHA-256:c19a7526235c364d7f62ec1a993d9b495973ba1813869ccf0241c65905896852",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avr-gcc-4.8.1-arduino2-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.8.1-arduino2-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "27152002",
+ "checksum": "SHA-256:24a931877bee5f36dc00a88877219a6d2f6a1fb7abb989fd04556b8432d2e14e",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avr-gcc-4.8.1-arduino2-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.8.1-arduino2-x86_64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "25876628",
+ "checksum": "SHA-256:2d701b4efbc8cec62dc299cde01730c5eebcf23d7e4393db8cf7744a9bf1d3de",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avr-gcc-4.8.1-arduino2-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.8.1-arduino2-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "46046691",
+ "checksum": "SHA-256:2eafb49fb803fa4d2c32d35e24c0b372fcd520ca0a790fa537a847179e382000",
+ "host": "i686-mingw32",
+ "archiveFileName": "avr-gcc-4.8.1-arduino2-i686-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.8.1-arduino2-i686-mingw32.zip"
+ }
+ ]
+ },
+ {
+ "name": "avrdude",
+ "version": "6.0.1-arduino2",
+ "systems": [
+ {
+ "size": "264965",
+ "checksum": "SHA-256:71117cce0096dad6c091e2c34eb0b9a3386d3aec7d863d2da733d9e5eac3a6b1",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avrdude-6.0.1-arduino2-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino2-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "292541",
+ "checksum": "SHA-256:2489004d1d98177eaf69796760451f89224007c98b39ebb5577a9a34f51425f1",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avrdude-6.0.1-arduino2-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino2-x86_64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "283209",
+ "checksum": "SHA-256:6f633dd6270ad0d9ef19507bcbf8697b414a15208e4c0f71deec25ef89cdef3f",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avrdude-6.0.1-arduino2-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino2-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "241618",
+ "checksum": "SHA-256:6c5483800ba753c80893607e30cade8ab77b182808fcc5ea15fa3019c63d76ae",
+ "host": "i686-mingw32",
+ "archiveFileName": "avrdude-6.0.1-arduino2-i686-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino2-i686-mingw32.zip"
+ }
+ ]
+ },
+ {
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino3",
+ "systems": [
+ {
+ "size": "24447175",
+ "checksum": "SHA-256:28e207c66b3dc405367d0c5e68ce3c278e5ec3abb0e4974e7927fe0f9a532c40",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avr-gcc-4.8.1-arduino3-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.8.1-arduino3-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "30556996",
+ "checksum": "SHA-256:028340abec6eb3085b82404dfc7ed143e1bb05b2da961b539ddcdba4a6f65533",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avr-gcc-4.8.1-arduino3-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.8.1-arduino3-x86_64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "28768022",
+ "checksum": "SHA-256:37796548ba9653267568f959cd8c7ebfe5b4bce4599898cf9f876d64e616cb87",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avr-gcc-4.8.1-arduino3-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.8.1-arduino3-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "46046917",
+ "checksum": "SHA-256:d6f0527793f9800f060408392a99eb290ed205730edbae43a1a25cbf6b6b588f",
+ "host": "i686-mingw32",
+ "archiveFileName": "avr-gcc-4.8.1-arduino3-i686-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.8.1-arduino3-i686-mingw32.zip"
+ }
+ ]
+ },
+ {
+ "name": "avrdude",
+ "version": "6.0.1-arduino3",
+ "systems": [
+ {
+ "size": "264682",
+ "checksum": "SHA-256:df7cd4a76e45ab3767eb964f845f4d5e9d643df950ec32812923da1e9843d072",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avrdude-6.0.1-arduino3-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino3-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "748634",
+ "checksum": "SHA-256:bb7bff48f20a68e1fe559c3f3f644574df12ab5c98eb6a1491079f3c760434ad",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avrdude-6.0.1-arduino3-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino3-x86_64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "495482",
+ "checksum": "SHA-256:96a0cfb83fe0452366159e3bf4e19ff10906a8957d1feafd3d98b49ab4b14405",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avrdude-6.0.1-arduino3-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino3-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "241619",
+ "checksum": "SHA-256:ea59bfc2ee85039c85318b2ba52c47ef0573513444a785b72f59b22586a950f9",
+ "host": "i686-mingw32",
+ "archiveFileName": "avrdude-6.0.1-arduino3-i686-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino3-i686-mingw32.zip"
+ }
+ ]
+ },
+ {
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino5",
+ "systems": [
+ {
+ "size": "24403768",
+ "checksum": "SHA-256:c8ffcd2db7a651b48ab4ea19db4b34fbae3e7f0210a0f294592af2bdabf2154b",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "avr-gcc-4.8.1-arduino5-armhf-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.8.1-arduino5-armhf-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "24437400",
+ "checksum": "SHA-256:111b3ef00d737d069eb237a8933406cbb928e4698689e24663cffef07688a901",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avr-gcc-4.8.1-arduino5-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.8.1-arduino5-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "27093036",
+ "checksum": "SHA-256:9054fcc174397a419ba56c4ce1bfcbcad275a6a080cc144905acc9b0351ee9cc",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avr-gcc-4.8.1-arduino5-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.8.1-arduino5-x86_64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "25882375",
+ "checksum": "SHA-256:7648b7f549b37191da0b0be53bae791b652f82ac3cb4e7877f85075aaf32141f",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avr-gcc-4.8.1-arduino5-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.8.1-arduino5-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "46044779",
+ "checksum": "SHA-256:d4303226a7b41d3c445d901b5aa5903458def3fc7b7ff4ffef37cabeb37d424d",
+ "host": "i686-mingw32",
+ "archiveFileName": "avr-gcc-4.8.1-arduino5-i686-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.8.1-arduino5-i686-mingw32.zip"
+ }
+ ]
+ },
+ {
+ "name": "avrdude",
+ "version": "6.0.1-arduino5",
+ "systems": [
+ {
+ "size": "267095",
+ "checksum": "SHA-256:23ea1341dbc117ec067f2eb1a498ad2bdd7d11fff0143c00b2e018c39804f6b4",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "avrdude-6.0.1-arduino5-armhf-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino5-armhf-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "264894",
+ "checksum": "SHA-256:41af8d3b0a586853c8317b4fb5163ca0db594a1870ddf680fd988c42166fc3e5",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avrdude-6.0.1-arduino5-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino5-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "292629",
+ "checksum": "SHA-256:d826cca7383461f7e8adde686372cf900e9cb3afd639555cf2d6c645b283a476",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avrdude-6.0.1-arduino5-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino5-x86_64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "283121",
+ "checksum": "SHA-256:5933d66927bce46ababa9b68a8b7f1d53f68c4f3ff7a5ce4b85d7cf4e6c6bfee",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avrdude-6.0.1-arduino5-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino5-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "241634",
+ "checksum": "SHA-256:41f667f1f6a0ab8df46b4ffacd023176dcdef331d6db3b74bddd37d18cca0a44",
+ "host": "i686-mingw32",
+ "archiveFileName": "avrdude-6.0.1-arduino5-i686-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino5-i686-mingw32.zip"
+ }
+ ]
+ },
+ {
+ "name": "avr-gcc",
+ "version": "4.9.2-atmel3.5.3-arduino",
+ "systems": [
+ {
+ "size": "27046965",
+ "checksum": "SHA-256:adeee70be27cc3ee0e4b9e844610d9c534c7b21dae24ec3fa49808c2f04958de",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.3-arduino-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.3-arduino-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "27400001",
+ "checksum": "SHA-256:02dba9ee77694c23a4c304416a3808949c8faedf07f25a225a4189d850615ec6",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.3-arduino-armhf-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.3-arduino-armhf-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "29904544",
+ "checksum": "SHA-256:0711e885c0430859e7fea3831af8c69a0c25f92a90ecfda9281799a0acec7455",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.3-arduino-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.3-arduino-x86_64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "29077606",
+ "checksum": "SHA-256:fe0bb1d6369694779ceb671d457ccadbeafe855a11f6746b7db20055cea4df33",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.3-arduino-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.3-arduino-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "43847566",
+ "checksum": "SHA-256:445ce3117e87be7e196809fbbea373976160689b6d4b43dbf185eb4c914d1469",
+ "host": "i686-mingw32",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.3-arduino-i686-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.3-arduino-i686-mingw32.zip"
+ }
+ ]
+ },
+ {
+ "name": "avr-gcc",
+ "version": "4.9.2-atmel3.5.3-arduino2",
+ "systems": [
+ {
+ "size": "27400889",
+ "checksum": "SHA-256:77f300d519bc6b9a25df17b36cb303218e9a258c059b2f6bff8f71a0d8f96821",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.3-arduino2-armhf-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.3-arduino2-armhf-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "27048070",
+ "checksum": "SHA-256:311258af188defe24a4b341e4e1f4dc93ca6c80516d3e3b55a2fc07a7050248b",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.3-arduino2-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.3-arduino2-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "43847945",
+ "checksum": "SHA-256:f8e6ede8746c70be01ec79a30803277cd94360cc5b2e104762da0fbcf536fcc6",
+ "host": "i686-mingw32",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.3-arduino2-i686-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.3-arduino2-i686-mingw32.zip"
+ },
+ {
+ "size": "29292729",
+ "checksum": "SHA-256:f108951e7c4dc90926d1fc76cc27549f6ea63c702a2bb7ff39647a19ae86ec68",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.3-arduino2-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.3-arduino2-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "29882960",
+ "checksum": "SHA-256:3903a6d1bb9fdd91727e504b5993d5501f119bcb7f99f7aee98a2101e5629188",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.3-arduino2-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.3-arduino2-x86_64-pc-linux-gnu.tar.bz2"
+ }
+ ]
+ },
+ {
+ "name": "avr-gcc",
+ "version": "4.9.2-atmel3.5.4-arduino2",
+ "systems": [
+ {
+ "size": "27764772",
+ "checksum": "SHA-256:ee36009e19bd238d1f6351cbc9aa5db69714761f67dec4c1d69d5d5d7758720c",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.4-arduino2-armhf-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.4-arduino2-armhf-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "28574644",
+ "checksum": "SHA-256:67b3ed3555eacf0b4fc6f62240773b9f0220171fe4de26bb8d711547fc884730",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.4-arduino2-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.4-arduino2-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "44386446",
+ "checksum": "SHA-256:6044551cd729d88ea6ffcccf10aad1934c5b164d61f4f5890b0e78524ffff853",
+ "host": "i686-mingw32",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.4-arduino2-i686-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.4-arduino2-i686-mingw32.zip"
+ },
+ {
+ "size": "29723974",
+ "checksum": "SHA-256:63a9d4cebbac06fd5fa8f48a2e2ba7d513837dcddc97f560129b4e466af901b5",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.4-arduino2-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.4-arduino2-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "30374404",
+ "checksum": "SHA-256:19480217f1524d78467b83cd742f503182bbcc76b5440093261f146828aa588c",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avr-gcc-4.9.2-atmel3.5.4-arduino2-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-4.9.2-atmel3.5.4-arduino2-x86_64-pc-linux-gnu.tar.bz2"
+ }
+ ]
+ },
+ {
+ "name": "avr-gcc",
+ "version": "5.4.0-atmel3.6.1-arduino2",
+ "systems": [
+ {
+ "size": "31449123",
+ "checksum": "SHA-256:6741f95cc3182a8729cf9670eb13d8dc5a19e881639ca61e53a2d78346a4e99f",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "avr-gcc-5.4.0-atmel3.6.1-arduino2-armhf-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-5.4.0-atmel3.6.1-arduino2-armhf-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "33141295",
+ "checksum": "SHA-256:0fa9e4f2d6d09782dbc84dd91a302849cde2f192163cb9f29484c5f32785269a",
+ "host": "aarch64-linux-gnu",
+ "archiveFileName": "avr-gcc-5.4.0-atmel3.6.1-arduino2-aarch64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-5.4.0-atmel3.6.1-arduino2-aarch64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "31894498",
+ "checksum": "SHA-256:abc50137543ba73e227b4d1b8510fff50a474bacd24f2c794f852904963849f8",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avr-gcc-5.4.0-atmel3.6.1-arduino2-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-5.4.0-atmel3.6.1-arduino2-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "45923772",
+ "checksum": "SHA-256:7eb5691a379b547798fae535b05d68bc02d3969f12d051b8a5a5f2f350ab0a7f",
+ "host": "i686-mingw32",
+ "archiveFileName": "avr-gcc-5.4.0-atmel3.6.1-arduino2-i686-w64-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-5.4.0-atmel3.6.1-arduino2-i686-w64-mingw32.zip"
+ },
+ {
+ "size": "33022916",
+ "checksum": "SHA-256:51f87e04f3cdaa73565c751051ac118e02904ad8478f1475b300e1bffcd5538f",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avr-gcc-5.4.0-atmel3.6.1-arduino2-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-5.4.0-atmel3.6.1-arduino2-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "33522375",
+ "checksum": "SHA-256:05422b0d73b10357c12ea938f02cf50529422b89a4722756e70024aed3e69185",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avr-gcc-5.4.0-atmel3.6.1-arduino2-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avr-gcc-5.4.0-atmel3.6.1-arduino2-x86_64-pc-linux-gnu.tar.bz2"
+ }
+ ]
+ },
+ {
+ "name": "avrdude",
+ "version": "6.3.0-arduino2",
+ "systems": [
+ {
+ "size": "643484",
+ "checksum": "SHA-256:26af86137d8a872f64d217cb262734860b36fe26d6d34faf72e951042f187885",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "avrdude-6.3.0-arduino2-armhf-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino2-armhf-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "653968",
+ "checksum": "SHA-256:32525ea3696c861030e1a6006a5f11971d1dad331e45bfa68dac35126476b04f",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avrdude-6.3.0-arduino2-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino2-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "745081",
+ "checksum": "SHA-256:9635af5a35bdca11804c07582d7beec458140fb6e3308168c3deda18dc6790fa",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avrdude-6.3.0-arduino2-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino2-x86_64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "731802",
+ "checksum": "SHA-256:790b6cb610c48e73a2a0f65dcee9903d2fd7f1b0a1f75008a9a21f50a60c7251",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avrdude-6.3.0-arduino2-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino2-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "608496",
+ "checksum": "SHA-256:8eaf98ea41fbd4450483488ef31710cbcc43c0412dbc8e1e1b582feaab6eca30",
+ "host": "i686-mingw32",
+ "archiveFileName": "avrdude-6.3.0-arduino2-i686-w64-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino2-i686-w64-mingw32.zip"
+ }
+ ]
+ },
+ {
+ "name": "avrdude",
+ "version": "6.3.0-arduino6",
+ "systems": [
+ {
+ "size": "644600",
+ "checksum": "SHA-256:2426207423d58eb0e5fc4df9493418f1cb54ba3f328fdc7c3bb582f920b9cbe7",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "avrdude-6.3.0-arduino6-armhf-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino6-armhf-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "696273",
+ "checksum": "SHA-256:d9a039c9e92d3dbb2011e75e6c044a1a4a2789e2fbf8386b1d580994811be084",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avrdude-6.3.0-arduino6-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino6-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "746653",
+ "checksum": "SHA-256:97b4875cad6110c70101bb776f3ac37b64a2e73f036cd0b10afb6f4be96a6621",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avrdude-6.3.0-arduino6-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino6-x86_64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "733127",
+ "checksum": "SHA-256:5f4bc4b0957b1d34cec9908b7f84a7c297b894b39fe16a4992c284b24c00d6fb",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avrdude-6.3.0-arduino6-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino6-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "645859",
+ "checksum": "SHA-256:7468a1bcdfa459d175a095b102c0de28efc466accfb104305fbcad7832659ddc",
+ "host": "i686-mingw32",
+ "archiveFileName": "avrdude-6.3.0-arduino6-i686-w64-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino6-i686-w64-mingw32.zip"
+ }
+ ]
+ },
+ {
+ "name": "avrdude",
+ "version": "6.3.0-arduino8",
+ "systems": [
+ {
+ "size": "644550",
+ "checksum": "SHA-256:25a6834ae48019fccf37024236a1f79fe21760414292a4f3fa058d937ceee1ce",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "avrdude-6.3.0-arduino8-armhf-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino8-armhf-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "697268",
+ "checksum": "SHA-256:be8a33a7ec01bb7123279466ffa31371e0aa4fccefffcc23ce71810b59531947",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avrdude-6.3.0-arduino8-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino8-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "711544",
+ "checksum": "SHA-256:85f38d02e2398d3b7f93da2ca8b830ee65bb73f66cc7a7b30c466d3cebf2da6e",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avrdude-6.3.0-arduino8-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino8-x86_64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "701718",
+ "checksum": "SHA-256:8e2e4bc71d22e9d11ed143763b97f3aa2d164cdeee678a9deaf5b36e245b2d20",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avrdude-6.3.0-arduino8-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino8-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "645996",
+ "checksum": "SHA-256:3a7592f6c33efd658b820c73d1058d3c868a297cbddb37da5644973c3b516d5e",
+ "host": "i686-mingw32",
+ "archiveFileName": "avrdude-6.3.0-arduino8-i686-w64-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino8-i686-w64-mingw32.zip"
+ }
+ ]
+ },
+ {
+ "name": "avrdude",
+ "version": "6.3.0-arduino9",
+ "systems": [
+ {
+ "size": "644550",
+ "checksum": "SHA-256:25a6834ae48019fccf37024236a1f79fe21760414292a4f3fa058d937ceee1ce",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "avrdude-6.3.0-arduino9-armhf-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino9-armhf-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "697309",
+ "checksum": "SHA-256:bfa06bc042dff252d3a8eded98da159484e75b46d2697da4d9446dcd2aea8465",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avrdude-6.3.0-arduino9-i386-apple-darwin11.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino9-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "711229",
+ "checksum": "SHA-256:c8cccb84e2fe49ee837b24f0a60a99e9c371dae26e84c5b0b22b6b6aab2f1f6a",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avrdude-6.3.0-arduino9-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino9-x86_64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "701590",
+ "checksum": "SHA-256:4235a2d58e3c3224c603d6c5f0610507ed6c48ebf4051fdcce9f77a7646e218b",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avrdude-6.3.0-arduino9-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino9-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "645974",
+ "checksum": "SHA-256:f3c5cfa8d0b3b0caee81c5b35fb6acff89c342ef609bf4266734c6266a256d4f",
+ "host": "i686-mingw32",
+ "archiveFileName": "avrdude-6.3.0-arduino9-i686-w64-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino9-i686-w64-mingw32.zip"
+ }
+ ]
+ },
+ {
+ "name": "avrdude",
+ "version": "6.3.0-arduino14",
+ "systems": [
+ {
+ "size": "219616",
+ "checksum": "SHA-256:d1a06275490d59a431c419788bbc53ffd5a79510dac1a35e63cf488621ba5589",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "avrdude-6.3.0-arduino14-armhf-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino14-armhf-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "229688",
+ "checksum": "SHA-256:439f5de150695e3732dd598bb182dae6ec1e3a5cdb580f855d9b58e485e84e66",
+ "host": "aarch64-linux-gnu",
+ "archiveFileName": "avrdude-6.3.0-arduino14-aarch64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino14-aarch64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "256917",
+ "checksum": "SHA-256:47d03991522722ce92120c60c4118685b7861909d895f34575001137961e4a63",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "avrdude-6.3.0-arduino14-i386-apple-darwin12.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino14-i386-apple-darwin11.tar.bz2"
+ },
+ {
+ "size": "253366",
+ "checksum": "SHA-256:7986e8f3059353dc08f9234f7dbc98d9b2fa2242f046f02a8243a060f7358bfc",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "avrdude-6.3.0-arduino14-x86_64-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino14-x86_64-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "244293",
+ "checksum": "SHA-256:4f100e3843c635064997df91d2a079ab15cd30d1d7fa227280abe6a7c3bc74ca",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "avrdude-6.3.0-arduino14-i686-pc-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino14-i686-pc-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "328363",
+ "checksum": "SHA-256:69293e0de2eff8de89f553477795c25005f674a320bbba4b0222beb0194aa297",
+ "host": "i686-mingw32",
+ "archiveFileName": "avrdude-6.3.0-arduino14-i686-w64-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/avrdude-6.3.0-arduino14-i686-w64-mingw32.zip"
+ }
+ ]
+ },
+ {
+ "name": "arduinoOTA",
+ "version": "1.0.0",
+ "systems": [
+ {
+ "size": "2044124",
+ "checksum": "SHA-256:850a86876403cb45c944590a8cc7f9d8ef6d53ed853f7a9593ec395c4c1c6b2d",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "arduinoOTA-1.0.0-linux32.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.0.0-linux32.tar.bz2"
+ },
+ {
+ "size": "2178772",
+ "checksum": "SHA-256:f01f25e02787492a8a30414230635adae76ed85228045437433892d185991f9e",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "arduinoOTA-1.0.0-linux64.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.0.0-linux64.tar.bz2"
+ },
+ {
+ "size": "1961623",
+ "checksum": "SHA-256:0ca6c0a93bfad50be0b6e62dc51ba6c3267b809bab4ec91ef9606ab7d838e46b",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "arduinoOTA-1.0.0-linuxarm.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.0.0-linuxarm.tar.bz2"
+ },
+ {
+ "size": "2180617",
+ "checksum": "SHA-256:e63c6034da2c1a7fe453eaa29c22df88627cc0aa3c5cbab7635c19367b74ee59",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "arduinoOTA-1.0.0-osx.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.0.0-osx.tar.bz2"
+ },
+ {
+ "size": "2247970",
+ "checksum": "SHA-256:7bced1489217e07661ea1e75702a10a874b54f6146e2414ee47684c7eac014d1",
+ "host": "i686-mingw32",
+ "archiveFileName": "arduinoOTA-1.0.0-windows.zip",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.0.0-windows.zip"
+ }
+ ]
+ },
+ {
+ "name": "arduinoOTA",
+ "version": "1.1.1",
+ "systems": [
+ {
+ "size": "2045036",
+ "checksum": "SHA-256:7ac91ef1d5b357c0ceb790be02ef54986db598ba5a42fffbd6c8ecbdd6a271ef",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "arduinoOTA-1.1.1-linux_386.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.1.1-linux_386.tar.bz2"
+ },
+ {
+ "size": "2178288",
+ "checksum": "SHA-256:eb5ad0a457dd7f610f7f9b85454399c36755673d61a16f9d07cdfcbbb32ec277",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "arduinoOTA-1.1.1-linux_amd64.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.1.1-linux_amd64.tar.bz2"
+ },
+ {
+ "size": "1962115",
+ "checksum": "SHA-256:e4880d83df3d3f6f4b7b7bcde161e80a0556877468803a3c6066ee4ad18a374c",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "arduinoOTA-1.1.1-linux_arm.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.1.1-linux_arm.tar.bz2"
+ },
+ {
+ "size": "2181376",
+ "checksum": "SHA-256:a1ce7cf578982f3af5e4fab6b5839e44830d7a41cb093faba5c4b45952a6fa55",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "arduinoOTA-1.1.1-darwin_amd64.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.1.1-darwin_amd64.tar.bz2"
+ },
+ {
+ "size": "2248431",
+ "checksum": "SHA-256:b2d3610c77f969a68cd75b6ea66bf63ec10c263937009d99147fbcd975c90006",
+ "host": "i686-mingw32",
+ "archiveFileName": "arduinoOTA-1.1.1-windows_386.zip",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.1.1-windows_386.zip"
+ }
+ ]
+ },
+ {
+ "name": "arduinoOTA",
+ "version": "1.2.0",
+ "systems": [
+ {
+ "size": "1839854",
+ "checksum": "SHA-256:7157a0b56620fb43b8dfb4afd958f8b294476a5ce4322c212167ca5d4092f2d9",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "arduinoOTA-1.2.0-linux_386.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.2.0-linux_386.tar.bz2"
+ },
+ {
+ "size": "1974030",
+ "checksum": "SHA-256:f672c1c407c4cb10729a1d891bdb8b010e2043e5415e1c2559bf39cdeaede78c",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "arduinoOTA-1.2.0-linux_amd64.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.2.0-linux_amd64.tar.bz2"
+ },
+ {
+ "size": "1787138",
+ "checksum": "SHA-256:ac49ffcd3239a6a52215f89dbda012d28f1296e6d79fc0efc3df06f919105744",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "arduinoOTA-1.2.0-linux_arm.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.2.0-linux_arm.tar.bz2"
+ },
+ {
+ "size": "1992476",
+ "checksum": "SHA-256:160e83e77d7a60514ca40fedf34f539124aac4b9ae0e2bfdf8fda11d958de38f",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "arduinoOTA-1.2.0-darwin_amd64.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.2.0-darwin_amd64.tar.bz2"
+ },
+ {
+ "size": "2003964",
+ "checksum": "SHA-256:9d26747093ab7966bfeffced9dbd7def0e164bba0db89f5efb3f7f8011496c8f",
+ "host": "i686-mingw32",
+ "archiveFileName": "arduinoOTA-1.2.0-windows_386.zip",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.2.0-windows_386.zip"
+ }
+ ]
+ },
+ {
+ "name": "arduinoOTA",
+ "version": "1.2.1",
+ "systems": [
+ {
+ "size": "2133779",
+ "checksum": "SHA-256:2ffdf64b78486c1d0bf28dc23d0ca36ab75ca92e84b9487246da01888abea6d4",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "arduinoOTA-1.2.1-linux_386.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.2.1-linux_386.tar.bz2"
+ },
+ {
+ "size": "2257689",
+ "checksum": "SHA-256:5b82310d53688480f34a916aac31cd8f2dd2be65dd8fa6c2445262262e1948f9",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "arduinoOTA-1.2.1-linux_amd64.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.2.1-linux_amd64.tar.bz2"
+ },
+ {
+ "size": "2093132",
+ "checksum": "SHA-256:ad54b3dcd586212941fd992bab573b53d13207a419a3f2981c970a085ae0e9e0",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "arduinoOTA-1.2.1-linux_arm.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.2.1-linux_arm.tar.bz2"
+ },
+ {
+ "size": "2093132",
+ "checksum": "SHA-256:ad54b3dcd586212941fd992bab573b53d13207a419a3f2981c970a085ae0e9e0",
+ "host": "aarch64-linux-gnu",
+ "archiveFileName": "arduinoOTA-1.2.1-linux_arm.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.2.1-linux_arm.tar.bz2"
+ },
+ {
+ "size": "2244088",
+ "checksum": "SHA-256:93a6d9f9c0c765d237be1665bf7a0a8e2b0b6d2a8531eae92db807f5515088a7",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "arduinoOTA-1.2.1-darwin_amd64.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.2.1-darwin_amd64.tar.bz2"
+ },
+ {
+ "size": "2237511",
+ "checksum": "SHA-256:e1ebf21f2c073fce25c09548c656da90d4ef6c078401ec6f323e0c58335115e5",
+ "host": "i686-mingw32",
+ "archiveFileName": "arduinoOTA-1.2.1-windows_386.zip",
+ "url": "http://downloads.arduino.cc/tools/arduinoOTA-1.2.1-windows_386.zip"
+ }
+ ]
+ },
+ {
+ "name": "bossac",
+ "version": "1.5-arduino",
+ "systems": [
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/bossac-1.5-arduino2-arm-linux-gnueabihf.tar.bz2",
+ "archiveFileName": "bossac-1.5-arduino2-arm-linux-gnueabihf.tar.bz2",
+ "checksum": "SHA-256:7b61b7814e5b57bcbd853439fc9cd3e98af4abfdd369bf039c6917f9599e44b9",
+ "size": "199550"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/bossac-1.5-arduino2-mingw32.tar.gz",
+ "archiveFileName": "bossac-1.5-arduino2-mingw32.tar.gz",
+ "checksum": "SHA-256:9d849a34f0b26c25c6a8c4d741cd749dea238cade73b57a3048f248c431d9cc9",
+ "size": "222283"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/bossac-1.5-arduino2-i386-apple-darwin14.3.0.tar.gz",
+ "archiveFileName": "bossac-1.5-arduino2-i386-apple-darwin14.3.0.tar.gz",
+ "checksum": "SHA-256:8f07e50a1f887cb254092034c6a4482d73209568cd83cb624d6625d66794f607",
+ "size": "64120"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/bossac-1.5-arduino2-x86_64-linux-gnu.tar.gz",
+ "archiveFileName": "bossac-1.5-arduino2-x86_64-linux-gnu.tar.gz",
+ "checksum": "SHA-256:42785329155dcb39872d4d30a2a9d31e0f0ce3ae7e34a3ed3d840cbc909c4657",
+ "size": "30431"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/bossac-1.5-arduino2-i486-linux-gnu.tar.gz",
+ "archiveFileName": "bossac-1.5-arduino2-i486-linux-gnu.tar.gz",
+ "checksum": "SHA-256:ac56e553bbd6d992fa5592ace90996806230ab582f2bf9f8590836fec9dabef6",
+ "size": "29783"
+ }
+ ]
+ },
+ {
+ "name": "bossac",
+ "version": "1.6-arduino",
+ "systems": [
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.6-arduino-mingw32.tar.gz",
+ "archiveFileName": "bossac-1.6-arduino-mingw32.tar.gz",
+ "checksum": "SHA-256:b59d64d3f7a43c894d0fba2dd1241bbaeefedf8c902130a24d8ec63b08f9ff6a",
+ "size": "222517"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.6-arduino-i386-apple-darwin14.4.0.tar.gz",
+ "archiveFileName": "bossac-1.6-arduino-i386-apple-darwin14.4.0.tar.gz",
+ "checksum": "SHA-256:6b3b686a782b6587c64c85db80085c9089c5ea1b051e49e5af17b3c6109c8efa",
+ "size": "64538"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.6-arduino-x86_64-linux-gnu.tar.gz",
+ "archiveFileName": "bossac-1.6-arduino-x86_64-linux-gnu.tar.gz",
+ "checksum": "SHA-256:2ce7a54d609b4ce3b678147202b2556dd1ce5b318de48a018c676521b994c7a7",
+ "size": "30649"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.6-arduino-i486-linux-gnu.tar.gz",
+ "archiveFileName": "bossac-1.6-arduino-i486-linux-gnu.tar.gz",
+ "checksum": "SHA-256:5c320bf5cfdbf03e3f648642e6de325e459a061fcf96b2215cb955263f7467b2",
+ "size": "30072"
+ }
+ ]
+ },
+ {
+ "name": "bossac",
+ "version": "1.6.1-arduino",
+ "systems": [
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/bossac-1.6.1-arduino-arm-linux-gnueabihf.tar.bz2",
+ "archiveFileName": "bossac-1.6.1-arduino-arm-linux-gnueabihf.tar.bz2",
+ "checksum": "SHA-256:8c4e63db982178919c824e7a35580dffc95c3426afa7285de3eb583982d4d391",
+ "size": "201341"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/bossac-1.6.1-arduino-mingw32.tar.gz",
+ "archiveFileName": "bossac-1.6.1-arduino-mingw32.tar.gz",
+ "checksum": "SHA-256:d59f43e2e83a337d04c4ae88b195a4ee175b8d87fff4c43144d23412a4a9513b",
+ "size": "222918"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/bossac-1.6.1-arduino-i386-apple-darwin14.5.0.tar.gz",
+ "archiveFileName": "bossac-1.6.1-arduino-i386-apple-darwin14.5.0.tar.gz",
+ "checksum": "SHA-256:2f80ef569a3fb19da60ab3489e49d8fe7d4699876acf30ff4938c632230a09aa",
+ "size": "64587"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/bossac-1.6.1-arduino-x86_64-linux-gnu.tar.gz",
+ "archiveFileName": "bossac-1.6.1-arduino-x86_64-linux-gnu.tar.gz",
+ "checksum": "SHA-256:b78afc66c00ccfdd69a08bd3959c260a0c64ccce78a71d5a1135ae4437ff40db",
+ "size": "30869"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/bossac-1.6.1-arduino-i486-linux-gnu.tar.gz",
+ "archiveFileName": "bossac-1.6.1-arduino-i486-linux-gnu.tar.gz",
+ "checksum": "SHA-256:1e211347569d75193b337296a10dd25b0ce04419e3d7dc644355178b6b514f92",
+ "size": "30320"
+ }
+ ]
+ },
+ {
+ "name": "bossac",
+ "version": "1.7.0",
+ "systems": [
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.7.0-mingw32.tar.gz",
+ "archiveFileName": "bossac-1.7.0-mingw32.tar.gz",
+ "checksum": "SHA-256:9ef7d11b4fabca0adc17102a0290957d5cc26ce46b422c3a5344722c80acc7b2",
+ "size": "243066"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.7.0-i386-apple-darwin15.6.0.tar.gz",
+ "archiveFileName": "bossac-1.7.0-i386-apple-darwin15.6.0.tar.gz",
+ "checksum": "SHA-256:feac36ab38876c163dcf51bdbcfbed01554eede3d41c59a0e152e170fe5164d2",
+ "size": "63822"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.7.0-x86_64-linux-gnu.tar.gz",
+ "archiveFileName": "bossac-1.7.0-x86_64-linux-gnu.tar.gz",
+ "checksum": "SHA-256:9475c0c8596c1ba12dcbce60e48fef7559087fa8eccbea7bab732113f3c181ee",
+ "size": "31373"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.7.0-i686-linux-gnu.tar.gz",
+ "archiveFileName": "bossac-1.7.0-i686-linux-gnu.tar.gz",
+ "checksum": "SHA-256:17003b0bdc698d52eeb91b09c34aec501c6e0285b4aa88659ab7cc407a451a4d",
+ "size": "31086"
+ },
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.7.0-arm-linux-gnueabihf.tar.gz",
+ "archiveFileName": "bossac-1.7.0-arm-linux-gnueabihf.tar.gz",
+ "checksum": "SHA-256:09e46d0af61b2189caaac0bc6d4dd15cb22c167fdedc56ec98602dd5f10e68e0",
+ "size": "27382"
+ }
+ ]
+ },
+ {
+ "name": "bossac",
+ "version": "1.7.0-arduino3",
+ "systems": [
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.7.0-arduino3-windows.tar.gz",
+ "archiveFileName": "bossac-1.7.0-arduino3-windows.tar.gz",
+ "checksum": "SHA-256:62745cc5a98c26949ec9041ef20420643c561ec43e99dae659debf44e6836526",
+ "size": "3607421"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.7.0-arduino3-osx.tar.gz",
+ "archiveFileName": "bossac-1.7.0-arduino3-osx.tar.gz",
+ "checksum": "SHA-256:adb3c14debd397d8135e9e970215c6972f0e592c7af7532fa15f9ce5e64b991f",
+ "size": "75510"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.7.0-arduino3-linux64.tar.gz",
+ "archiveFileName": "bossac-1.7.0-arduino3-linux64.tar.gz",
+ "checksum": "SHA-256:1ae54999c1f97234a5c603eb99ad39313b11746a4ca517269a9285afa05f9100",
+ "size": "207271"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.7.0-arduino3-linux32.tar.gz",
+ "archiveFileName": "bossac-1.7.0-arduino3-linux32.tar.gz",
+ "checksum": "SHA-256:4ac4354746d1a09258f49a43ef4d1baf030d81c022f8434774268b00f55d3ec3",
+ "size": "193577"
+ },
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.7.0-arduino3-linuxarm.tar.gz",
+ "archiveFileName": "bossac-1.7.0-arduino3-linuxarm.tar.gz",
+ "checksum": "SHA-256:626c6cc548046901143037b782bf019af1663bae0d78cf19181a876fb9abbb90",
+ "size": "193941"
+ },
+ {
+ "host": "aarch64-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.7.0-arduino3-linuxaarch64.tar.gz",
+ "archiveFileName": "bossac-1.7.0-arduino3-linuxaarch64.tar.gz",
+ "checksum": "SHA-256:a098b2cc23e29f0dc468416210d097c4a808752cd5da1a7b9b8b7b931a04180b",
+ "size": "268365"
+ }
+ ]
+ },
+ {
+ "name": "bossac",
+ "version": "1.8.0-48-gb176eee",
+ "systems": [
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.8-48-gb176eee-i686-w64-mingw32.tar.gz",
+ "archiveFileName": "bossac-1.8-48-gb176eee-i686-w64-mingw32.tar.gz",
+ "checksum": "SHA-256:4523a6897f3dfd673fe821c5cfbac8d6a12782e7a36b312b9ee7d41deec2a10a",
+ "size": "91219"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.8-48-gb176eee-i386-apple-darwin16.1.0.tar.gz",
+ "archiveFileName": "bossac-1.8-48-gb176eee-i386-apple-darwin16.1.0.tar.gz",
+ "checksum": "SHA-256:581ecc16021de36638ae14e9e064ffb4a1d532a11502f4252da8bcdf5ce1d649",
+ "size": "39150"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.8-48-gb176eee-x86_64-linux-gnu.tar.gz",
+ "archiveFileName": "bossac-1.8-48-gb176eee-x86_64-linux-gnu.tar.gz",
+ "checksum": "SHA-256:1347eec67f5b90b785abdf6c8a8aa59129d0c016de7ff9b5ac1690378eacca3c",
+ "size": "37798"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.8-48-gb176eee-i486-linux-gnu.tar.gz",
+ "archiveFileName": "bossac-1.8-48-gb176eee-i486-linux-gnu.tar.gz",
+ "checksum": "SHA-256:4c7492f876b8269aa9d8bcaad3aeda31acf1a0292383093b6d9f5f1d23fdafc3",
+ "size": "37374"
+ },
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/tools/bossac-1.8-48-gb176eee-arm-linux-gnueabihf.tar.gz",
+ "archiveFileName": "bossac-1.8-48-gb176eee-arm-linux-gnueabihf.tar.gz",
+ "checksum": "SHA-256:2001e4a592f3aefd22f213b1ddd6f5d8d5e74bd04080cf1b97c24cbaa81b10ed",
+ "size": "34825"
+ }
+ ]
+ },
+ {
+ "name": "openocd",
+ "version": "0.9.0-arduino",
+ "systems": [
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/tools/OpenOCD-0.9.0-arduino-arm-linux-gnueabihf.tar.bz2",
+ "archiveFileName": "OpenOCD-0.9.0-dev-arduino-arm-linux-gnueabihf.tar.bz2",
+ "checksum": "SHA-256:a84e7c4cba853f2c937d77286f8a0ca317447d3873e51cbd2a2d41424e044a18",
+ "size": "1402283"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/OpenOCD-0.9.0-arduino-i686-pc-cygwin.tar.bz2",
+ "archiveFileName": "OpenOCD-0.9.0-arduino-i686-pc-cygwin.tar.bz2",
+ "checksum": "SHA-256:5310bdd3730168a33b09b68558e908ca8b2fec25620c488f50a5fb35d0d1effd",
+ "size": "2360705"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/tools/OpenOCD-0.9.0-arduino-x86_64-apple-darwin14.3.0.tar.bz2",
+ "archiveFileName": "OpenOCD-0.9.0-arduino-x86_64-apple-darwin14.3.0.tar.bz2",
+ "checksum": "SHA-256:ef90769c07b8018cec3a5054e690ac6c196e03720e102ac5038c3f9da4e44782",
+ "size": "2275101"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/OpenOCD-0.9.0-arduino-x86_64-linux-gnu.tar.bz2",
+ "archiveFileName": "OpenOCD-0.9.0-arduino-x86_64-linux-gnu.tar.bz2",
+ "checksum": "SHA-256:c350409f7badf213dfcc516ea34289461ad92d87806e8e33945508a2c6b2c0b3",
+ "size": "1210796"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/OpenOCD-0.9.0-arduino-i486-linux-gnu.tar.bz2",
+ "archiveFileName": "OpenOCD-0.9.0-arduino-i486-linux-gnu.tar.bz2",
+ "checksum": "SHA-256:4c9793dfd7822b0fc959d039e5ecabfa89092ee2911abfdc7b5905deb171499a",
+ "size": "1129654"
+ }
+ ]
+ },
+ {
+ "name": "openocd",
+ "version": "0.9.0-arduino5-static",
+ "systems": [
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.9.0-arduino5-static-arm-linux-gnueabihf.tar.bz2",
+ "archiveFileName": "openocd-0.9.0-arduino5-static-arm-linux-gnueabihf.tar.bz2",
+ "checksum": "SHA-256:cef48c1448664612dd25168f0a56962aec4ce2f1d7c06dafd86a1b606dc8ae20",
+ "size": "1319000"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.9.0-arduino5-static-i686-w64-mingw32.zip",
+ "archiveFileName": "openocd-0.9.0-arduino5-static-i686-w64-mingw32.zip",
+ "checksum": "SHA-256:54c70a0bfa1b0a3a592d6ee9ab532f9715e1dede2e7d46a3232abd72de274c5a",
+ "size": "1641209"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.9.0-arduino5-static-x86_64-apple-darwin15.6.0.tar.bz2",
+ "archiveFileName": "openocd-0.9.0-arduino5-static-x86_64-apple-darwin15.6.0.tar.bz2",
+ "checksum": "SHA-256:14be5c5400e1a32c3d6a15f9c8d2f438634974ab263ff437b91b527e5b5d53a4",
+ "size": "1235752"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.9.0-arduino5-static-x86_64-linux-gnu.tar.bz2",
+ "archiveFileName": "openocd-0.9.0-arduino5-static-x86_64-linux-gnu.tar.bz2",
+ "checksum": "SHA-256:8e378bdcd71c93a39818c16b49b91128c8028e3d9675551ba7eff39462391ba2",
+ "size": "1393855"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.9.0-arduino5-static-i686-linux-gnu.tar.bz2",
+ "archiveFileName": "openocd-0.9.0-arduino5-static-i686-linux-gnu.tar.bz2",
+ "checksum": "SHA-256:8e0787f54e204fe6e9071b2b7edf8a5e695492696f1182d447647fe5c0bd55bd",
+ "size": "1341739"
+ }
+ ]
+ },
+ {
+ "name": "openocd",
+ "version": "0.9.0-arduino6-static",
+ "systems": [
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.9.0-arduino6-static-arm-linux-gnueabihf.tar.bz2",
+ "archiveFileName": "openocd-0.9.0-arduino6-static-arm-linux-gnueabihf.tar.bz2",
+ "checksum": "SHA-256:5d596c90510f80d66f64a3615d74063a6a61f07b79be475592a3c76bf0deb3ca",
+ "size": "1319020"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.9.0-arduino6-static-i686-w64-mingw32.zip",
+ "archiveFileName": "openocd-0.9.0-arduino6-static-i686-w64-mingw32.zip",
+ "checksum": "SHA-256:dde6c8cd42c179e819eeebee1d09829b0768ecb89b75fb10e1f053c1c65f9cf1",
+ "size": "1641514"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.9.0-arduino6-static-x86_64-apple-darwin15.6.0.tar.bz2",
+ "archiveFileName": "openocd-0.9.0-arduino6-static-x86_64-apple-darwin15.6.0.tar.bz2",
+ "checksum": "SHA-256:00cd65339bc981ff0d4ab4876df8f89b1e60e476441fabca31d5fc2968bad9be",
+ "size": "1222523"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.9.0-arduino6-static-x86_64-linux-gnu.tar.bz2",
+ "archiveFileName": "openocd-0.9.0-arduino6-static-x86_64-linux-gnu.tar.bz2",
+ "checksum": "SHA-256:d2f58bbd0661b755fdb8a307d197f119d838b066f5510b25ee766e47d1774543",
+ "size": "1394293"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.9.0-arduino6-static-i686-linux-gnu.tar.bz2",
+ "archiveFileName": "openocd-0.9.0-arduino6-static-i686-linux-gnu.tar.bz2",
+ "checksum": "SHA-256:88d948c2062c73c0c93e649e099aaac4b009018cff365f44cfc5b47907043dc9",
+ "size": "1340444"
+ }
+ ]
+ },
+ {
+ "name": "openocd",
+ "version": "0.10.0-arduino7",
+ "systems": [
+ {
+ "size": "1638575",
+ "checksum": "SHA-256:f8e0d783e80a3d5f75ee82e9542315871d46e1e283a97447735f1cbcd8986b06",
+ "host": "arm-linux-gnueabihf",
+ "archiveFileName": "openocd-0.10.0-arduino7-static-arm-linux-gnueabihf.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.10.0-arduino7-static-arm-linux-gnueabihf.tar.bz2"
+ },
+ {
+ "size": "1580739",
+ "checksum": "SHA-256:d47d728a9a8d98f28dc22e31d7127ced9de0d5e268292bf935e050ef1d2bdfd0",
+ "host": "aarch64-linux-gnu",
+ "archiveFileName": "openocd-0.10.0-arduino7-static-aarch64-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.10.0-arduino7-static-aarch64-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "1498970",
+ "checksum": "SHA-256:1e539a587a0c54a551ce0dc542af10a2520b1c93bbfe2ca4ebaef4c83411df1a",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "openocd-0.10.0-arduino7-static-x86_64-apple-darwin13.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.10.0-arduino7-static-x86_64-apple-darwin13.tar.bz2"
+ },
+ {
+ "size": "1701581",
+ "checksum": "SHA-256:91d418bd309ec1e98795c622cd25c936aa537c0b3828fa5bcb191389378a1b27",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "openocd-0.10.0-arduino7-static-x86_64-ubuntu12.04-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.10.0-arduino7-static-x86_64-ubuntu12.04-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "1626347",
+ "checksum": "SHA-256:08a18f39d72a5626383503053a30a5da89eed7fdccb6f514b20b77403eb1b2b4",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "openocd-0.10.0-arduino7-static-i686-ubuntu12.04-linux-gnu.tar.bz2",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.10.0-arduino7-static-i686-ubuntu12.04-linux-gnu.tar.bz2"
+ },
+ {
+ "size": "2016965",
+ "checksum": "SHA-256:f251aec5471296e18aa540c3078d66475357a76a77c16c06a2d9345f4e12b3d5",
+ "host": "i686-mingw32",
+ "archiveFileName": "openocd-0.10.0-arduino7-static-i686-w64-mingw32.zip",
+ "url": "http://downloads.arduino.cc/tools/openocd-0.10.0-arduino7-static-i686-w64-mingw32.zip"
+ }
+ ]
+ },
+ {
+ "name": "CMSIS",
+ "version": "4.0.0-atmel",
+ "systems": [
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/CMSIS-4.0.0.tar.bz2",
+ "archiveFileName": "CMSIS-4.0.0.tar.bz2",
+ "checksum": "SHA-256:7d637d2d7a0c6bacc22065848a201db2fff124268e4a56868260d0f472b4bbb7",
+ "size": "17642623"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/CMSIS-4.0.0.tar.bz2",
+ "archiveFileName": "CMSIS-4.0.0.tar.bz2",
+ "checksum": "SHA-256:7d637d2d7a0c6bacc22065848a201db2fff124268e4a56868260d0f472b4bbb7",
+ "size": "17642623"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/CMSIS-4.0.0.tar.bz2",
+ "archiveFileName": "CMSIS-4.0.0.tar.bz2",
+ "checksum": "SHA-256:7d637d2d7a0c6bacc22065848a201db2fff124268e4a56868260d0f472b4bbb7",
+ "size": "17642623"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/CMSIS-4.0.0.tar.bz2",
+ "archiveFileName": "CMSIS-4.0.0.tar.bz2",
+ "checksum": "SHA-256:7d637d2d7a0c6bacc22065848a201db2fff124268e4a56868260d0f472b4bbb7",
+ "size": "17642623"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/CMSIS-4.0.0.tar.bz2",
+ "archiveFileName": "CMSIS-4.0.0.tar.bz2",
+ "checksum": "SHA-256:7d637d2d7a0c6bacc22065848a201db2fff124268e4a56868260d0f472b4bbb7",
+ "size": "17642623"
+ }
+ ]
+ },
+ {
+ "name": "CMSIS",
+ "version": "4.5.0",
+ "systems":
+ [
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/CMSIS-4.5.0.tar.bz2",
+ "archiveFileName": "CMSIS-4.5.0.tar.bz2",
+ "checksum": "SHA-256:cd8f7eae9fc7c8b4a1b5e40b89b9666d33953b47d3d2eb81844f5af729fa224d",
+ "size": "31525196"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/CMSIS-4.5.0.tar.bz2",
+ "archiveFileName": "CMSIS-4.5.0.tar.bz2",
+ "checksum": "SHA-256:cd8f7eae9fc7c8b4a1b5e40b89b9666d33953b47d3d2eb81844f5af729fa224d",
+ "size": "31525196"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/CMSIS-4.5.0.tar.bz2",
+ "archiveFileName": "CMSIS-4.5.0.tar.bz2",
+ "checksum": "SHA-256:cd8f7eae9fc7c8b4a1b5e40b89b9666d33953b47d3d2eb81844f5af729fa224d",
+ "size": "31525196"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/CMSIS-4.5.0.tar.bz2",
+ "archiveFileName": "CMSIS-4.5.0.tar.bz2",
+ "checksum": "SHA-256:cd8f7eae9fc7c8b4a1b5e40b89b9666d33953b47d3d2eb81844f5af729fa224d",
+ "size": "31525196"
+ },
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/CMSIS-4.5.0.tar.bz2",
+ "archiveFileName": "CMSIS-4.5.0.tar.bz2",
+ "checksum": "SHA-256:cd8f7eae9fc7c8b4a1b5e40b89b9666d33953b47d3d2eb81844f5af729fa224d",
+ "size": "31525196"
+ },
+ {
+ "host": "aarch64-linux-gnu",
+ "url": "http://downloads.arduino.cc/CMSIS-4.5.0.tar.bz2",
+ "archiveFileName": "CMSIS-4.5.0.tar.bz2",
+ "checksum": "SHA-256:cd8f7eae9fc7c8b4a1b5e40b89b9666d33953b47d3d2eb81844f5af729fa224d",
+ "size": "31525196"
+ },
+ {
+ "host": "all",
+ "url": "http://downloads.arduino.cc/CMSIS-4.5.0.tar.bz2",
+ "archiveFileName": "CMSIS-4.5.0.tar.bz2",
+ "checksum": "SHA-256:cd8f7eae9fc7c8b4a1b5e40b89b9666d33953b47d3d2eb81844f5af729fa224d",
+ "size": "31525196"
+ }
+ ]
+ },
+ {
+ "name": "CMSIS-Atmel",
+ "version": "1.0.0",
+ "systems":
+ [
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.0.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.0.0.tar.bz2",
+ "checksum": "SHA-256:b3c954570a2f8d9821c372e0864f5f0b86cfbeab8114ce95821f5c49758c7256",
+ "size": "1281654"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.0.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.0.0.tar.bz2",
+ "checksum": "SHA-256:b3c954570a2f8d9821c372e0864f5f0b86cfbeab8114ce95821f5c49758c7256",
+ "size": "1281654"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.0.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.0.0.tar.bz2",
+ "checksum": "SHA-256:b3c954570a2f8d9821c372e0864f5f0b86cfbeab8114ce95821f5c49758c7256",
+ "size": "1281654"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.0.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.0.0.tar.bz2",
+ "checksum": "SHA-256:b3c954570a2f8d9821c372e0864f5f0b86cfbeab8114ce95821f5c49758c7256",
+ "size": "1281654"
+ },
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.0.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.0.0.tar.bz2",
+ "checksum": "SHA-256:b3c954570a2f8d9821c372e0864f5f0b86cfbeab8114ce95821f5c49758c7256",
+ "size": "1281654"
+ },
+ {
+ "host": "all",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.0.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.0.0.tar.bz2",
+ "checksum": "SHA-256:b3c954570a2f8d9821c372e0864f5f0b86cfbeab8114ce95821f5c49758c7256",
+ "size": "1281654"
+ }
+ ]
+ },
+ {
+ "name": "CMSIS-Atmel",
+ "version": "1.1.0",
+ "systems":
+ [
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.1.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.1.0.tar.bz2",
+ "checksum": "SHA-256:3ea5ec0451f42dc2b97f869b027a9cf696241cfc927cfc48d74ccc7b396ba41b",
+ "size": "1659108"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.1.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.1.0.tar.bz2",
+ "checksum": "SHA-256:3ea5ec0451f42dc2b97f869b027a9cf696241cfc927cfc48d74ccc7b396ba41b",
+ "size": "1659108"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.1.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.1.0.tar.bz2",
+ "checksum": "SHA-256:3ea5ec0451f42dc2b97f869b027a9cf696241cfc927cfc48d74ccc7b396ba41b",
+ "size": "1659108"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.1.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.1.0.tar.bz2",
+ "checksum": "SHA-256:3ea5ec0451f42dc2b97f869b027a9cf696241cfc927cfc48d74ccc7b396ba41b",
+ "size": "1659108"
+ },
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.1.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.1.0.tar.bz2",
+ "checksum": "SHA-256:3ea5ec0451f42dc2b97f869b027a9cf696241cfc927cfc48d74ccc7b396ba41b",
+ "size": "1659108"
+ },
+ {
+ "host": "all",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.1.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.1.0.tar.bz2",
+ "checksum": "SHA-256:3ea5ec0451f42dc2b97f869b027a9cf696241cfc927cfc48d74ccc7b396ba41b",
+ "size": "1659108"
+ }
+ ]
+ },
+ {
+ "name": "CMSIS-Atmel",
+ "version": "1.2.0",
+ "systems":
+ [
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.2.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.2.0.tar.bz2",
+ "checksum": "SHA-256:5e02670be7e36be9691d059bee0b04ee8b249404687531f33893922d116b19a5",
+ "size": "2221805"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "http://downloads.arduino.cc/CMSIS-Atmel-1.2.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.2.0.tar.bz2",
+ "checksum": "SHA-256:5e02670be7e36be9691d059bee0b04ee8b249404687531f33893922d116b19a5",
+ "size": "2221805"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "https://downloads.arduino.cc/CMSIS-Atmel-1.2.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.2.0.tar.bz2",
+ "checksum": "SHA-256:5e02670be7e36be9691d059bee0b04ee8b249404687531f33893922d116b19a5",
+ "size": "2221805"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "https://downloads.arduino.cc/CMSIS-Atmel-1.2.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.2.0.tar.bz2",
+ "checksum": "SHA-256:5e02670be7e36be9691d059bee0b04ee8b249404687531f33893922d116b19a5",
+ "size": "2221805"
+ },
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "https://downloads.arduino.cc/CMSIS-Atmel-1.2.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.2.0.tar.bz2",
+ "checksum": "SHA-256:5e02670be7e36be9691d059bee0b04ee8b249404687531f33893922d116b19a5",
+ "size": "2221805"
+ },
+ {
+ "host": "aarch64-linux-gnu",
+ "url": "https://downloads.arduino.cc/CMSIS-Atmel-1.2.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.2.0.tar.bz2",
+ "checksum": "SHA-256:5e02670be7e36be9691d059bee0b04ee8b249404687531f33893922d116b19a5",
+ "size": "2221805"
+ },
+ {
+ "host": "all",
+ "url": "https://downloads.arduino.cc/CMSIS-Atmel-1.2.0.tar.bz2",
+ "archiveFileName": "CMSIS-Atmel-1.2.0.tar.bz2",
+ "checksum": "SHA-256:5e02670be7e36be9691d059bee0b04ee8b249404687531f33893922d116b19a5",
+ "size": "2221805"
+ }
+ ]
+ },
+ {
+ "name": "dfu-util",
+ "version": "0.9.0-arduino1",
+ "systems": [
+ {
+ "host": "i386-apple-darwin11",
+ "url": "http://downloads.arduino.cc/tools/dfu-util-0.9.0-arduino1-osx.tar.bz2",
+ "archiveFileName": "dfu-util-0.9.0-arduino1-osx.tar.bz2",
+ "size": "68361",
+ "checksum": "SHA-256:ea9216c627b7aa2d3a9bffab97df937e3c580cce66753c428dc697c854a35271"
+ },
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/tools/dfu-util-0.9.0-arduino1-arm.tar.bz2",
+ "archiveFileName": "dfu-util-0.9.0-arduino1-arm.tar.bz2",
+ "size": "194826",
+ "checksum": "SHA-256:480637bf578e74b19753666a049f267d8ebcd9dfc8660d48f246bb76d5b806f9"
+ },
+ {
+ "host": "x86_64-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/dfu-util-0.9.0-arduino1-linux64.tar.bz2",
+ "archiveFileName": "dfu-util-0.9.0-arduino1-linux64.tar.bz2",
+ "size": "66230",
+ "checksum": "SHA-256:e8a4d5477ab8c44d8528f35bc7dfafa5f3f04dace513906514aea31adc6fd3ba"
+ },
+ {
+ "host": "i686-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/dfu-util-0.9.0-arduino1-linux32.tar.bz2",
+ "archiveFileName": "dfu-util-0.9.0-arduino1-linux32.tar.bz2",
+ "size": "62608",
+ "checksum": "SHA-256:17d69213914da04dadd6464d8adbcd3581dd930eb666b8f3336ab5383ce2127f"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/dfu-util-0.9.0-arduino1-windows.tar.bz2",
+ "archiveFileName": "dfu-util-0.9.0-arduino1-windows.tar.bz2",
+ "size": "377537",
+ "checksum": "SHA-256:29be01b298348be8b822391be7147b71a969d47bd5457d5b24cfa5981dbce78e"
+ }
+ ]
+ },
+ {
+ "name": "windows-drivers",
+ "version": "1.6.9",
+ "systems": [
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/drivers-arduino-windows-1.6.9.zip",
+ "archiveFileName": "drivers-arduino-windows-1.6.9.zip",
+ "checksum": "SHA-256:10d456ab18d164d42545255db8bef4ac9e1bf660cc89acb7a0980b5a486654ac",
+ "size": "7071714"
+ }
+ ]
+ },
+ {
+ "name": "windows-drivers",
+ "version": "1.8.0",
+ "systems": [
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/drivers-arduino-windows-1.8.0.zip",
+ "archiveFileName": "drivers-arduino-windows-1.8.0.zip",
+ "checksum": "SHA-256:60614b326ad6860ed0cb99eb4cb2cb69f9ba6ba3784396d5441fe3f99004f8ec",
+ "size": "16302148"
+ }
+ ]
+ },
+ {
+ "name": "dfu-util",
+ "version": "0.8.0-stm32-arduino1",
+ "systems": [
+ {
+ "archiveFileName": "dfu-util-0.8.0-stm32-arduino1-darwin_amd64.tar.bz2",
+ "checksum": "SHA-256:bb146803a4152ce2647d72b2cde68ff95eb3017c2460f24c4db922adac1fbd12",
+ "host": "i386-apple-darwin11",
+ "size": "68381",
+ "url": "http://downloads.arduino.cc/arduino.org/dfu-util-0.8.0-stm32-arduino1-darwin_amd64.tar.bz2"
+ },
+ {
+ "archiveFileName": "dfu-util-0.8.0-stm32-arduino1-linux_arm.tar.bz2",
+ "checksum": "SHA-256:607e6b0f2d2787ed7837f26da30b100131e3db207f84b8aca94a377db6e9ae50",
+ "host": "arm-linux-gnueabihf",
+ "size": "213760",
+ "url": "http://downloads.arduino.cc/arduino.org/dfu-util-0.8.0-stm32-arduino1-linux_arm.tar.bz2"
+ },
+ {
+ "archiveFileName": "dfu-util-0.8.0-stm32-arduino1-stm32-linux_amd64.tar.bz2",
+ "checksum": "SHA-256:e44287494ebd22f59fc79766a94e20306e59c6c799f5bb1cddeed80db95000d9",
+ "host": "x86_64-linux-gnu",
+ "size": "68575",
+ "url": "http://downloads.arduino.cc/arduino.org/dfu-util-0.8.0-stm32-arduino1-linux_amd64.tar.bz2"
+ },
+ {
+ "archiveFileName": "dfu-util-0.8.0-stm32-arduino1-linux_386.tar.bz2",
+ "checksum": "SHA-256:58131e35ad5d7053b281bc6176face7b117c5ad63331e43c6801f8ccd57f59a4",
+ "host": "i686-linux-gnu",
+ "size": "69097",
+ "url": "http://downloads.arduino.cc/arduino.org/dfu-util-0.8.0-stm32-arduino1-linux_386.tar.bz2"
+ },
+ {
+ "archiveFileName": "dfu-util-0.8.0-stm32-arduino1-windows_386.tar.bz2",
+ "checksum": "SHA-256:25c2f84e1acf1f10fd2aa1afced441366d4545fd41eae56e64f0b990b4ce9f55",
+ "host": "i686-mingw32",
+ "size": "159753",
+ "url": "http://downloads.arduino.cc/arduino.org/dfu-util-0.8.0-stm32-arduino1-windows_386.tar.bz2"
+ }
+ ]
+ },
+ {
+ "name": "arduinoSTM32load",
+ "version": "2.0.0",
+ "systems": [
+ {
+ "archiveFileName": "arduinoSTM32load-2.0.0-darwin_amd64.tar.bz2",
+ "checksum": "SHA-256:92fb9714091850febaa9d159501cbca5ba68d03020e5e2d4eff596154040bfaa",
+ "host": "i386-apple-darwin11",
+ "size": "807514",
+ "url": "http://downloads.arduino.cc/arduino.org/arduinoSTM32load-2.0.0-darwin_amd64.tar.bz2"
+ },
+ {
+ "archiveFileName": "arduinoSTM32load-2.0.0-linux_arm.tar.bz2",
+ "checksum": "SHA-256:fc0d8058b57bda849e1ffc849f83f54b0b85f97954176db317da1c745c174e08",
+ "host": "arm-linux-gnueabihf",
+ "size": "809480",
+ "url": "http://downloads.arduino.cc/arduino.org/arduinoSTM32load-2.0.0-linux_arm.tar.bz2"
+ },
+ {
+ "archiveFileName": "arduinoSTM32load-2.0.0-linux_amd64.tar.bz2",
+ "checksum": "SHA-256:0ed5cf1ea05fe6c33567817c54daf9c296d058a3607c428e0b0bd9aad89b9809",
+ "host": "x86_64-linux-gnu",
+ "size": "818885",
+ "url": "http://downloads.arduino.cc/arduino.org/arduinoSTM32load-2.0.0-linux_amd64.tar.bz2"
+ },
+ {
+ "archiveFileName": "arduinoSTM32load-2.0.0-linux_386.tar.bz2",
+ "checksum": "SHA-256:fad50abaaca034e6d647d09b042291b761982aabfd42b6156411c86e4f873ca7",
+ "host": "i686-linux-gnu",
+ "size": "814283",
+ "url": "http://downloads.arduino.cc/arduino.org/arduinoSTM32load-2.0.0-linux_386.tar.bz2"
+ },
+ {
+ "archiveFileName": "arduinoSTM32load-2.0.0-windows_386.tar.bz2",
+ "checksum": "SHA-256:79467c0cde4b88c4884acb09445a2186af4e41f901eee56e99b5d89b7065d085",
+ "host": "i686-mingw32",
+ "size": "786335",
+ "url": "http://downloads.arduino.cc/arduino.org/arduinoSTM32load-2.0.0-windows_386.tar.bz2"
+ }
+ ]
+ },
+ {
+ "name": "openocd",
+ "version": "0.10.0-arduino1-static",
+ "systems": [
+ {
+ "host": "i386-apple-darwin11",
+ "url": "http://downloads.arduino.cc/arduino.org/OpenOCD-0.10.0-nrf52-osx-static.tar.gz",
+ "archiveFileName": "OpenOCD-0.10.0-nrf52-osx-static.tar.gz",
+ "size": "1529841",
+ "checksum": "SHA-256:46bd02c1d42c5d94c4936e4d4a0ff29697b621840be9a6f882e316203122049d"
+ },
+ {
+ "host": "x86_64-linux-gnu",
+ "url": "http://downloads.arduino.cc/arduino.org/OpenOCD-0.10.0-nrf52-linux64-static.tar.gz",
+ "archiveFileName": "OpenOCD-0.10.0-nrf52-linux64-static.tar.gz",
+ "size": "1777984",
+ "checksum": "SHA-256:1c9ae77930dd7377d8c13f84abe7307b67fdcd6da74cc1ce269a79e138e7a00a"
+ },
+ {
+ "host": "i686-linux-gnu",
+ "url": "http://downloads.arduino.cc/arduino.org/OpenOCD-0.10.0-nrf52-linux32-static.tar.gz",
+ "archiveFileName": "OpenOCD-0.10.0-nrf52-linux32-static.tar.gz",
+ "size": "1713236",
+ "checksum": "SHA-256:777371df34828810e1bea623b0f7c98f28fedf30fd3bc8e7d8f0a5745fb4e258"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/arduino.org/OpenOCD-0.10.0-nrf52-win32-static.zip",
+ "archiveFileName": "OpenOCD-0.10.0-nrf52-win32-static.zip",
+ "size": "1773642",
+ "checksum": "SHA-256:9371b25d000bd589c058a5bf10720617adb91fd8b8a21d2e887cf45eaa2df93c"
+ },
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/arduino.org/OpenOCD-0.10.0-nrf52-arm-static.tar.gz",
+ "archiveFileName": "OpenOCD-0.10.0-nrf52-arm-static.tar.gz",
+ "size": "1526863",
+ "checksum": "SHA-256:b5172422077f87ff05b76ff40034979678c9c640e9d08cee15ce55e40dd8c929"
+ }
+ ]
+ },
+ {
+ "name": "nrf5x-cl-tools",
+ "version": "9.3.1",
+ "systems": [
+ {
+ "host": "i386-apple-darwin11",
+ "url": "http://downloads.arduino.cc/arduino.org/nRF5x-Command-Line-Tools_9_3_1_OSX.tar.bz2",
+ "archiveFileName": "nRF5x-Command-Line-Tools_9_3_1_OSX.tar.bz2",
+ "size": "341674",
+ "checksum": "SHA-256:41e4580271b39459a7ef1b078d11ee08d8f4f23fab7ff03f3fe8c3bc986a0ed4"
+ },
+ {
+ "host": "x86_64-linux-gnu",
+ "url": "http://downloads.arduino.cc/arduino.org/nRF5x-Command-Line-Tools_9_3_1_Linux-x86_64.tar.bz2",
+ "archiveFileName": "nRF5x-Command-Line-Tools_9_3_1_Linux-x86_64.tar.bz2",
+ "size": "167414",
+ "checksum": "SHA-256:4074fffe678d60968006a72edd182c6506b264472c9957bc3eaa39336bfcf972"
+ },
+ {
+ "host": "i686-linux-gnu",
+ "url": "http://downloads.arduino.cc/arduino.org/nRF5x-Command-Line-Tools_9_3_1_Linux-i386.tar.bz2",
+ "archiveFileName": "nRF5x-Command-Line-Tools_9_3_1_Linux-i386.tar.bz2",
+ "size": "155680",
+ "checksum": "SHA-256:e880059b303e5aad3a8b34c83dfd8c22beee77ae2074fbd37511e3baa91464a5"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/arduino.org/nRF5x-Command-Line-Tools_9_3_1_Win32.tar.bz2",
+ "archiveFileName": "nRF5x-Command-Line-Tools_9_3_1_Win32.tar.bz2",
+ "size": "812257",
+ "checksum": "SHA-256:a4467350e39314690cec2e96b80e7e3cab463c84eff9b81593ad57754d76ee00"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "email": "support@intel.com",
+ "maintainer": "Intel",
+ "websiteURL": "http://maker.intel.com/",
+ "name": "Intel",
+ "platforms": [
+ {
+ "name": "Intel i586 Boards",
+ "version": "1.6.2+1.0",
+ "category": "Arduino Certified",
+ "architecture": "i586",
+ "url": "https://github.com/01org/corelibs-galileo/archive/1.6.2+1.0.tar.gz",
+ "archiveFileName": "corelibs-galileo-1.6.2.tar.gz",
+ "checksum": "SHA-256:e20d62b0dccf0d68dbb61d70b866d77134b770b226d6046a61c7e8d55e64e53a",
+ "size": "272961",
+ "boards": [
+ {
+ "name": "Galileo"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "Intel",
+ "name": "i586-poky-linux-uclibc",
+ "version": "1.6.2+1.0"
+ },
+ {
+ "packager": "Intel",
+ "name": "sketchUploader",
+ "version": "1.6.2+1.0"
+ }
+ ]
+ },
+ {
+ "name": "Intel i686 Boards",
+ "version": "1.6.2+1.0",
+ "category": "Arduino Certified",
+ "architecture": "i686",
+ "url": "https://github.com/01org/corelibs-edison/archive/1.6.2+1.0.tar.gz",
+ "archiveFileName": "corelibs-edison-1.6.2.tar.gz",
+ "checksum": "SHA-256:538ab8553f832f56b04df80d44992ecc994b9c296f3fce6902832d97f99811a8",
+ "size": "271420",
+ "boards": [
+ {
+ "name": "Edison"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "Intel",
+ "name": "core2-32-poky-linux",
+ "version": "1.6.2+1.0"
+ },
+ {
+ "packager": "Intel",
+ "name": "sketchUploader",
+ "version": "1.6.2+1.0"
+ }
+ ]
+ },
+ {
+ "name": "Intel i586 Boards",
+ "version": "1.6.7+1.0",
+ "category": "Arduino Certified",
+ "architecture": "i586",
+ "url": "https://github.com/01org/corelibs-galileo/archive/1.6.7+1.0.tar.gz",
+ "archiveFileName": "corelibs-galileo-1.6.7.tar.gz",
+ "checksum": "SHA-256:4d161dde4c95ef8ebc264ca444ee6d1baaee70f9fd416e5f234f3f5cdd4ee028",
+ "size": "273392",
+ "boards": [
+ {
+ "name": "Galileo"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "Intel",
+ "name": "i586-poky-linux-uclibc",
+ "version": "1.6.2+1.0"
+ },
+ {
+ "packager": "Intel",
+ "name": "sketchUploader",
+ "version": "1.6.2+1.0"
+ }
+ ]
+ },
+ {
+ "name": "Intel i686 Boards",
+ "version": "1.6.7+1.0",
+ "category": "Arduino Certified",
+ "architecture": "i686",
+ "url": "https://github.com/01org/corelibs-edison/archive/1.6.7+1.0.tar.gz",
+ "archiveFileName": "corelibs-edison-1.6.7.tar.gz",
+ "checksum": "SHA-256:2b630b4986c4c7543946eef339ab8d08ed23d28aa6428d27f5b464ad2331a3f1",
+ "size": "272002",
+ "boards": [
+ {
+ "name": "Edison"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "Intel",
+ "name": "core2-32-poky-linux",
+ "version": "1.6.2+1.0"
+ },
+ {
+ "packager": "Intel",
+ "name": "sketchUploader",
+ "version": "1.6.2+1.0"
+ }
+ ]
+ },
+ {
+ "name": "Intel Curie Boards",
+ "version": "1.0.4",
+ "category": "Arduino Certified",
+ "architecture": "arc32",
+ "url": "https://github.com/arduino/ArduinoCore-arc32/archive/1.6.4+1.59.zip",
+ "archiveFileName": "arduino101-1.0.4.zip",
+ "checksum": "SHA-256:f440078eaf664d171180dda4264e8f2cf7b840e251c48f80e1ea9cfca6074fb1",
+ "size": "806100",
+ "boards": [
+ {
+ "name": "Arduino 101"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "Intel",
+ "version": "1.6.4+1.0",
+ "name": "arc-elf32"
+ },
+ {
+ "packager": "Intel",
+ "version": "1.6.4+1.14",
+ "name": "sketchUploader"
+ }
+ ]
+ },
+ {
+ "name": "Intel Curie Boards",
+ "version": "1.0.5",
+ "category": "Arduino Certified",
+ "architecture": "arc32",
+ "url": "https://github.com/arduino/ArduinoCore-arc32/archive/1.6.4+1.68.zip",
+ "archiveFileName": "arduino101-1.0.5.zip",
+ "checksum": "SHA-256:8c26b92491b12a9498453ed90a1178f47e9a7229fbfe129737f02ed24f55b542",
+ "size": "776592",
+ "boards": [
+ {
+ "name": "Arduino/Genuino 101"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "Intel",
+ "version": "1.6.4+1.0",
+ "name": "arc-elf32"
+ },
+ {
+ "packager": "Intel",
+ "version": "1.6.4+1.18",
+ "name": "arduino101load"
+ }
+ ]
+ },
+ {
+ "name": "Intel Curie Boards",
+ "version": "1.0.6",
+ "category": "Arduino Certified",
+ "archiveFileName": "arduino101-1.0.6.zip",
+ "architecture": "arc32",
+ "url": "https://github.com/arduino/ArduinoCore-arc32/archive/1.6.9+1.74.zip",
+ "checksum": "SHA-256:166c5d0f3c60c77495a08fe7d2debffb52195139854b3a42cfb6ae23b2fa2023",
+ "size": "672314",
+ "boards": [
+ {
+ "name": "Arduino/Genuino 101"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "Intel",
+ "version": "1.6.9+1.0.1",
+ "name": "arc-elf32"
+ },
+ {
+ "packager": "Intel",
+ "version": "1.6.9+1.24",
+ "name": "arduino101load"
+ }
+ ]
+ },
+ {
+ "name": "Intel Curie Boards",
+ "version": "1.0.7",
+ "category": "Arduino Certified",
+ "archiveFileName": "arduino101-1.0.7.zip",
+ "architecture": "arc32",
+ "url": "https://github.com/arduino/ArduinoCore-arc32/archive/1.6.11+1.83.zip",
+ "checksum": "SHA-256:e0fcc5175f00c521b5ed1fd3c78198bdb5fc43b8306247c62c83183572e1af8e",
+ "size": "729297",
+ "boards": [
+ {
+ "name": "Arduino/Genuino 101"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "Intel",
+ "version": "1.6.9+1.0.1",
+ "name": "arc-elf32"
+ },
+ {
+ "packager": "Intel",
+ "version": "1.6.9+1.28",
+ "name": "arduino101load"
+ },
+ {
+ "packager": "Intel",
+ "version": "0.9.0+0.1",
+ "name": "openocd"
+ },
+ {
+ "packager": "Intel",
+ "version": "1.0.0",
+ "name": "flashpack"
+ }
+ ]
+ },
+ {
+ "name": "Intel Curie Boards",
+ "version": "2.0.2",
+ "category": "Arduino Certified",
+ "archiveFileName": "corelibs-arduino101-2.0.2.zip",
+ "architecture": "arc32",
+ "url": "https://github.com/arduino/ArduinoCore-arc32/archive/2.0.2.zip",
+ "checksum": "SHA-256:2cea02dee6959f784c6bb6bdb2dc0eafa4c4d0ce5539da9cfa64dd6ce50939fe",
+ "size": "6242776",
+ "boards": [
+ {
+ "name": "Arduino/Genuino 101"
+ }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "Intel",
+ "version": "2.0.1",
+ "name": "arduino101load"
+ },
+ {
+ "packager": "arduino",
+ "version": "0.9.0-arduino1",
+ "name": "dfu-util"
+ },
+ {
+ "packager": "Intel",
+ "version": "1.6.9+1.0.1",
+ "name": "arc-elf32"
+ },
+ {
+ "packager": "Intel",
+ "version": "0.9.0+0.1",
+ "name": "openocd"
+ },
+ {
+ "packager": "Intel",
+ "version": "2.0.0",
+ "name": "flashpack"
+ }
+ ]
+ }
+ ],
+ "tools": [
+ {
+ "name": "i586-poky-linux-uclibc",
+ "version": "1.6.2+1.0",
+ "systems": [
+ {
+ "size": "30587705",
+ "checksum": "SHA-256:5b705d26dc1d8ca8953df6e0c08dcc8584d5be77b584d561f631360fd166677c",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "galileo-toolchain-20150323-osx.tar.bz2",
+ "url": "http://downloadmirror.intel.com/24806/eng/galileo-toolchain-osx-1.6.2-1.0.tar.bz2"
+ },
+ {
+ "size": "45948648",
+ "checksum": "SHA-256:821eb290d7c668c1caa74da30903c13843edc746d41508b35161622ae6279b56",
+ "host": "i686-mingw32",
+ "archiveFileName": "galileo-toolchain-20150323-windows.zip",
+ "url": "http://downloadmirror.intel.com/24806/eng/galileo-toolchain-windows-1.6.2-1.0.zip"
+ },
+ {
+ "size": "56227185",
+ "checksum": "SHA-256:935ccad3eaaec34f5de76eceb0f0ecd1372bdab0b7dc8f4241e8260c6f827b72",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "galileo-toolchain-20150316-linux64.tar.bz2",
+ "url": "http://downloadmirror.intel.com/24806/eng/galileo-toolchain-linux64-1.6.2-1.0.tar.bz2"
+ },
+ {
+ "size": "55098166",
+ "checksum": "SHA-256:1dab7f21e10d0208a6dd2897c36c6f5f55f9372b947225d2b59c3c4ab4777d03",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "galileo-toolchain-20150316-linux32.tar.bz2",
+ "url": "http://downloadmirror.intel.com/24806/eng/galileo-toolchain-linux32-1.6.2-1.0.tar.bz2"
+ }
+ ]
+ },
+ {
+ "name": "core2-32-poky-linux",
+ "version": "1.6.2+1.0",
+ "systems": [
+ {
+ "size": "42720934",
+ "checksum": "SHA-256:fac0b3f00a33ee0531ea0da6d517c170409e25bd5e59f6f3db9506974336375d",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "edison-toolchain-20150323-osx.tar.bz2",
+ "url": "http://downloadmirror.intel.com/24806/eng/edison-toolchain-osx-1.6.2-1.0.tar.bz2"
+ },
+ {
+ "size": "56683094",
+ "checksum": "SHA-256:5a9a1b51f0fa18bf21e1dcf1332d34331dd435c5ca0d1fe008e68e13cb3255e5",
+ "host": "i686-mingw32",
+ "archiveFileName": "edison-toolchain-20150323-windows.zip",
+ "url": "http://downloadmirror.intel.com/24806/eng/edison-toolchain-windows-1.6.2-1.0.zip"
+ },
+ {
+ "size": "78998436",
+ "checksum": "SHA-256:e3443e7832732f2189fd424e4868d2ebb563e823addb2321a6e8a86a9fced193",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "edison-toolchain-20150316-linux64.tar.bz2",
+ "url": "http://downloadmirror.intel.com/24806/eng/edison-toolchain-linux64-1.6.2-1.0.tar.bz2"
+ },
+ {
+ "size": "76488215",
+ "checksum": "SHA-256:014d1bdc40bb080987c736d04ffd42cdc0d2c3cad001891fb01555dac04296f7",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "edison-toolchain-20150316-linux32.tar.bz2",
+ "url": "http://downloadmirror.intel.com/24806/eng/edison-toolchain-linux32-1.6.2-1.0.tar.bz2"
+ }
+ ]
+ },
+ {
+ "name": "arc-elf32",
+ "version": "1.6.4+1.0",
+ "systems": [
+ {
+ "url": "https://downloadmirror.intel.com/25470/eng/arc-toolchain-windows-arcem.zip",
+ "checksum": "SHA-256:d7a3700e8762c656da66b1f85967ef1992c2473cee5bfc06b23aacfef7867435",
+ "host": "i686-mingw32",
+ "archiveFileName": "arc-toolchain-windows-arcem.zip",
+ "size": "218816528"
+ },
+ {
+ "url": "https://downloadmirror.intel.com/25470/eng/arc-toolchain-osx-arcem.tar.bz2",
+ "checksum": "SHA-256:3818fbc4cfbb8bc826ae33f6869413463849a889e53b4e27da365f2489699ec1",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "arc-toolchain-osx-arcem.tar.bz2",
+ "size": "78371400"
+ },
+ {
+ "url": "https://downloadmirror.intel.com/25470/eng/arc-toolchain-linux32-arcem.tar.bz2",
+ "checksum": "SHA-256:cb49a8b6a2d55712a0750813d56f8271d712252907fcd82b0cf690321be0d663",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "arc-toolchain-linux32-arcem.tar.bz2",
+ "size": "182899676"
+ },
+ {
+ "url": "https://downloadmirror.intel.com/25470/eng/arc-toolchain-linux64-arcem.tar.bz2",
+ "checksum": "SHA-256:0b538ae361b02a6329e00a009962c3bad91c48b07c78bb6343ffc6c19475e1a8",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "arc-toolchain-linux64-arcem.tar.bz2",
+ "size": "177840353"
+ }
+ ]
+ },
+ {
+ "name": "arc-elf32",
+ "version": "1.6.9+1.0.1",
+ "systems": [
+ {
+ "url": "https://downloadmirror.intel.com/25470/eng/arc-toolchain-windows-arcem-1.0.1.zip",
+ "checksum": "SHA-256:59614534473a55b1d964e1f4fa14f5e01e6aaa426d1e8319a8293ffa6576f58e",
+ "host": "i686-mingw32",
+ "archiveFileName": "arc-toolchain-windows-arcem-1.0.1.zip",
+ "size": "172455793"
+ },
+ {
+ "url": "https://downloadmirror.intel.com/25470/eng/arc-toolchain-osx-arcem-1.0.1.tar.bz2",
+ "checksum": "SHA-256:456995157cf1549073b9330a114abb25f056352278c107e25b3cc79443040a44",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "arc-toolchain-osx-arcem-1.0.1.tar.bz2",
+ "size": "30014953"
+ },
+ {
+ "url": "https://downloadmirror.intel.com/25470/eng/arc-toolchain-linux32-arcem-1.0.1.tar.bz2",
+ "checksum": "SHA-256:6aa92f239998d10992592cc16a16352bb1cce75a6bff1c72b8be18d939182915",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "arc-toolchain-linux32-arcem-1.0.1.tar.bz2",
+ "size": "116481978"
+ },
+ {
+ "url": "https://downloadmirror.intel.com/25470/eng/arc-toolchain-linux64-arcem-1.0.1.tar.bz2",
+ "checksum": "SHA-256:0ee03cecc27471eae58cb557a105c5edaea45b49557c86867fa436a13681392c",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "arc-toolchain-linux64-arcem-1.0.1.tar.bz2",
+ "size": "129250679"
+ }
+ ]
+ },
+ {
+ "name": "sketchUploader",
+ "version": "1.6.2+1.0",
+ "systems": [
+ {
+ "size": "61789",
+ "checksum": "SHA-256:8395ccb57c627f997fe01170df4613de906f48c6ce99623b9ca42806079c28ad",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "intel-arduino-tools-20150316-osx.tar.gz",
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.2+1.0-osx.tar.gz"
+ },
+ {
+ "size": "2534586",
+ "checksum": "SHA-256:c32d1ae4cde190242eef95122d20dbcb5da226e7de6c567079a1c8c292267ae5",
+ "host": "i686-mingw32",
+ "archiveFileName": "intel-arduino-tools-20150316-windows.zip",
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.2+1.0-windows.zip"
+ },
+ {
+ "size": "178239",
+ "checksum": "SHA-256:2876db4153db22609d2f6c9c3bfb198efbb9d9574edad579aca7d58cff9d2cca",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "intel-arduino-tools-20150316-linux64.tar.gz",
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.2+1.0-linux64.tar.gz"
+ },
+ {
+ "size": "187995",
+ "checksum": "SHA-256:20d87602d0194be626f592d3f2bdc9566a5a897786b042393482ef4c26ae158c",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "intel-arduino-tools-20150316-linux32.tar.gz",
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.2+1.0-linux32.tar.gz"
+ }
+ ]
+ },
+ {
+ "version": "1.6.4+1.14",
+ "name": "sketchUploader",
+ "systems": [
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.4+1.14-windows.zip",
+ "checksum": "SHA-256:694cc2e85bca897f2c5bf867d16f11ae4e93134e329a08079a41a83fa9fd7beb",
+ "host": "i686-mingw32",
+ "archiveFileName": "1.6.4+1.14-windows.zip",
+ "size": "8239124"
+ },
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.4+1.14-osx.tar.gz",
+ "checksum": "SHA-256:cd9c81e72667f42fca2047b12da330917607d2f2412772758d3b25565f61cb61",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "1.6.4+1.14-osx.tar.gz",
+ "size": "324645"
+ },
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.4+1.14-linux32.tar.gz",
+ "checksum": "SHA-256:8bd55d901ef26f89a2e0e67551eb966de2b3bd90504d8204bd200510b85e1a9b",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "1.6.4+1.14-linux32.tar.gz",
+ "size": "224137"
+ },
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.4+1.14-linux64.tar.gz",
+ "checksum": "SHA-256:2da8ac969f6e1731adfe0802d015891109b834fdedfa5ca199742d5439b4f038",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "1.6.4+1.14-linux64.tar.gz",
+ "size": "216542"
+ }
+ ]
+ },
+ {
+ "version": "1.6.4+1.18",
+ "name": "arduino101load",
+ "systems": [
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.4+1.19-windows.zip",
+ "checksum": "SHA-256:66c184ed5b045ba262ee86e17eb00280ebb6407b73b21979186ccc2270624bf7",
+ "host": "i686-mingw32",
+ "archiveFileName": "1.6.4+1.19-windows.zip",
+ "size": "9367650"
+ },
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.4+1.18-osx.tar.gz",
+ "checksum": "SHA-256:c698ced53fe27030dfae9d198cc550357bf130ede204d6ccac5eaac8c15e159a",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "1.6.4+1.18-osx.tar.gz",
+ "size": "1236355"
+ },
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.4+1.18-linux32.tar.gz",
+ "checksum": "SHA-256:96a9a3c9306b2ddcdecf96db64d7640c76422626a12488ef90ecd1ecdf451f3c",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "1.6.4+1.18-linux32.tar.gz",
+ "size": "1269508"
+ },
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.4+1.18-linux64.tar.gz",
+ "checksum": "SHA-256:0e7a9c95a4b8ed9dbd9b3e2dcd00cb22c0f41a0090efe7f144cdf6fc94eb2a19",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "1.6.4+1.18-linux64.tar.gz",
+ "size": "1467166"
+ }
+ ]
+ },
+ {
+ "version": "1.6.9+1.24",
+ "name": "arduino101load",
+ "systems": [
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.9+1.24-windows.zip",
+ "checksum": "SHA-256:b531d93a8aa5dcb3e9338c553060f71835e6530639106e6976ca8d9dea0039b0",
+ "host": "i686-mingw32",
+ "archiveFileName": "arduino101load-1.6.9+1.24-windows.zip",
+ "size": "9418690"
+ },
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.9+1.24-osx.tar.gz",
+ "checksum": "SHA-256:b927e7996f48d861d803b911317062b345fa631c4fbe2f85f2a0dcb651508e92",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "arduino101load-1.6.9+1.24-osx.tar.gz",
+ "size": "1241701"
+ },
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.9+1.24-linux32.tar.gz",
+ "checksum": "SHA-256:db418197f6b4fbdc104b682fc4137cd955681af1d12147a663054e7f352a3785",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "arduino101load-1.6.9+1.24-linux32.tar.gz",
+ "size": "1305107"
+ },
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.9+1.24-linux64.tar.gz",
+ "checksum": "SHA-256:b7723d8edac0ed957304dc2e275a5628f6602962b93863f32cc1986e1c2ee6ab",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "arduino101-1.6.9+1.24-linux64.tar.gz",
+ "size": "1507392"
+ }
+ ]
+ },
+ {
+ "version": "1.6.9+1.28",
+ "name": "arduino101load",
+ "systems": [
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.9+1.28-windows.zip",
+ "checksum": "SHA-256:0847ba5466ad97c4624c7b76c84aea036f24f1189828274310f52b86e6c3de1e",
+ "host": "i686-mingw32",
+ "archiveFileName": "arduino101load-1.6.9+1.28-windows.zip",
+ "size": "9331111"
+ },
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.9+1.28-osx.tar.gz",
+ "checksum": "SHA-256:011fcb145728842aa6dc7a2332a12b7cd0d0e4f9eae7e9c35400d3727eb33db9",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "arduino101load-1.6.9+1.28-osx.tar.gz",
+ "size": "1134614"
+ },
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.9+1.28-linux32.tar.gz",
+ "checksum": "SHA-256:36681f3dbe4edc04c0f6c3888ab7787c8015b0fce2d9cffccd17d7ec5fb1204d",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "arduino101load-1.6.9+1.28-linux32.tar.gz",
+ "size": "1202482"
+ },
+ {
+ "url": "https://github.com/01org/intel-arduino-tools/archive/1.6.9+1.28-linux64.tar.gz",
+ "checksum": "SHA-256:c691e2f077771b18a83b61c1cda96608150c80707adb7bac6b9665232bc74d4b",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "arduino101load-1.6.9+1.28-linux64.tar.gz",
+ "size": "1411007"
+ }
+ ]
+ },
+ {
+ "name": "arduino101load",
+ "version": "2.0.0",
+ "systems": [
+ {
+ "host": "i386-apple-darwin11",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.0.0-darwin_amd64.tar.bz2",
+ "archiveFileName": "arduino101load-2.0.0-darwin_amd64.tar.bz2",
+ "size": "1115949",
+ "checksum": "SHA-256:1adc2bb7ae5c12dabd2ce62a281285557d85d694d88e3578176a26a892546bff"
+ },
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.0.0-linux_arm.tar.bz2",
+ "archiveFileName": "arduino101load-2.0.0-linux_arm.tar.bz2",
+ "size": "992362",
+ "checksum": "SHA-256:ce5bc6598481cbf058a86695f4661f4b9f3a37481a8a1401bc8aa6e38055c13f"
+ },
+ {
+ "host": "x86_64-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.0.0-linux_amd64.tar.bz2",
+ "archiveFileName": "arduino101load-2.0.0-linux_amd64.tar.bz2",
+ "size": "1111450",
+ "checksum": "SHA-256:70965f1830e97bb8eafe1a4cea59a202aeb7c08dd2e9f46eb6a658d382cbf3fe"
+ },
+ {
+ "host": "i686-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.0.0-linux_386.tar.bz2",
+ "archiveFileName": "arduino101load-2.0.0-linux_386.tar.bz2",
+ "size": "1002073",
+ "checksum": "SHA-256:3a0880cbfc795a94cbc90e8c5e6bcf3dc47b893be8b61ba8657f009fdec364a0"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.0.0-windows_386.tar.bz2",
+ "archiveFileName": "arduino101load-2.0.0-windows_386.tar.bz2",
+ "size": "1046285",
+ "checksum": "SHA-256:90923d95f3d30fe3161fa0fcac344db12f99e2bfba396a48bec596d1870370ce"
+ }
+ ]
+ },
+ {
+ "name": "arduino101load",
+ "version": "2.0.1",
+ "systems": [
+ {
+ "host": "i386-apple-darwin11",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.0.1-darwin_amd64.tar.bz2",
+ "archiveFileName": "arduino101load-2.0.1-darwin_amd64.tar.bz2",
+ "size": "1114375",
+ "checksum": "SHA-256:1f96480028a2aaa9475a1f44c5912236b6a039fced948fe2042a0353c88c1fb0"
+ },
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.0.1-linux_arm.tar.bz2",
+ "archiveFileName": "arduino101load-2.0.1-linux_arm.tar.bz2",
+ "size": "992557",
+ "checksum": "SHA-256:ff02bc7b64dd2f6a526fd283c55b36cab15297d045ccd214ec70d12067ce0991"
+ },
+ {
+ "host": "x86_64-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.0.1-linux_amd64.tar.bz2",
+ "archiveFileName": "arduino101load-2.0.1-linux_amd64.tar.bz2",
+ "size": "1111519",
+ "checksum": "SHA-256:b15a213b495e599b76ecd68253602b56ff5eebda4fadc53442e8c1917964a45e"
+ },
+ {
+ "host": "i686-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.0.1-linux_386.tar.bz2",
+ "archiveFileName": "arduino101load-2.0.1-linux_386.tar.bz2",
+ "size": "1001211",
+ "checksum": "SHA-256:2941d1f2f726ca1dd3789a744a2084e8f9000912bdaf25ef888c90fd454057e9"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.0.1-windows_386.tar.bz2",
+ "archiveFileName": "arduino101load-2.0.1-windows_386.tar.bz2",
+ "size": "1046214",
+ "checksum": "SHA-256:932373b6da9a8ad8ee9051937ea42cedde604fa8437050dcf7baa29564fc4547"
+ }
+ ]
+ },
+ {
+ "name": "arduino101load",
+ "version": "2.1.0",
+ "systems": [
+ {
+ "host": "i386-apple-darwin11",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.1.0-darwin_amd64.tar.bz2",
+ "archiveFileName": "arduino101load-2.1.0-darwin_amd64.tar.bz2",
+ "size": "1188264",
+ "checksum": "SHA-256:068310277e032df5e36bd7cf351680462bd14f667536baf0b46e0f98e88a5616"
+ },
+ {
+ "host": "arm-linux-gnueabihf",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.1.0-linux_arm.tar.bz2",
+ "archiveFileName": "arduino101load-2.1.0-linux_arm.tar.bz2",
+ "size": "1110912",
+ "checksum": "SHA-256:440c407d3fcca28333830891550bd4ed62ff4a6fb54a488330291bb79e737e97"
+ },
+ {
+ "host": "x86_64-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.1.0-linux_amd64.tar.bz2",
+ "archiveFileName": "arduino101load-2.1.0-linux_amd64.tar.bz2",
+ "size": "1184509",
+ "checksum": "SHA-256:c86f707914c733f51d705b32d45ade9d708913f458382f30b7436bdcdc9bb514"
+ },
+ {
+ "host": "i686-linux-gnu",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.1.0-linux_386.tar.bz2",
+ "archiveFileName": "arduino101load-2.1.0-linux_386.tar.bz2",
+ "size": "1065618",
+ "checksum": "SHA-256:b17ce537f26e6ececf176585859594e1a66dfb788f074711d0be4a563c1815d8"
+ },
+ {
+ "host": "i686-mingw32",
+ "url": "http://downloads.arduino.cc/tools/arduino101load-2.1.0-windows_386.tar.bz2",
+ "archiveFileName": "arduino101load-2.1.0-windows_386.tar.bz2",
+ "size": "1078361",
+ "checksum": "SHA-256:32ff223937fbf747f1c552256627dfcb8eeccb4903342a9e8ac8b3e6d89cb4a7"
+ }
+ ]
+ },
+ {
+ "version": "0.9.0+0.1",
+ "name": "openocd",
+ "systems": [
+ {
+ "url": "https://github.com/01org/OpenOCD/releases/download/0.9-0.1/openocd-windows.zip",
+ "checksum": "SHA-256:95accfa22294cf150f33fba3cac45e6aa6857ebc2ba5dacebc2963b4a6021962",
+ "host": "i686-mingw32",
+ "archiveFileName": "openocd-windows-0.9.0.zip",
+ "size": "9181935"
+ },
+ {
+ "url": "https://github.com/01org/OpenOCD/releases/download/0.9-0.1/openocd-osx.tar.bz2",
+ "checksum": "SHA-256:57083e887ba77826a5f532f4b1ca4b7390cb7a2e2744583576a72f28a77dcc44",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "openocd-osx-0.9.0.tar.bz2",
+ "size": "896625"
+ },
+ {
+ "url": "https://github.com/01org/OpenOCD/releases/download/0.9-0.1/openocd-linux32.tar.bz2",
+ "checksum": "SHA-256:49d2a220e4b25eead4fe03d2ffa339ed946abef9cb718debdc7369f7b3c70534",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "openocd-linux32-0.9.0.tar.bz2",
+ "size": "3923308"
+ },
+ {
+ "url": "https://github.com/01org/OpenOCD/releases/download/0.9-0.1/openocd-linux64.tar.bz2",
+ "checksum": "SHA-256:4df6d3d387b45fa6214145f736c48c95109871fcd85fa81b81c01b533097a031",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "openocd-linux64-0.9.0.tar.bz2",
+ "size": "4010990"
+ }
+ ]
+ },
+ {
+ "version": "1.0.0",
+ "name": "flashpack",
+ "systems": [
+ {
+ "url": "https://github.com/arduino/ArduinoCore-arc32/releases/download/1.0.7/arduino101-factory_ble-flashpack-ide.tar.bz2",
+ "checksum": "SHA-256:6d4422cfa5c21ceba522ebf298bf24ebb2f57089e6bbaf6356038ef6fae6ca6d",
+ "host": "i686-mingw32",
+ "archiveFileName": "arduino101-factory_ble-flashpack-ide.tar.bz2",
+ "size": "654391"
+ },
+ {
+ "url": "https://github.com/arduino/ArduinoCore-arc32/releases/download/1.0.7/arduino101-factory_ble-flashpack-ide.tar.bz2",
+ "checksum": "SHA-256:6d4422cfa5c21ceba522ebf298bf24ebb2f57089e6bbaf6356038ef6fae6ca6d",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "arduino101-factory_ble-flashpack-ide.tar.bz2",
+ "size": "654391"
+ },
+ {
+ "url": "https://github.com/arduino/ArduinoCore-arc32/releases/download/1.0.7/arduino101-factory_ble-flashpack-ide.tar.bz2",
+ "checksum": "SHA-256:6d4422cfa5c21ceba522ebf298bf24ebb2f57089e6bbaf6356038ef6fae6ca6d",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "arduino101-factory_ble-flashpack-ide.tar.bz2",
+ "size": "654391"
+ },
+ {
+ "url": "https://github.com/arduino/ArduinoCore-arc32/releases/download/1.0.7/arduino101-factory_ble-flashpack-ide.tar.bz2",
+ "checksum": "SHA-256:6d4422cfa5c21ceba522ebf298bf24ebb2f57089e6bbaf6356038ef6fae6ca6d",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "arduino101-factory_ble-flashpack-ide.tar.bz2",
+ "size": "654391"
+ }
+ ]
+ },
+ {
+ "version": "2.0.0",
+ "name": "flashpack",
+ "systems": [
+ {
+ "url": "https://github.com/arduino/ArduinoCore-arc32/releases/download/2.0.0/flashpack_ble_v3.tar.bz2",
+ "checksum": "SHA-256:d5c9dfacfa4a12580d8853db32c194537aa920f43564e99aee42f069590cce3e",
+ "host": "i686-mingw32",
+ "archiveFileName": "flashpack_ble_v3.tar.bz2",
+ "size": "678394"
+ },
+ {
+ "url": "https://github.com/arduino/ArduinoCore-arc32/releases/download/2.0.0/flashpack_ble_v3.tar.bz2",
+ "checksum": "SHA-256:d5c9dfacfa4a12580d8853db32c194537aa920f43564e99aee42f069590cce3e",
+ "host": "i386-apple-darwin11",
+ "archiveFileName": "flashpack_ble_v3.tar.bz2",
+ "size": "678394"
+ },
+ {
+ "url": "https://github.com/arduino/ArduinoCore-arc32/releases/download/2.0.0/flashpack_ble_v3.tar.bz2",
+ "checksum": "SHA-256:d5c9dfacfa4a12580d8853db32c194537aa920f43564e99aee42f069590cce3e",
+ "host": "i686-linux-gnu",
+ "archiveFileName": "flashpack_ble_v3.tar.bz2",
+ "size": "678394"
+ },
+ {
+ "url": "https://github.com/arduino/ArduinoCore-arc32/releases/download/2.0.0/flashpack_ble_v3.tar.bz2",
+ "checksum": "SHA-256:d5c9dfacfa4a12580d8853db32c194537aa920f43564e99aee42f069590cce3e",
+ "host": "x86_64-linux-gnu",
+ "archiveFileName": "flashpack_ble_v3.tar.bz2",
+ "size": "678394"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "atmel-avr-xminis",
+ "maintainer": "Atmel University France",
+ "websiteURL": "https://github.com/AtmelUniversityFrance/atmel-avr-xmini-boardmanagermodule/wiki",
+ "help":
+ {
+ "online": "http://www.arduino.cc/en/Reference/HomePage"
+ },
+ "platforms":
+ [
+ {
+ "name": "Atmel AVR Xplained-minis",
+ "architecture": "avr",
+ "version": "0.3.0",
+ "category": "Partner",
+ "url": "https://github.com/AtmelUniversityFrance/atmel-avr-xmini-boardmanagermodule/releases/download/v0.3.0/atmel-avr-xmini-boardmanagermodule-0.3.0.tar.bz2",
+ "archiveFileName": "atmel-avr-xmini-boardmanagermodule-0.3.0.tar.bz2",
+ "checksum": "SHA-256:3bf7739682bdd86c8e8e566769429e424859f23022664295f800df489c782f18",
+ "size": "95352",
+ "boards": [
+ {
+ "name": "atmega168pb-xmini"
+ },
+ {
+ "name": "atmega328pb-xmini"
+ },
+ {
+ "name": "atmega328p-xmini"
+ }
+ ],
+ "toolsDependencies":
+ [
+ {
+ "packager": "arduino",
+ "name": "avr-gcc",
+ "version": "4.8.1-arduino5"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.0.1-arduino5"
+ }
+ ]
+ },
+ {
+ "name": "Atmel AVR Xplained-minis",
+ "architecture": "avr",
+ "version": "0.4.0",
+ "category": "Partner",
+ "url": "https://github.com/AtmelUniversityFrance/atmel-avr-xmini-boardmanagermodule/releases/download/v0.4.0/atmel-avr-xmini-boardmanagermodule-0.4.0.tar.bz2",
+ "archiveFileName": "atmel-avr-xmini-boardmanagermodule-0.4.0.tar.bz2",
+ "checksum": "SHA-256:36e09208c91071ff2fce4a7ed06463cf820c867d9f59477192d18965c0830ed5",
+ "size": "95452",
+ "boards": [
+ {
+ "name": "atmega168pb-xmini"
+ },
+ {
+ "name": "atmega328pb-xmini"
+ },
+ {
+ "name": "atmega328p-xmini"
+ }
+ ],
+ "toolsDependencies":
+ [
+ ]
+ },
+ {
+ "name": "Atmel AVR Xplained-minis",
+ "architecture": "avr",
+ "version": "0.5.0",
+ "category": "Partner",
+ "url": "https://github.com/AtmelUniversityFrance/atmel-avr-xmini-boardmanagermodule/releases/download/v0.5.0/atmel-avr-xmini-boardmanagermodule-0.5.0.tar.bz2",
+ "archiveFileName": "atmel-avr-xmini-boardmanagermodule-0.5.0.tar.bz2",
+ "checksum": "SHA-256:e2e401fc30f8e8519bcd628884699373dc386d83a366c740b1895aa9b8cc30f4",
+ "size": "95573",
+ "boards": [
+ {
+ "name": "atmega168pb-xmini"
+ },
+ {
+ "name": "atmega328pb-xmini"
+ },
+ {
+ "name": "atmega328p-xmini"
+ }
+ ],
+ "toolsDependencies":
+ [
+ ]
+ },
+ {
+ "name": "Atmel AVR Xplained-minis",
+ "architecture": "avr",
+ "version": "0.6.0",
+ "category": "Partner",
+ "url": "https://github.com/AtmelUniversityFrance/atmel-avr-xmini-boardmanagermodule/releases/download/v0.6.0/atmel-avr-xmini-boardmanagermodule-0.6.0.tar.bz2",
+ "archiveFileName": "atmel-avr-xmini-boardmanagermodule-0.6.0.tar.bz2",
+ "checksum": "SHA-256:60bcb315a33056fac9b1d626ac039b3439efdae821ba91b70e09dc5ce8e07f88",
+ "size": "95447",
+ "boards": [
+ {
+ "name": "atmega168pb-xmini"
+ },
+ {
+ "name": "atmega328pb-xmini"
+ },
+ {
+ "name": "atmega328p-xmini"
+ }
+ ],
+ "toolsDependencies":
+ [
+ ]
+ }
+ ],
+ "tools":
+ [
+ ]
+ },
+ {
+ "name":"littleBits",
+ "maintainer":"littleBits Electronics",
+ "websiteURL":"http://www.littlebits.cc/bits/w6-arduino",
+ "email":"support@littlebits.cc",
+ "help":{
+ "online":"http://www.littlebits.cc/arduino-help"
+ },
+ "platforms":[
+ {
+ "name":"littleBits Arduino AVR Modules",
+ "architecture":"avr",
+ "version":"1.0.0",
+ "category": "Arduino@Heart",
+ "url": "http://downloads.arduino.cc/cores/littlebits-avr-1.0.0.tar.bz2",
+ "archiveFileName":"littlebits-avr-1.0.0.tar.bz2",
+ "checksum":"SHA-256:8B867B8E89718B405231681CF9091F24372FAEF8B5B9FFBFBCF2486E3D9B5324",
+ "size":"27762",
+ "help":{
+ "online":"http://www.littlebits.cc/arduino-help"
+ },
+ "boards":[
+ { "name":"littleBits w6 Arduino module" }
+ ],
+ "toolsDependencies":[
+ { "packager":"arduino", "name":"avr-gcc", "version":"4.8.1-arduino5" },
+ { "packager":"arduino", "name":"avrdude", "version":"6.0.1-arduino5" }
+ ]
+ }
+ ],
+ "tools":[]
+ },
+ {
+ "name": "Microsoft",
+ "maintainer": "Microsoft.IoT",
+ "websiteURL": "https://github.com/ms-iot/iot-utilities/tree/master/IotCoreAppDeployment/ArduinoIde/",
+ "email": "bfjelds@microsoft.com",
+ "help": {
+ "online": "http://developer.microsoft.com/en-us/windows/iot/IotCoreAppDeployment_ArduinoIde.htm"
+ },
+ "platforms": [
+ {
+ "name": "Windows 10 Iot Core",
+ "architecture": "win10",
+ "version": "1.0.0",
+ "category": "Contributed",
+ "help": {
+ "online": "http://developer.microsoft.com/en-us/windows/iot/IotCoreAppDeployment_ArduinoIde.htm"
+ },
+ "url": "https://github.com/ms-iot/iot-utilities/raw/master/IotCoreAppDeployment/ArduinoIde/release/win10_iotcore-1.0.0_ide-1.6.6.zip",
+ "archiveFileName": "win10_iotcore-1.0.0_ide-1.6.6.zip",
+ "checksum": "SHA-256:ed1b42b396217e242cd3a5b597ad5dad5957cb8117dd0f972376bafab7d8a0e5",
+ "size": "47325480",
+ "boards": [
+ {"name": "Windows 10 IoT Core"}
+ ],
+ "toolsDependencies":[]
+ },
+ {
+ "name": "Windows 10 Iot Core",
+ "architecture": "win10",
+ "version": "1.1.0",
+ "category": "Contributed",
+ "help": {
+ "online": "http://developer.microsoft.com/en-us/windows/iot/IotCoreAppDeployment_ArduinoIde.htm"
+ },
+ "url": "https://github.com/ms-iot/iot-utilities/raw/master/IotCoreAppDeployment/ArduinoIde/release/win10_iotcore-1.1.0_ide-1.6.6.zip",
+ "archiveFileName": "win10_iotcore-1.1.0_ide-1.6.6.zip",
+ "checksum": "SHA-256:683dcb0a72e80b9d21117f6471e0860d8cc35cd9b86557ba6fd1ed255952413e",
+ "size": "48316890",
+ "boards": [
+ {"name": "Windows 10 IoT Core"}
+ ],
+ "toolsDependencies":[]
+ },
+ {
+ "name": "Windows 10 Iot Core",
+ "architecture": "win10",
+ "version": "1.1.1",
+ "category": "Contributed",
+ "help": {
+ "online": "http://ms-iot.github.io/content/en-US/win10/IotCoreAppDeployment_ArduinoIde.htm"
+ },
+ "url": "https://github.com/ms-iot/iot-utilities/raw/master/IotCoreAppDeployment/ArduinoIde/release/win10_iotcore-1.1.1_ide-1.6.11.zip",
+ "archiveFileName": "win10_iotcore-1.1.1_ide-1.6.11.zip",
+ "checksum": "SHA-256:f455f2829164065faacde141b3c15604c51bb79b6874d55d1124c66aae372693",
+ "size": "48317624",
+ "boards": [
+ {"name": "Windows 10 IoT Core"}
+ ],
+ "toolsDependencies":[]
+ },
+ {
+ "name": "Windows 10 Iot Core",
+ "architecture": "win10",
+ "version": "1.1.2",
+ "category": "Contributed",
+ "help": {
+ "online": "http://ms-iot.github.io/content/en-US/win10/IotCoreAppDeployment_ArduinoIde.htm"
+ },
+ "url": "https://github.com/ms-iot/iot-utilities/raw/master/IotCoreAppDeployment/ArduinoIde/release/win10_iotcore-1.1.2_ide-1.6.11.zip",
+ "archiveFileName": "win10_iotcore-1.1.2_ide-1.6.11.zip",
+ "checksum": "SHA-256:b1c9956b46f33bcebb7f500d29931b19ed4723c713ac0439681ea1fb172722d1",
+ "size": "48317648",
+ "boards": [
+ {"name": "Windows 10 IoT Core"}
+ ],
+ "toolsDependencies":[]
+ }
+ ],
+ "tools":[]
+ },
+ {
+ "name": "Arrow",
+ "maintainer": "Axel Elettronica S.r.l.",
+ "websiteURL": "http://axelelettronica.it/",
+ "email": "development@axelelettronica.it",
+ "help": {
+ "online": "http://www.arrowsmarteverything.com/support/"
+ },
+ "platforms": [
+ {
+ "name": "Arrow Boards",
+ "architecture": "samd",
+ "version": "1.2.0",
+ "category": "Partner",
+ "url": "http://downloads.arduino.cc/cores/Arrow-samd-1.2.0.tar.bz2",
+ "archiveFileName": "Arrow-samd-1.2.0.tar.bz2",
+ "checksum": "SHA-256:1d1cd66eb9986ed60a45b6554b8dcbf168401339c8a06bcf45c1b6ee2efa0943",
+ "size": "36575",
+ "boards": [
+ {"name": "SmartEverything Fox"},
+ {"name": "SmartTutto"}
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "arduino",
+ "name": "bossac",
+ "version": "1.5-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.0.0-atmel"
+ }
+ ]
+ },
+ {
+ "name": "Arrow Boards",
+ "architecture": "samd",
+ "version": "2.0.0",
+ "category": "Partner",
+ "url": "http://downloads.arduino.cc/cores/Arrow-samd-2.0.0.tar.gz",
+ "archiveFileName": "Arrow-samd-2.0.0.tar.gz",
+ "checksum": "SHA-256:e0bbf85d28af03a196a625462598294f9dd30ffbc9d686cbf2cc09bc5a76421a",
+ "size": "772778",
+ "boards": [
+ {"name": "SmartEverything Fox"},
+ {"name": "SmartEverything Fox3"},
+ {"name": "SmartEverything Lion"},
+ {"name": "SmartEverything Dragonfly"},
+ {"name": "Analog ADI"},
+ {"name": "SmartTutto"}
+ ],
+ "toolsDependencies": [
+ ]
+ },
+ {
+ "name": "Arrow Boards",
+ "architecture": "samd",
+ "version": "2.1.0",
+ "category": "Partner",
+ "url": "http://downloads.arduino.cc/cores/Arrow-samd-2.1.0.tar.gz",
+ "archiveFileName": "Arrow-samd-2.1.0.tar.gz",
+ "checksum": "SHA-256:fc593dba03249300a9e2cff88c06f50389681e4fe6e3c92207d3ffb8a8a673f7",
+ "size": "780064",
+ "boards": [
+ {"name": "SmartEverything Fox"},
+ {"name": "SmartEverything Fox3"},
+ {"name": "SmartEverything Lion"},
+ {"name": "SmartEverything Dragonfly"},
+ {"name": "Analog ADI"},
+ {"name": "SmartTutto"}
+ ],
+ "toolsDependencies": [
+ ]
+ }
+ ],
+ "tools": [
+ ]
+ },
+ {
+ "name": "emoro",
+ "maintainer": "Inovatic-ICT",
+ "websiteURL": "http://www.emoro.eu/shop/system/download/EMoRo2560_and_GLAM_brochure.pdf.0b83da8ad90db30722cca3a5d2529494",
+ "email": "support@inovatic-ict.com",
+ "help": {
+ "online": "http://www.emoro.eu/shop/index.php?route=product/product&product_id=52"
+ },
+ "platforms": [
+ {
+ "name": "EMORO 2560",
+ "architecture": "avr",
+ "version": "3.2.1",
+ "category": "Arduino@Heart",
+ "url": "http://downloads.arduino.cc/cores/EMoRo_2560-3.2.1.zip",
+ "archiveFileName": "EMoRo_2560-3.2.1.zip",
+ "checksum": "SHA-256:56dd308fc2f84229688f6219a4c31629ec9b38bdadcd382c45fae9247b94f051",
+ "size": "846444",
+ "boards": [
+ {"name": "EMoRo 2560. Board based on ATmega 2560 MCU"}
+ ],
+ "toolsDependencies": [
+ ]
+ },
+ {
+ "name": "EMORO 2560",
+ "architecture": "avr",
+ "version": "3.2.2",
+ "category": "Arduino@Heart",
+ "url": "http://downloads.arduino.cc/cores/EMoRo_2560-3.2.2.zip",
+ "archiveFileName": "EMoRo_2560-3.2.2.zip",
+ "checksum": "SHA-256:c67e6660af44c923c10deede252f9c46dabaf5b61d6e44f3785be688f1c9b46f",
+ "size": "565744",
+ "boards": [
+ {"name": "EMoRo 2560. Board based on ATmega 2560 MCU"}
+ ],
+ "toolsDependencies": [
+ ]
+ }
+ ],
+ "tools": [
+ ]
+ },
+ {
+ "name": "industruino",
+ "maintainer": "Industruino",
+ "websiteURL": "https://industruino.com/",
+ "email": "connect@industruino.com",
+ "help": {
+ "online": "https://github.com/Industruino/IndustruinoSAMD"
+ },
+ "platforms": [
+ {
+ "name": "Industruino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.0.0",
+ "category": "Contributed",
+ "url": "https://static.industruino.com/downloads/code/IndustruinoCores/IndustruinoSAMD/core/industruino-samd-1.0.0.tar.bz2",
+ "archiveFileName": "industruino-samd-1.0.0.tar.bz2",
+ "checksum": "SHA-256:f2e03e584117474c3a6481922c7030558236588762dceea0421ea79ceec3ae11",
+ "size": "172665",
+ "boards": [
+ { "name": "Industruino D21G" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "industruino",
+ "name": "bossac",
+ "version": "1.7.0-industruino"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino8"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.0.0"
+ }
+ ]
+ },
+ {
+ "name": "Industruino SAMD Boards (32-bits ARM Cortex-M0+)",
+ "architecture": "samd",
+ "version": "1.0.1",
+ "category": "Contributed",
+ "url": "https://static.industruino.com/downloads/code/IndustruinoCores/IndustruinoSAMD/core/industruino-samd-1.0.1.tar.bz2",
+ "archiveFileName": "industruino-samd-1.0.1.tar.bz2",
+ "checksum": "SHA-256:ffd9a0d53f0e659432182987bfc70eeca8ea8cceaeac802be677614ed51c371f",
+ "size": "203844",
+ "boards": [
+ { "name": "Industruino D21G" }
+ ],
+ "toolsDependencies": [
+ {
+ "packager": "arduino",
+ "name": "arm-none-eabi-gcc",
+ "version": "4.8.3-2014q1"
+ },
+ {
+ "packager": "industruino",
+ "name": "bossac",
+ "version": "1.7.0-industruino"
+ },
+ {
+ "packager": "arduino",
+ "name": "openocd",
+ "version": "0.9.0-arduino"
+ },
+ {
+ "packager": "arduino",
+ "name": "avrdude",
+ "version": "6.3.0-arduino8"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS",
+ "version": "4.5.0"
+ },
+ {
+ "packager": "arduino",
+ "name": "CMSIS-Atmel",
+ "version": "1.0.0"
+ }
+ ]
+ }
+ ],
+ "tools": [
+ {
+ "name": "bossac",
+ "version": "1.7.0-industruino",
+ "systems": [
+ {
+ "host": "i686-mingw32",
+ "url": "https://static.industruino.com/downloads/code/IndustruinoCores/IndustruinoSAMD/tools/bossac-1.7.0-industruino-mingw32.tar.gz",
+ "archiveFileName": "bossac-1.7.0-industruino-mingw32.tar.gz",
+ "checksum": "SHA-256:960d91feb565e957dbac9399e16839aa4eb4400153f2373896b733eeea778ab2",
+ "size": "565844"
+ },
+ {
+ "host": "x86_64-apple-darwin",
+ "url": "https://static.industruino.com/downloads/code/IndustruinoCores/IndustruinoSAMD/tools/bossac-1.7.0-industruino-i386-apple-darwin15.6.0.tar.gz",
+ "archiveFileName": "bossac-1.7.0-industruino-i386-apple-darwin15.6.0.tar.gz",
+ "checksum": "SHA-256:a2455d20fd8269d0655ebc50014e539911070a0b14964082337655f17de7cbf2",
+ "size": "128421"
+ },
+ {
+ "host": "x86_64-pc-linux-gnu",
+ "url": "https://static.industruino.com/downloads/code/IndustruinoCores/IndustruinoSAMD/tools/bossac-1.7.0-industruino-x86_64-linux-gnu.tar.gz",
+ "archiveFileName": "bossac-1.7.0-industruino-x86_64-linux-gnu.tar.gz",
+ "checksum": "SHA-256:2cbeb64a0e0a58f3b769ef6cdcfe55fa939b3015845b3081a9beebe9432ba4a6",
+ "size": "62348"
+ },
+ {
+ "host": "i686-pc-linux-gnu",
+ "url": "https://static.industruino.com/downloads/code/IndustruinoCores/IndustruinoSAMD/tools/bossac-1.7.0-industruino-i686-linux-gnu.tar.gz",
+ "archiveFileName": "bossac-1.7.0-industruino-i686-linux-gnu.tar.gz",
+ "checksum": "SHA-256:77b06322da2a9bdc17ddcbc29627802e8b2bf47b5625fe6fb17d90203e4a4071",
+ "size": "63204"
+ }
+ ]
+ },
+ {
+ "name": "windows-drivers",
+ "version": "0.0.1",
+ "systems": [
+ {
+ "host": "i686-mingw32",
+ "url": "https://static.industruino.com/downloads/drivers/drivers-industruino-windows-0.0.1.zip",
+ "archiveFileName": "drivers-industruino-windows-0.0.1.zip",
+ "checksum": "SHA-256:f1d7c7d30ca71f7224dac077f18f25f62ff97b43dd1e4a43ec4930a13a3ac484",
+ "size": "2545"
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
\ No newline at end of file
diff --git a/v2/pkgs/tools.go b/v2/pkgs/tools.go
new file mode 100644
index 000000000..7598e0e7b
--- /dev/null
+++ b/v2/pkgs/tools.go
@@ -0,0 +1,263 @@
+package pkgs
+
+import (
+ "bytes"
+ "context"
+ "crypto/sha256"
+ "encoding/hex"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+
+ "github.com/arduino/arduino-create-agent/gen/tools"
+ "github.com/codeclysm/extract"
+ "github.com/xrash/smetrics"
+)
+
// Tools is a client that implements github.com/arduino/arduino-create-agent/gen/tools.Service interface.
// It saves tools in a specified folder with this structure: packager/name/version
// For example:
//   folder
//   └── arduino
//       └── bossac
//           ├── 1.6.1-arduino
//           │   └── bossac
//           └── 1.7.0
//               └── bossac
// It requires an Indexes client to list and read package index files: use the Indexes struct
type Tools struct {
	// Indexes lists and fetches the downloaded package index files.
	// The Indexes struct in this package satisfies it.
	Indexes interface {
		List(context.Context) ([]string, error)
		Get(context.Context, string) (Index, error)
	}
	// Folder is the root directory where tools are installed
	// (layout: Folder/packager/name/version).
	Folder string
}
+
+// Available crawles the downloaded package index files and returns a list of tools that can be installed.
+func (c *Tools) Available(ctx context.Context) (res tools.ToolCollection, err error) {
+ list, err := c.Indexes.List(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, url := range list {
+ index, err := c.Indexes.Get(ctx, url)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, packager := range index.Packages {
+ for _, tool := range packager.Tools {
+ res = append(res, &tools.Tool{
+ Packager: packager.Name,
+ Name: tool.Name,
+ Version: tool.Version,
+ })
+ }
+ }
+ }
+
+ return res, nil
+}
+
+// Installed crawles the Tools Folder and finds the installed tools.
+func (c *Tools) Installed(ctx context.Context) (tools.ToolCollection, error) {
+ res := tools.ToolCollection{}
+
+ // Find packagers
+ packagers, err := ioutil.ReadDir(c.Folder)
+ if err != nil {
+ if !strings.Contains(err.Error(), "no such file") {
+ return nil, err
+ }
+ err = os.MkdirAll(c.Folder, 0755)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ for _, packager := range packagers {
+ if !packager.IsDir() {
+ continue
+ }
+
+ // Find tools
+ toolss, err := ioutil.ReadDir(filepath.Join(c.Folder, packager.Name()))
+ if err != nil {
+ return nil, err
+ }
+
+ for _, tool := range toolss {
+ // Find versions
+ path := filepath.Join(c.Folder, packager.Name(), tool.Name())
+ versions, err := ioutil.ReadDir(path)
+ if err != nil {
+ continue // we ignore errors because the folders could be dirty
+ }
+
+ for _, version := range versions {
+ res = append(res, &tools.Tool{
+ Packager: packager.Name(),
+ Name: tool.Name(),
+ Version: version.Name(),
+ })
+ }
+ }
+ }
+
+ return res, nil
+}
+
+// Install crawles the Index folder, downloads the specified tool, extracts the archive in the Tools Folder.
+// It checks for the Signature specified in the package index.
+func (c *Tools) Install(ctx context.Context, payload *tools.ToolPayload) (*tools.Operation, error) {
+ path := filepath.Join(payload.Packager, payload.Name, payload.Version)
+
+ if payload.URL != nil {
+ return c.install(ctx, path, *payload.URL, *payload.Checksum)
+ }
+
+ list, err := c.Indexes.List(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, url := range list {
+ index, err := c.Indexes.Get(ctx, url)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, packager := range index.Packages {
+ if packager.Name != payload.Packager {
+ continue
+ }
+
+ for _, tool := range packager.Tools {
+ if tool.Name == payload.Name &&
+ tool.Version == payload.Version {
+
+ i := findSystem(tool)
+
+ return c.install(ctx, path, tool.Systems[i].URL, tool.Systems[i].Checksum)
+ }
+ }
+ }
+ }
+
+ return nil, tools.MakeNotFound(
+ fmt.Errorf("tool not found with packager '%s', name '%s', version '%s'",
+ payload.Packager, payload.Name, payload.Version))
+}
+
+func (c *Tools) install(ctx context.Context, path, url, checksum string) (*tools.Operation, error) {
+ // Download
+ res, err := http.Get(url)
+ if err != nil {
+ return nil, err
+ }
+ defer res.Body.Close()
+
+ // Use a teereader to only read once
+ var buffer bytes.Buffer
+ reader := io.TeeReader(res.Body, &buffer)
+
+ err = extract.Archive(ctx, reader, c.Folder, rename(path))
+ if err != nil {
+ return nil, err
+ }
+
+ sum := sha256.Sum256(buffer.Bytes())
+ sumString := "SHA-256:" + hex.EncodeToString(sum[:sha256.Size])
+
+ if sumString != checksum {
+ os.RemoveAll(path)
+ return nil, errors.New("checksum doesn't match")
+ }
+
+ // Write installed.json for retrocompatibility with v1
+ err = writeInstalled(c.Folder, path)
+ if err != nil {
+ return nil, err
+ }
+
+ return &tools.Operation{Status: "ok"}, nil
+}
+
+// Remove deletes the tool folder from Tools Folder
+func (c *Tools) Remove(ctx context.Context, payload *tools.ToolPayload) (*tools.Operation, error) {
+ path := filepath.Join(payload.Packager, payload.Name, payload.Version)
+
+ err := os.RemoveAll(filepath.Join(c.Folder, path))
+ if err != nil {
+ return nil, err
+ }
+
+ return &tools.Operation{Status: "ok"}, nil
+}
+
+func rename(base string) extract.Renamer {
+ return func(path string) string {
+ parts := strings.Split(path, string(filepath.Separator))
+ path = strings.Join(parts[1:], string(filepath.Separator))
+ path = filepath.Join(base, path)
+
+ return path
+ }
+}
+
+func findSystem(tool Tool) int {
+ var systems = map[string]string{
+ "linuxamd64": "x86_64-linux-gnu",
+ "linux386": "i686-linux-gnu",
+ "darwinamd64": "apple-darwin",
+ "windows386": "i686-mingw32",
+ "windowsamd64": "i686-mingw32",
+ "linuxarm": "arm-linux-gnueabihf",
+ }
+
+ var correctSystem int
+ maxSimilarity := 0.7
+
+ for i, system := range tool.Systems {
+ similarity := smetrics.Jaro(system.Host, systems[runtime.GOOS+runtime.GOARCH])
+ if similarity > maxSimilarity {
+ correctSystem = i
+ maxSimilarity = similarity
+ }
+ }
+
+ return correctSystem
+}
+
+func writeInstalled(folder, path string) error {
+ // read installed.json
+ installed := map[string]string{}
+
+ data, err := ioutil.ReadFile(filepath.Join(folder, "installed.json"))
+ if err == nil {
+ err = json.Unmarshal(data, &installed)
+ if err != nil {
+ return err
+ }
+ }
+
+ parts := strings.Split(path, string(filepath.Separator))
+ tool := parts[len(parts)-2]
+
+ installed[tool] = filepath.Join(folder, path)
+
+ data, err = json.Marshal(installed)
+ if err != nil {
+ return err
+ }
+
+ return ioutil.WriteFile(filepath.Join(folder, "installed.json"), data, 0644)
+}
diff --git a/v2/pkgs/tools_test.go b/v2/pkgs/tools_test.go
new file mode 100644
index 000000000..f45bd7c1d
--- /dev/null
+++ b/v2/pkgs/tools_test.go
@@ -0,0 +1,155 @@
+package pkgs_test
+
+import (
+ "context"
+ "io/ioutil"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "runtime"
+ "strings"
+ "testing"
+
+ "github.com/arduino/arduino-create-agent/gen/indexes"
+ "github.com/arduino/arduino-create-agent/gen/tools"
+ "github.com/arduino/arduino-create-agent/v2/pkgs"
+)
+
// TestTools performs a series of operations about tools, ensuring it behaves as expected.
// This test depends on the internet so it could fail unexpectedly
func TestTools(t *testing.T) {
	// Use local file as index: the test server serves the checked-in
	// package_index.json instead of hitting downloads.arduino.cc for the index.
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		http.ServeFile(w, r, "testdata/package_index.json")
	}))
	defer ts.Close()

	// Initialize indexes with a temp folder (shared with the tools client below)
	tmp, err := ioutil.TempDir("", "")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(tmp)

	indexesClient := pkgs.Indexes{
		Folder: tmp,
	}

	service := pkgs.Tools{
		Folder:  tmp,
		Indexes: &indexesClient,
	}

	ctx := context.Background()

	// Add a new index pointing at the local test server
	_, err = indexesClient.Add(ctx, &indexes.IndexPayload{URL: ts.URL})
	if err != nil {
		t.Fatal(err)
	}

	// List available tools; 61 is the number of tools in testdata/package_index.json
	available, err := service.Available(ctx)
	if err != nil {
		t.Fatal(err)
	}
	if len(available) != 61 {
		t.Fatalf("expected %d == %d (%s)", len(available), 61, "len(available)")
	}

	// Try to install a non-existent tool
	_, err = service.Install(ctx, &tools.ToolPayload{})
	if err == nil || !strings.Contains(err.Error(), "tool not found with packager '', name '', version ''") {
		t.Fatalf("expected '%v' == '%v' (%s)", err, "tool not found with packager '', name '', version ''", "err")
	}

	// Nothing should be installed yet
	installed, err := service.Installed(ctx)
	if err != nil {
		t.Fatal(err)
	}
	if len(installed) != 0 {
		t.Fatalf("expected %d == %d (%s)", len(installed), 0, "len(installed)")
	}

	// Install a tool resolved through the package index (downloads for real)
	_, err = service.Install(ctx, &tools.ToolPayload{
		Packager: "arduino",
		Name:     "avrdude",
		Version:  "6.0.1-arduino2",
	})
	if err != nil {
		t.Fatal(err)
	}
	// List installed tools
	installed, err = service.Installed(ctx)
	if err != nil {
		t.Fatal(err)
	}
	if len(installed) != 1 {
		t.Fatalf("expected %d == %d (%s)", len(installed), 1, "len(installed)")
	}

	// Remove tool
	_, err = service.Remove(ctx, &tools.ToolPayload{
		Packager: "arduino",
		Name:     "avrdude",
		Version:  "6.0.1-arduino2",
	})
	if err != nil {
		t.Fatal(err)
	}

	installed, err = service.Installed(ctx)
	if err != nil {
		t.Fatal(err)
	}
	if len(installed) != 0 {
		t.Fatalf("expected %d == %d (%s)", len(installed), 0, "len(installed)")
	}

	// Install a tool by specifying url and checksum directly (bypassing the index)
	_, err = service.Install(ctx, &tools.ToolPayload{
		Packager: "arduino",
		Name:     "avrdude",
		Version:  "6.0.1-arduino2",
		URL:      strpoint(url()),
		Checksum: strpoint(checksum()),
	})
	if err != nil {
		t.Fatal(err)
	}

	installed, err = service.Installed(ctx)
	if err != nil {
		t.Fatal(err)
	}
	if len(installed) != 1 {
		t.Fatalf("expected %d == %d (%s)", len(installed), 1, "len(installed)")
	}
}
+
// strpoint returns a pointer to a copy of the given string, handy for
// filling optional payload fields.
func strpoint(s string) *string {
	v := s
	return &v
}
+
// url returns the download URL of avrdude 6.0.1-arduino2 for the platform
// the test is running on, or the empty string on unsupported platforms.
func url() string {
	const base = "http://downloads.arduino.cc/tools/avrdude-6.0.1-arduino2-"
	switch runtime.GOOS + runtime.GOARCH {
	case "linuxamd64":
		return base + "x86_64-pc-linux-gnu.tar.bz2"
	case "linux386":
		return base + "i686-pc-linux-gnu.tar.bz2"
	case "darwinamd64":
		return base + "i386-apple-darwin11.tar.bz2"
	case "windows386":
		return base + "i686-mingw32.zip"
	default:
		return ""
	}
}
+
// checksum returns the "SHA-256:"-prefixed checksum of the avrdude archive
// matching the current platform, or the empty string on unsupported platforms.
func checksum() string {
	switch runtime.GOOS + runtime.GOARCH {
	case "linuxamd64":
		return "SHA-256:2489004d1d98177eaf69796760451f89224007c98b39ebb5577a9a34f51425f1"
	case "linux386":
		return "SHA-256:6f633dd6270ad0d9ef19507bcbf8697b414a15208e4c0f71deec25ef89cdef3f"
	case "darwinamd64":
		return "SHA-256:71117cce0096dad6c091e2c34eb0b9a3386d3aec7d863d2da733d9e5eac3a6b1"
	case "windows386":
		return "SHA-256:6c5483800ba753c80893607e30cade8ab77b182808fcc5ea15fa3019c63d76ae"
	default:
		return ""
	}
}
diff --git a/vendor/github.com/codeclysm/extract/.travis.yml b/vendor/github.com/codeclysm/extract/.travis.yml
new file mode 100644
index 000000000..7817f11e4
--- /dev/null
+++ b/vendor/github.com/codeclysm/extract/.travis.yml
@@ -0,0 +1,4 @@
+language: go
+script:
+ - go test
+ - go test -bench=.
\ No newline at end of file
diff --git a/vendor/github.com/codeclysm/extract/LICENSE b/vendor/github.com/codeclysm/extract/LICENSE
new file mode 100644
index 000000000..63b6614d4
--- /dev/null
+++ b/vendor/github.com/codeclysm/extract/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 codeclysm
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/codeclysm/extract/README.md b/vendor/github.com/codeclysm/extract/README.md
new file mode 100644
index 000000000..e06d7c8d1
--- /dev/null
+++ b/vendor/github.com/codeclysm/extract/README.md
@@ -0,0 +1,39 @@
+Extract
+=====================
+[](https://travis-ci.org/codeclysm/extract)
+[](https://raw.githubusercontent.com/codeclysm/extract/master/LICENSE)
+[](https://godoc.org/github.com/codeclysm/extract)
+
+
+ import "github.com/codeclysm/extract"
+
+Package extract allows to extract archives in zip, tar.gz or tar.bz2 formats
+easily.
+
+Most of the time you'll just need to call the proper function with a buffer and
+a destination:
+
+```go
+data, _ := ioutil.ReadFile("path/to/file.tar.bz2")
+buffer := bytes.NewBuffer(data)
+extract.TarBz2(buffer, "/path/where/to/extract", nil)
+```
+
+Sometimes you'll want a bit more control over the files, such as extracting a
+subfolder of the archive. In this cases you can specify a renamer func that will
+change the path for every file:
+
+```go
+var shift = func(path string) string {
+ parts := strings.Split(path, string(filepath.Separator))
+ parts = parts[1:]
+ return strings.Join(parts, string(filepath.Separator))
+}
+extract.TarBz2(data, "/path/where/to/extract", shift)
+```
+
+If you don't know which archive you're dealing with (life really is always a surprise) you can use Archive, which will infer the type of archive from the first bytes
+
+```go
+extract.Archive(data, "/path/where/to/extract", nil)
+```
\ No newline at end of file
diff --git a/vendor/github.com/codeclysm/extract/cancelable_reader.go b/vendor/github.com/codeclysm/extract/cancelable_reader.go
new file mode 100644
index 000000000..368a3b997
--- /dev/null
+++ b/vendor/github.com/codeclysm/extract/cancelable_reader.go
@@ -0,0 +1,32 @@
+package extract
+
+import (
+ "context"
+ "errors"
+ "io"
+)
+
// copyCancel copies src into dst, aborting with an error as soon as ctx is
// canceled (the cancellation is checked before every Read).
func copyCancel(ctx context.Context, dst io.Writer, src io.Reader) (int64, error) {
	return io.Copy(dst, newCancelableReader(ctx, src))
}
+
// cancelableReader wraps an io.Reader and makes every Read fail once the
// associated context is done.
type cancelableReader struct {
	ctx context.Context
	src io.Reader
}
+
// Read forwards to the underlying reader unless ctx has been canceled, in
// which case it returns an "interrupted" error without reading.
func (r *cancelableReader) Read(p []byte) (int, error) {
	select {
	case <-r.ctx.Done():
		return 0, errors.New("interrupted")
	default:
		return r.src.Read(p)
	}
}
+
// newCancelableReader binds src to ctx so that reads stop after cancellation.
func newCancelableReader(ctx context.Context, src io.Reader) *cancelableReader {
	return &cancelableReader{
		ctx: ctx,
		src: src,
	}
}
diff --git a/vendor/github.com/codeclysm/extract/extract.go b/vendor/github.com/codeclysm/extract/extract.go
new file mode 100644
index 000000000..617b7178b
--- /dev/null
+++ b/vendor/github.com/codeclysm/extract/extract.go
@@ -0,0 +1,339 @@
+// Package extract allows to extract archives in zip, tar.gz or tar.bz2 formats
+// easily.
+//
+// Most of the time you'll just need to call the proper function with a buffer and
+// a destination:
+//
+// data, _ := ioutil.ReadFile("path/to/file.tar.bz2")
+// buffer := bytes.NewBuffer(data)
+// extract.TarBz2(data, "/path/where/to/extract", nil)
+// ```
+//
+// Sometimes you'll want a bit more control over the files, such as extracting a
+// subfolder of the archive. In this cases you can specify a renamer func that will
+// change the path for every file:
+//
+// var shift = func(path string) string {
+// parts := strings.Split(path, string(filepath.Separator))
+// parts = parts[1:]
+// return strings.Join(parts, string(filepath.Separator))
+// }
+// extract.TarBz2(data, "/path/where/to/extract", shift)
+// ```
+//
+// If you don't know which archive you're dealing with (life really is always a surprise) you can use Archive, which will infer the type of archive from the first bytes
+//
+// extract.Archive(data, "/path/where/to/extract", nil)
+package extract
+
+import (
+ "archive/tar"
+ "archive/zip"
+ "bytes"
+ "compress/bzip2"
+ "compress/gzip"
+ "context"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+
+ filetype "gopkg.in/h2non/filetype.v1"
+ "gopkg.in/h2non/filetype.v1/types"
+
+ "github.com/juju/errors"
+)
+
+// Renamer is a function that can be used to rename the files when you're extracting
+// them. For example you may want to only extract files with a certain pattern.
+// If you return an empty string they won't be extracted.
+type Renamer func(string) string
+
+// Archive extracts a generic archived stream of data in the specified location.
+// It automatically detects the archive type and accepts a rename function to
+// handle the names of the files.
+// If the file is not an archive, an error is returned.
+func Archive(ctx context.Context, body io.Reader, location string, rename Renamer) error {
+ body, kind, err := match(body)
+ if err != nil {
+ errors.Annotatef(err, "Detect archive type")
+ }
+
+ switch kind.Extension {
+ case "zip":
+ return Zip(ctx, body, location, rename)
+ case "gz":
+ return Gz(ctx, body, location, rename)
+ case "bz2":
+ return Bz2(ctx, body, location, rename)
+ case "tar":
+ return Tar(ctx, body, location, rename)
+ default:
+ return errors.New("Not a supported archive")
+ }
+}
+
// Bz2 extracts a .bz2 or .tar.bz2 archived stream of data in the specified location.
// It accepts a rename function to handle the names of the files (see the example)
func Bz2(ctx context.Context, body io.Reader, location string, rename Renamer) error {
	reader := bzip2.NewReader(body)

	// Sniff the decompressed stream: a .tar.bz2 contains a tar archive.
	body, kind, err := match(reader)
	if err != nil {
		return errors.Annotatef(err, "extract bz2: detect")
	}

	if kind.Extension == "tar" {
		return Tar(ctx, body, location, rename)
	}

	// Plain .bz2: write the decompressed data as a single file at location.
	err = copy(ctx, location, 0666, body)
	if err != nil {
		return err
	}
	return nil
}
+
// Gz extracts a .gz or .tar.gz archived stream of data in the specified location.
// It accepts a rename function to handle the names of the files (see the example)
func Gz(ctx context.Context, body io.Reader, location string, rename Renamer) error {
	reader, err := gzip.NewReader(body)
	if err != nil {
		return errors.Annotatef(err, "Gunzip")
	}

	// Sniff the decompressed stream: a .tar.gz contains a tar archive.
	body, kind, err := match(reader)
	if err != nil {
		return err
	}

	if kind.Extension == "tar" {
		return Tar(ctx, body, location, rename)
	}
	// Plain .gz: write the decompressed data as a single file at location.
	err = copy(ctx, location, 0666, body)
	if err != nil {
		return err
	}
	return nil
}
+
// file holds a regular archive entry fully buffered in memory until the
// whole directory structure has been created on disk.
type file struct {
	Path string
	Mode os.FileMode
	Data bytes.Buffer
}

// link records a hard link or symlink (Path) and its target (Name), to be
// created after all regular files have been written.
type link struct {
	Name string
	Path string
}
+
// Tar extracts a .tar archived stream of data in the specified location.
// It accepts a rename function to handle the names of the files (see the example)
func Tar(ctx context.Context, body io.Reader, location string, rename Renamer) error {
	files := []file{}
	links := []link{}
	symlinks := []link{}

	// We make the first pass creating the directory structure, or we could end up
	// attempting to create a file where there's no folder
	tr := tar.NewReader(body)
	for {
		// Honor cancellation between entries.
		select {
		case <-ctx.Done():
			return errors.New("interrupted")
		default:
		}

		header, err := tr.Next()
		if err == io.EOF {
			break
		}

		if err != nil {
			return errors.Annotatef(err, "Read tar stream")
		}

		// The renamer may relocate an entry or drop it entirely (empty path).
		path := header.Name
		if rename != nil {
			path = rename(path)
		}

		if path == "" {
			continue
		}

		path = filepath.Join(location, path)
		info := header.FileInfo()

		switch header.Typeflag {
		case tar.TypeDir:
			if err := os.MkdirAll(path, info.Mode()); err != nil {
				return errors.Annotatef(err, "Create directory %s", path)
			}
		case tar.TypeReg, tar.TypeRegA:
			// Buffer regular files in memory; they are written to disk in
			// the second pass, once all directories exist.
			var data bytes.Buffer
			if _, err := copyCancel(ctx, &data, tr); err != nil {
				return errors.Annotatef(err, "Read contents of file %s", path)
			}
			files = append(files, file{Path: path, Mode: info.Mode(), Data: data})
		case tar.TypeLink:
			// Hard link targets are inside the archive, so they are also
			// subject to renaming and anchored under location.
			name := header.Linkname
			if rename != nil {
				name = rename(name)
			}

			name = filepath.Join(location, name)
			links = append(links, link{Path: path, Name: name})
		case tar.TypeSymlink:
			// Symlink targets are kept verbatim (they may be relative).
			symlinks = append(symlinks, link{Path: path, Name: header.Linkname})
		}
	}

	// Now we make another pass creating the files and links
	for i := range files {
		if err := copy(ctx, files[i].Path, files[i].Mode, &files[i].Data); err != nil {
			return errors.Annotatef(err, "Create file %s", files[i].Path)
		}
	}

	for i := range links {
		select {
		case <-ctx.Done():
			return errors.New("interrupted")
		default:
		}
		if err := os.Link(links[i].Name, links[i].Path); err != nil {
			return errors.Annotatef(err, "Create link %s", links[i].Path)
		}
	}

	for i := range symlinks {
		select {
		case <-ctx.Done():
			return errors.New("interrupted")
		default:
		}
		if err := os.Symlink(symlinks[i].Name, symlinks[i].Path); err != nil {
			return errors.Annotatef(err, "Create link %s", symlinks[i].Path)
		}
	}
	return nil
}
+
// Zip extracts a .zip archived stream of data in the specified location.
// It accepts a rename function to handle the names of the files (see the example).
func Zip(ctx context.Context, body io.Reader, location string, rename Renamer) error {
	// read the whole body into a buffer. Not sure this is the best way to do it
	// NOTE(review): the copyCancel error is discarded here; a truncated read
	// would only surface via zip.NewReader below — confirm this is intended.
	buffer := bytes.NewBuffer([]byte{})
	copyCancel(ctx, buffer, body)

	archive, err := zip.NewReader(bytes.NewReader(buffer.Bytes()), int64(buffer.Len()))
	if err != nil {
		return errors.Annotatef(err, "Read the zip file")
	}

	files := []file{}
	links := []link{}

	// We make the first pass creating the directory structure, or we could end up
	// attempting to create a file where there's no folder
	for _, header := range archive.File {
		select {
		case <-ctx.Done():
			return errors.New("interrupted")
		default:
		}

		// The renamer may relocate an entry or drop it entirely (empty path).
		path := header.Name
		if rename != nil {
			path = rename(path)
		}

		if path == "" {
			continue
		}

		path = filepath.Join(location, path)
		info := header.FileInfo()

		switch {
		case info.IsDir():
			// NOTE(review): 100 is a decimal literal (0o144), probably meant
			// octal 0100 (owner execute) — confirm with upstream before changing.
			if err := os.MkdirAll(path, info.Mode()|os.ModeDir|100); err != nil {
				return errors.Annotatef(err, "Create directory %s", path)
			}
		// We only check for symlinks because hard links aren't possible
		case info.Mode()&os.ModeSymlink != 0:
			// In zip archives the symlink target is stored as the entry's content.
			f, err := header.Open()
			if err != nil {
				return errors.Annotatef(err, "Open link %s", path)
			}
			name, err := ioutil.ReadAll(f)
			if err != nil {
				return errors.Annotatef(err, "Read address of link %s", path)
			}
			links = append(links, link{Path: path, Name: string(name)})
		default:
			// Buffer regular files in memory; written in the second pass.
			f, err := header.Open()
			if err != nil {
				return errors.Annotatef(err, "Open file %s", path)
			}
			var data bytes.Buffer
			if _, err := copyCancel(ctx, &data, f); err != nil {
				return errors.Annotatef(err, "Read contents of file %s", path)
			}
			files = append(files, file{Path: path, Mode: info.Mode(), Data: data})
		}
	}

	// Now we make another pass creating the files and links
	for i := range files {
		if err := copy(ctx, files[i].Path, files[i].Mode, &files[i].Data); err != nil {
			return errors.Annotatef(err, "Create file %s", files[i].Path)
		}
	}

	for i := range links {
		select {
		case <-ctx.Done():
			return errors.New("interrupted")
		default:
		}
		if err := os.Symlink(links[i].Name, links[i].Path); err != nil {
			return errors.Annotatef(err, "Create link %s", links[i].Path)
		}
	}

	return nil
}
+
// copy writes src to a new file at path with the given mode, creating parent
// directories as needed, and honors ctx cancellation while copying.
func copy(ctx context.Context, path string, mode os.FileMode, src io.Reader) error {
	// We add the execution permission to be able to create files inside it
	// NOTE(review): 100 is a decimal literal (0o144), probably meant octal
	// 0100 (owner execute) — confirm with upstream before changing.
	err := os.MkdirAll(filepath.Dir(path), mode|os.ModeDir|100)
	if err != nil {
		return err
	}
	file, err := os.OpenFile(path, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, mode)
	if err != nil {
		return err
	}
	defer file.Close()
	_, err = copyCancel(ctx, file, src)
	return err
}
+
// match reads the first 512 bytes, calls types.Match and returns a reader
// for the whole stream
func match(r io.Reader) (io.Reader, types.Type, error) {
	buffer := make([]byte, 512)

	n, err := r.Read(buffer)
	if err != nil && err != io.EOF {
		return nil, types.Unknown, err
	}

	// Stitch the sniffed bytes back in front of the remaining stream so the
	// caller can consume the archive from the very beginning.
	r = io.MultiReader(bytes.NewBuffer(buffer[:n]), r)

	typ, err := filetype.Match(buffer)

	return r, typ, err
}
diff --git a/vendor/github.com/dimfeld/httppath/.travis.yml b/vendor/github.com/dimfeld/httppath/.travis.yml
new file mode 100644
index 000000000..47ae9f711
--- /dev/null
+++ b/vendor/github.com/dimfeld/httppath/.travis.yml
@@ -0,0 +1,5 @@
+language: go
+
+go:
+ - 1.2
+ - tip
diff --git a/vendor/github.com/dimfeld/httppath/LICENSE b/vendor/github.com/dimfeld/httppath/LICENSE
new file mode 100644
index 000000000..b829abc8a
--- /dev/null
+++ b/vendor/github.com/dimfeld/httppath/LICENSE
@@ -0,0 +1,24 @@
+Copyright (c) 2013 Julien Schmidt. All rights reserved.
+
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * The names of the contributors may not be used to endorse or promote
+ products derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL JULIEN SCHMIDT BE LIABLE FOR ANY
+DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/vendor/github.com/dimfeld/httppath/README.md b/vendor/github.com/dimfeld/httppath/README.md
new file mode 100644
index 000000000..3248dd904
--- /dev/null
+++ b/vendor/github.com/dimfeld/httppath/README.md
@@ -0,0 +1,5 @@
+# HttpPath [](https://travis-ci.org/dimfeld/httppath) [](https://godoc.org/github.com/dimfeld/httppath)
+
+Utilities for HTTP Path manipulation.
+
+Currently, this just contains the CleanPath function, renamed to Clean, from [Julien Schmidt's httprouter](https://github.com/julienschmidt/httprouter).
diff --git a/vendor/github.com/dimfeld/httppath/path.go b/vendor/github.com/dimfeld/httppath/path.go
new file mode 100644
index 000000000..d800d0144
--- /dev/null
+++ b/vendor/github.com/dimfeld/httppath/path.go
@@ -0,0 +1,127 @@
+// Copyright 2013 Julien Schmidt. All rights reserved.
+// Based on the path package, Copyright 2009 The Go Authors.
+// Use of this source code is governed by a BSD-style license that can be found
+// in the LICENSE file.
+
+package httppath
+
+// Clean is the URL version of path.Clean, it returns a canonical URL path
+// for p, eliminating . and .. elements.
+//
+// The following rules are applied iteratively until no further processing can
+// be done:
+// 1. Replace multiple slashes with a single slash.
+// 2. Eliminate each . path name element (the current directory).
+// 3. Eliminate each inner .. path name element (the parent directory)
+// along with the non-.. element that precedes it.
+// 4. Eliminate .. elements that begin a rooted path:
+// that is, replace "/.." by "/" at the beginning of a path.
+//
+// If the result of this process is an empty string, "/" is returned
+func Clean(p string) string {
+ if p == "" {
+ return "/"
+ }
+
+ n := len(p)
+ var buf []byte
+
+ // Invariants:
+ // reading from path; r is index of next byte to process.
+ // writing to buf; w is index of next byte to write.
+
+ // path must start with '/'
+ r := 1
+ w := 1
+
+ if p[0] != '/' {
+ r = 0
+ buf = make([]byte, n+1)
+ buf[0] = '/'
+ }
+
+ trailing := n > 2 && p[n-1] == '/'
+
+ // A bit more clunky without a 'lazybuf' like the path package, but the loop
+ // gets completely inlined (bufApp). So in contrast to the path package this
+ // loop has no expensive function calls (except 1x make)
+
+ for r < n {
+ switch {
+ case p[r] == '/':
+ // empty path element, trailing slash is added after the end
+ r++
+
+ case p[r] == '.' && r+1 == n:
+ trailing = true
+ r++
+
+ case p[r] == '.' && p[r+1] == '/':
+ // . element
+ r++
+
+ case p[r] == '.' && p[r+1] == '.' && (r+2 == n || p[r+2] == '/'):
+ // .. element: remove to last /
+ r += 2
+
+ if w > 1 {
+ // can backtrack
+ w--
+
+ if buf == nil {
+ for w > 1 && p[w] != '/' {
+ w--
+ }
+ } else {
+ for w > 1 && buf[w] != '/' {
+ w--
+ }
+ }
+ }
+
+ default:
+ // real path element.
+ // add slash if needed
+ if w > 1 {
+ bufApp(&buf, p, w, '/')
+ w++
+ }
+
+ // copy element
+ for r < n && p[r] != '/' {
+ bufApp(&buf, p, w, p[r])
+ w++
+ r++
+ }
+ }
+ }
+
+ // re-append trailing slash
+ if trailing && w > 1 {
+ bufApp(&buf, p, w, '/')
+ w++
+ }
+
+ // Turn empty string into "/"
+ if w == 0 {
+ return "/"
+ }
+
+ if buf == nil {
+ return p[:w]
+ }
+ return string(buf[:w])
+}
+
+// internal helper to lazily create a buffer if necessary
+func bufApp(buf *[]byte, s string, w int, c byte) {
+ if *buf == nil {
+ if s[w] == c {
+ return
+ }
+
+ *buf = make([]byte, len(s))
+ copy(*buf, s[:w])
+ }
+ (*buf)[w] = c
+}
diff --git a/vendor/github.com/dimfeld/httptreemux/.gitignore b/vendor/github.com/dimfeld/httptreemux/.gitignore
new file mode 100644
index 000000000..836562412
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/.gitignore
@@ -0,0 +1,23 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
diff --git a/vendor/github.com/dimfeld/httptreemux/.travis.yml b/vendor/github.com/dimfeld/httptreemux/.travis.yml
new file mode 100644
index 000000000..9267a02e7
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/.travis.yml
@@ -0,0 +1,14 @@
+language: go
+
+gobuild_args: "-v -race"
+go:
+ - 1.5
+ - 1.6
+ - 1.7
+ - 1.8
+ - 1.9
+ - tip
+
+matrix:
+ allow_failures:
+ - go: tip
diff --git a/vendor/github.com/dimfeld/httptreemux/LICENSE b/vendor/github.com/dimfeld/httptreemux/LICENSE
new file mode 100644
index 000000000..32c75c9b1
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014,2015 Daniel Imfeld
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/dimfeld/httptreemux/README.md b/vendor/github.com/dimfeld/httptreemux/README.md
new file mode 100644
index 000000000..2cc6a7a1b
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/README.md
@@ -0,0 +1,244 @@
+httptreemux [](https://travis-ci.org/dimfeld/httptreemux) [](https://godoc.org/github.com/dimfeld/httptreemux)
+===========
+
+High-speed, flexible, tree-based HTTP router for Go.
+
+This is inspired by [Julien Schmidt's httprouter](https://www.github.com/julienschmidt/httprouter), in that it uses a patricia tree, but the implementation is rather different. Specifically, the routing rules are relaxed so that a single path segment may be a wildcard in one route and a static token in another. This gives a nice combination of high performance with a lot of convenience in designing the routing patterns. In [benchmarks](https://github.com/julienschmidt/go-http-routing-benchmark), httptreemux is close to, but slightly slower than, httprouter.
+
+Release notes may be found using the [Github releases tab](https://github.com/dimfeld/httptreemux/releases). Version numbers are compatible with the [Semantic Versioning 2.0.0](http://semver.org/) convention, and a new release is made after every change to the code.
+
+## Why?
+There are a lot of good routers out there. But looking at the ones that were really lightweight, I couldn't quite get something that fit with the route patterns I wanted. The code itself is simple enough, so I spent an evening writing this.
+
+## Handler
+The handler is a simple function with the prototype `func(w http.ResponseWriter, r *http.Request, params map[string]string)`. The params argument contains the parameters parsed from wildcards and catch-alls in the URL, as described below. This type is aliased as httptreemux.HandlerFunc.
+
+### Using http.HandlerFunc
+Due to the inclusion of the [context](https://godoc.org/context) package as of Go 1.7, `httptreemux` now supports handlers of type [http.HandlerFunc](https://godoc.org/net/http#HandlerFunc). There are two ways to enable this support.
+
+#### Adapting an Existing Router
+
+The `UsingContext` method will wrap the router or group in a new group at the same path, but adapted for use with `context` and `http.HandlerFunc`.
+
+```go
+router := httptreemux.New()
+
+group := router.NewGroup("/api")
+group.GET("/v1/:id", func(w http.ResponseWriter, r *http.Request, params map[string]string) {
+ id := params["id"]
+ fmt.Fprintf(w, "GET /api/v1/%s", id)
+})
+
+// UsingContext returns a version of the router or group with context support.
+ctxGroup := group.UsingContext() // sibling to 'group' node in tree
+ctxGroup.GET("/v2/:id", func(w http.ResponseWriter, r *http.Request) {
+ params := httptreemux.ContextParams(r.Context())
+ id := params["id"]
+ fmt.Fprintf(w, "GET /api/v2/%s", id)
+})
+
+http.ListenAndServe(":8080", router)
+```
+
+#### New Router with Context Support
+
+The `NewContextMux` function returns a router preconfigured for use with `context` and `http.HandlerFunc`.
+
+```go
+router := httptreemux.NewContextMux()
+
+router.GET("/:page", func(w http.ResponseWriter, r *http.Request) {
+ params := httptreemux.ContextParams(r.Context())
+ fmt.Fprintf(w, "GET /%s", params["page"])
+})
+
+group := router.NewGroup("/api")
+group.GET("/v1/:id", func(w http.ResponseWriter, r *http.Request) {
+ params := httptreemux.ContextParams(r.Context())
+ id := params["id"]
+ fmt.Fprintf(w, "GET /api/v1/%s", id)
+})
+
+http.ListenAndServe(":8080", router)
+```
+
+
+
+## Routing Rules
+The syntax here is also modeled after httprouter. Each variable in a path may match on one segment only, except for an optional catch-all variable at the end of the URL.
+
+Some examples of valid URL patterns are:
+* `/post/all`
+* `/post/:postid`
+* `/post/:postid/page/:page`
+* `/post/:postid/:page`
+* `/images/*path`
+* `/favicon.ico`
+* `/:year/:month/`
+* `/:year/:month/:post`
+* `/:page`
+
+Note that all of the above URL patterns may exist concurrently in the router.
+
+Path elements starting with `:` indicate a wildcard in the path. A wildcard will only match on a single path segment. That is, the pattern `/post/:postid` will match on `/post/1` or `/post/1/`, but not `/post/1/2`.
+
+A path element starting with `*` is a catch-all, whose value will be a string containing all text in the URL matched by the wildcards. For example, with a pattern of `/images/*path` and a requested URL `images/abc/def`, path would contain `abc/def`. A catch-all path will not match an empty string, so in this example a separate route would need to be installed if you also want to match `/images/`.
+
+#### Using : and * in routing patterns
+
+The characters `:` and `*` can be used at the beginning of a path segment by escaping them with a backslash. A double backslash at the beginning of a segment is interpreted as a single backslash. These escapes are only checked at the very beginning of a path segment; they are not necessary or processed elsewhere in a token.
+
+```go
+router.GET("/foo/\\*starToken", handler) // matches /foo/*starToken
+router.GET("/foo/star*inTheMiddle", handler) // matches /foo/star*inTheMiddle
+router.GET("/foo/starBackslash\\*", handler) // matches /foo/starBackslash\*
+router.GET("/foo/\\\\*backslashWithStar") // matches /foo/\*backslashWithStar
+```
+
+### Routing Groups
+Lets you create a new group of routes with a given path prefix. Makes it easier to create clusters of paths like:
+* `/api/v1/foo`
+* `/api/v1/bar`
+
+To use this you do:
+```go
+router = httptreemux.New()
+api := router.NewGroup("/api/v1")
+api.GET("/foo", fooHandler) // becomes /api/v1/foo
+api.GET("/bar", barHandler) // becomes /api/v1/bar
+```
+
+### Routing Priority
+The priority rules in the router are simple.
+
+1. Static path segments take the highest priority. If a segment and its subtree are able to match the URL, that match is returned.
+2. Wildcards take second priority. For a particular wildcard to match, that wildcard and its subtree must match the URL.
+3. Finally, a catch-all rule will match when the earlier path segments have matched, and none of the static or wildcard conditions have matched. Catch-all rules must be at the end of a pattern.
+
+So with the following patterns adapted from [simpleblog](https://www.github.com/dimfeld/simpleblog), we'll see certain matches:
+```go
+router = httptreemux.New()
+router.GET("/:page", pageHandler)
+router.GET("/:year/:month/:post", postHandler)
+router.GET("/:year/:month", archiveHandler)
+router.GET("/images/*path", staticHandler)
+router.GET("/favicon.ico", staticHandler)
+```
+
+#### Example scenarios
+
+- `/abc` will match `/:page`
+- `/2014/05` will match `/:year/:month`
+- `/2014/05/really-great-blog-post` will match `/:year/:month/:post`
+- `/images/CoolImage.gif` will match `/images/*path`
+- `/images/2014/05/MayImage.jpg` will also match `/images/*path`, with all the text after `/images` stored in the variable path.
+- `/favicon.ico` will match `/favicon.ico`
+
+### Special Method Behavior
+If TreeMux.HeadCanUseGet is set to true, the router will call the GET handler for a pattern when a HEAD request is processed, if no HEAD handler has been added for that pattern. This behavior is enabled by default.
+
+Go's http.ServeContent and related functions already handle the HEAD method correctly by sending only the header, so in most cases your handlers will not need any special cases for it.
+
+By default TreeMux.OptionsHandler is a null handler that doesn't affect your routing. If you set the handler, it will be called on OPTIONS requests to a path already registered by another method. If you set a path specific handler by using `router.OPTIONS`, it will override the global Options Handler for that path.
+
+### Trailing Slashes
+The router has special handling for paths with trailing slashes. If a pattern is added to the router with a trailing slash, any matches on that pattern without a trailing slash will be redirected to the version with the slash. If a pattern does not have a trailing slash, matches on that pattern with a trailing slash will be redirected to the version without.
+
+The trailing slash flag is only stored once for a pattern. That is, if a pattern is added for a method with a trailing slash, all other methods for that pattern will also be considered to have a trailing slash, regardless of whether or not it is specified for those methods too.
+However this behavior can be turned off by setting TreeMux.RedirectTrailingSlash to false. By default it is set to true.
+
+One exception to this rule is catch-all patterns. By default, trailing slash redirection is disabled on catch-all patterns, since the structure of the entire URL and the desired patterns can not be predicted. If trailing slash removal is desired on catch-all patterns, set TreeMux.RemoveCatchAllTrailingSlash to true.
+
+```go
+router = httptreemux.New()
+router.GET("/about", pageHandler)
+router.GET("/posts/", postIndexHandler)
+router.POST("/posts", postFormHandler)
+
+GET /about will match normally.
+GET /about/ will redirect to /about.
+GET /posts will redirect to /posts/.
+GET /posts/ will match normally.
+POST /posts will redirect to /posts/, because the GET method used a trailing slash.
+```
+
+### Custom Redirects
+
+RedirectBehavior sets the behavior when the router redirects the request to the canonical version of the requested URL using RedirectTrailingSlash or RedirectClean. The default behavior is to return a 301 status, redirecting the browser to the version of the URL that matches the given pattern.
+
+These are the values accepted for RedirectBehavior. You may also add these values to the RedirectMethodBehavior map to define custom per-method redirect behavior.
+
+* Redirect301 - HTTP 301 Moved Permanently; this is the default.
+* Redirect307 - HTTP/1.1 Temporary Redirect
+* Redirect308 - RFC7538 Permanent Redirect
+* UseHandler - Don't redirect to the canonical path. Just call the handler instead.
+
+#### Rationale/Usage
+On a POST request, most browsers that receive a 301 will submit a GET request to the redirected URL, meaning that any data will likely be lost. If you want to handle and avoid this behavior, you may use Redirect307, which causes most browsers to resubmit the request using the original method and request body.
+
+Since 307 is supposed to be a temporary redirect, the new 308 status code has been proposed, which is treated the same, except it indicates correctly that the redirection is permanent. The big caveat here is that the RFC is relatively recent, and older or non-compliant browsers will not handle it. Therefore its use is not recommended unless you really know what you're doing.
+
+Finally, the UseHandler value will simply call the handler function for the pattern, without redirecting to the canonical version of the URL.
+
+### RequestURI vs. URL.Path
+
+#### Escaped Slashes
+Go automatically processes escaped characters in a URL, converting + to a space and %XX to the corresponding character. This can present issues when the URL contains a %2f, which is unescaped to '/'. This isn't an issue for most applications, but it will prevent the router from correctly matching paths and wildcards.
+
+For example, the pattern `/post/:post` would not match on `/post/abc%2fdef`, which is unescaped to `/post/abc/def`. The desired behavior is that it matches, and the `post` wildcard is set to `abc/def`.
+
+Therefore, this router defaults to using the raw URL, stored in the Request.RequestURI variable. Matching wildcards and catch-alls are then unescaped, to give the desired behavior.
+
+TL;DR: If a requested URL contains a %2f, this router will still do the right thing. Some Go HTTP routers may not due to [Go issue 3659](https://code.google.com/p/go/issues/detail?id=3659).
+
+#### Escaped Characters
+
+As mentioned above, characters in the URL are not unescaped when using RequestURI to determine the matched route. If this is a problem for you and you are unable to switch to URL.Path for the above reasons, you may set `router.EscapeAddedRoutes` to `true`. This option will run each added route through the `URL.EscapedPath` function, and add an additional route if the escaped version differs.
+
+#### http Package Utility Functions
+
+Although using RequestURI avoids the issue described above, certain utility functions such as `http.StripPrefix` modify URL.Path, and expect that the underlying router is using that field to make its decision. If you are using some of these functions, set the router's `PathSource` member to `URLPath`. This will give up the proper handling of escaped slashes described above, while allowing the router to work properly with these utility functions.
+
+## Concurrency
+
+The router contains an `RWMutex` that arbitrates access to the tree. This allows routes to be safely added from multiple goroutines at once.
+
+No concurrency controls are needed when only reading from the tree, so the default behavior is to not use the `RWMutex` when serving a request. This avoids a theoretical slowdown under high-usage scenarios from competing atomic integer operations inside the `RWMutex`. If your application adds routes to the router after it has begun serving requests, you should avoid potential race conditions by setting `router.SafeAddRoutesWhileRunning` to `true` to use the `RWMutex` when serving requests.
+
+## Error Handlers
+
+### NotFoundHandler
+TreeMux.NotFoundHandler can be set to provide custom 404-error handling. The default implementation is Go's `http.NotFound` function.
+
+### MethodNotAllowedHandler
+If a pattern matches, but the pattern does not have an associated handler for the requested method, the router calls the MethodNotAllowedHandler. The default
+version of this handler just writes the status code `http.StatusMethodNotAllowed` and sets the response header's `Allowed` field appropriately.
+
+### Panic Handling
+TreeMux.PanicHandler can be set to provide custom panic handling. The `SimplePanicHandler` just writes the status code `http.StatusInternalServerError`. The function `ShowErrorsPanicHandler`, adapted from [gocraft/web](https://github.com/gocraft/web), will print panic errors to the browser in an easily-readable format.
+
+## Unexpected Differences from Other Routers
+
+This router is intentionally light on features in the name of simplicity and
+performance. When coming from another router that does heavier processing behind
+the scenes, you may encounter some unexpected behavior. This list is by no means
+exhaustive, but covers some nonobvious cases that users have encountered.
+
+### gorilla/pat query string modifications
+
+When matching on parameters in a route, the `gorilla/pat` router will modify
+`Request.URL.RawQuery` to make it appear like the parameters were in the
+query string. `httptreemux` does not do this. See [Issue #26](https://github.com/dimfeld/httptreemux/issues/26) for more details and a
+code snippet that can perform this transformation for you, should you want it.
+
+### httprouter and catch-all parameters
+
+When using `httprouter`, a route with a catch-all parameter (e.g. `/images/*path`) will match on URLs like `/images/` where the catch-all parameter is empty. This router does not match on empty catch-all parameters, but the behavior can be duplicated by adding a route without the catch-all (e.g. `/images/`).
+
+## Middleware
+This package provides no middleware. But there are a lot of great options out there and it's pretty easy to write your own.
+
+# Acknowledgements
+
+* Inspiration from Julien Schmidt's [httprouter](https://github.com/julienschmidt/httprouter)
+* Show Errors panic handler from [gocraft/web](https://github.com/gocraft/web)
diff --git a/vendor/github.com/dimfeld/httptreemux/context.go b/vendor/github.com/dimfeld/httptreemux/context.go
new file mode 100644
index 000000000..de5ad449a
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/context.go
@@ -0,0 +1,123 @@
+// +build go1.7
+
+package httptreemux
+
+import (
+ "context"
+ "net/http"
+)
+
+// ContextGroup is a wrapper around Group, with the purpose of mimicking its API, but with the use of http.HandlerFunc-based handlers.
+// Instead of passing a parameter map via the handler (i.e. httptreemux.HandlerFunc), the path parameters are accessed via the request
+// object's context.
+type ContextGroup struct {
+ group *Group
+}
+
+// UsingContext wraps the receiver to return a new instance of a ContextGroup.
+// The returned ContextGroup is a sibling to its wrapped Group, within the parent TreeMux.
+// The choice of using a *Group as the receiver, as opposed to a function parameter, allows chaining
+// while method calls between a TreeMux, Group, and ContextGroup. For example:
+//
+// tree := httptreemux.New()
+// group := tree.NewGroup("/api")
+//
+// group.GET("/v1", func(w http.ResponseWriter, r *http.Request, params map[string]string) {
+// w.Write([]byte(`GET /api/v1`))
+// })
+//
+// group.UsingContext().GET("/v2", func(w http.ResponseWriter, r *http.Request) {
+// w.Write([]byte(`GET /api/v2`))
+// })
+//
+// http.ListenAndServe(":8080", tree)
+//
+func (g *Group) UsingContext() *ContextGroup {
+ return &ContextGroup{g}
+}
+
+// NewContextGroup adds a child context group to its path.
+func (cg *ContextGroup) NewContextGroup(path string) *ContextGroup {
+ return &ContextGroup{cg.group.NewGroup(path)}
+}
+
+func (cg *ContextGroup) NewGroup(path string) *ContextGroup {
+ return cg.NewContextGroup(path)
+}
+
+// Handle allows handling HTTP requests via an http.HandlerFunc, as opposed to an httptreemux.HandlerFunc.
+// Any parameters from the request URL are stored in a map[string]string in the request's context.
+func (cg *ContextGroup) Handle(method, path string, handler http.HandlerFunc) {
+ cg.group.Handle(method, path, func(w http.ResponseWriter, r *http.Request, params map[string]string) {
+ if params != nil {
+ r = r.WithContext(AddParamsToContext(r.Context(), params))
+ }
+ handler(w, r)
+ })
+}
+
+// Handler allows handling HTTP requests via an http.Handler interface, as opposed to an httptreemux.HandlerFunc.
+// Any parameters from the request URL are stored in a map[string]string in the request's context.
+func (cg *ContextGroup) Handler(method, path string, handler http.Handler) {
+ cg.group.Handle(method, path, func(w http.ResponseWriter, r *http.Request, params map[string]string) {
+ if params != nil {
+ r = r.WithContext(AddParamsToContext(r.Context(), params))
+ }
+ handler.ServeHTTP(w, r)
+ })
+}
+
+// GET is convenience method for handling GET requests on a context group.
+func (cg *ContextGroup) GET(path string, handler http.HandlerFunc) {
+ cg.Handle("GET", path, handler)
+}
+
+// POST is convenience method for handling POST requests on a context group.
+func (cg *ContextGroup) POST(path string, handler http.HandlerFunc) {
+ cg.Handle("POST", path, handler)
+}
+
+// PUT is convenience method for handling PUT requests on a context group.
+func (cg *ContextGroup) PUT(path string, handler http.HandlerFunc) {
+ cg.Handle("PUT", path, handler)
+}
+
+// DELETE is convenience method for handling DELETE requests on a context group.
+func (cg *ContextGroup) DELETE(path string, handler http.HandlerFunc) {
+ cg.Handle("DELETE", path, handler)
+}
+
+// PATCH is convenience method for handling PATCH requests on a context group.
+func (cg *ContextGroup) PATCH(path string, handler http.HandlerFunc) {
+ cg.Handle("PATCH", path, handler)
+}
+
+// HEAD is convenience method for handling HEAD requests on a context group.
+func (cg *ContextGroup) HEAD(path string, handler http.HandlerFunc) {
+ cg.Handle("HEAD", path, handler)
+}
+
+// OPTIONS is convenience method for handling OPTIONS requests on a context group.
+func (cg *ContextGroup) OPTIONS(path string, handler http.HandlerFunc) {
+ cg.Handle("OPTIONS", path, handler)
+}
+
+// ContextParams returns the params map associated with the given context if one exists. Otherwise, an empty map is returned.
+func ContextParams(ctx context.Context) map[string]string {
+ if p, ok := ctx.Value(paramsContextKey).(map[string]string); ok {
+ return p
+ }
+ return map[string]string{}
+}
+
+// AddParamsToContext inserts a parameters map into a context using
+// the package's internal context key. Clients of this package should
+// really only use this for unit tests.
+func AddParamsToContext(ctx context.Context, params map[string]string) context.Context {
+ return context.WithValue(ctx, paramsContextKey, params)
+}
+
+type contextKey int
+
+// paramsContextKey is used to retrieve a path's params map from a request's context.
+const paramsContextKey contextKey = 0
diff --git a/vendor/github.com/dimfeld/httptreemux/go.mod b/vendor/github.com/dimfeld/httptreemux/go.mod
new file mode 100644
index 000000000..627765e22
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/go.mod
@@ -0,0 +1,3 @@
+module github.com/dimfeld/httptreemux/v5
+
+go 1.9
diff --git a/vendor/github.com/dimfeld/httptreemux/group.go b/vendor/github.com/dimfeld/httptreemux/group.go
new file mode 100644
index 000000000..7ed533e7b
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/group.go
@@ -0,0 +1,195 @@
+package httptreemux
+
+import (
+ "fmt"
+ "net/url"
+ "strings"
+)
+
+type Group struct {
+ path string
+ mux *TreeMux
+}
+
+// Add a sub-group to this group
+func (g *Group) NewGroup(path string) *Group {
+ if len(path) < 1 {
+ panic("Group path must not be empty")
+ }
+
+ checkPath(path)
+ path = g.path + path
+ //Don't want trailing slash as all sub-paths start with slash
+ if path[len(path)-1] == '/' {
+ path = path[:len(path)-1]
+ }
+ return &Group{path, g.mux}
+}
+
+// Path elements starting with : indicate a wildcard in the path. A wildcard will only match on a
+// single path segment. That is, the pattern `/post/:postid` will match on `/post/1` or `/post/1/`,
+// but not `/post/1/2`.
+//
+// A path element starting with * is a catch-all, whose value will be a string containing all text
+// in the URL matched by the wildcards. For example, with a pattern of `/images/*path` and a
+// requested URL `images/abc/def`, path would contain `abc/def`.
+//
+// # Routing Rule Priority
+//
+// The priority rules in the router are simple.
+//
+// 1. Static path segments take the highest priority. If a segment and its subtree are able to match the URL, that match is returned.
+//
+// 2. Wildcards take second priority. For a particular wildcard to match, that wildcard and its subtree must match the URL.
+//
+// 3. Finally, a catch-all rule will match when the earlier path segments have matched, and none of the static or wildcard conditions have matched. Catch-all rules must be at the end of a pattern.
+//
+// So with the following patterns, we'll see certain matches:
+// router = httptreemux.New()
+// router.GET("/:page", pageHandler)
+// router.GET("/:year/:month/:post", postHandler)
+// router.GET("/:year/:month", archiveHandler)
+// router.GET("/images/*path", staticHandler)
+// router.GET("/favicon.ico", staticHandler)
+//
+// /abc will match /:page
+// /2014/05 will match /:year/:month
+// /2014/05/really-great-blog-post will match /:year/:month/:post
+// /images/CoolImage.gif will match /images/*path
+// /images/2014/05/MayImage.jpg will also match /images/*path, with all the text after /images stored in the variable path.
+// /favicon.ico will match /favicon.ico
+//
+// # Trailing Slashes
+//
+// The router has special handling for paths with trailing slashes. If a pattern is added to the
+// router with a trailing slash, any matches on that pattern without a trailing slash will be
+// redirected to the version with the slash. If a pattern does not have a trailing slash, matches on
+// that pattern with a trailing slash will be redirected to the version without.
+//
+// The trailing slash flag is only stored once for a pattern. That is, if a pattern is added for a
+// method with a trailing slash, all other methods for that pattern will also be considered to have a
+// trailing slash, regardless of whether or not it is specified for those methods too.
+//
+// This behavior can be turned off by setting TreeMux.RedirectTrailingSlash to false. By
+// default it is set to true. The specifics of the redirect depend on RedirectBehavior.
+//
+// One exception to this rule is catch-all patterns. By default, trailing slash redirection is
+// disabled on catch-all patterns, since the structure of the entire URL and the desired patterns
+// can not be predicted. If trailing slash removal is desired on catch-all patterns, set
+// TreeMux.RemoveCatchAllTrailingSlash to true.
+//
+// router = httptreemux.New()
+// router.GET("/about", pageHandler)
+// router.GET("/posts/", postIndexHandler)
+// router.POST("/posts", postFormHandler)
+//
+// GET /about will match normally.
+// GET /about/ will redirect to /about.
+// GET /posts will redirect to /posts/.
+// GET /posts/ will match normally.
+// POST /posts will redirect to /posts/, because the GET method used a trailing slash.
+func (g *Group) Handle(method string, path string, handler HandlerFunc) {
+ g.mux.mutex.Lock()
+ defer g.mux.mutex.Unlock()
+
+ addSlash := false
+ addOne := func(thePath string) {
+ node := g.mux.root.addPath(thePath[1:], nil, false)
+ if addSlash {
+ node.addSlash = true
+ }
+ node.setHandler(method, handler, false)
+
+ if g.mux.HeadCanUseGet && method == "GET" && node.leafHandler["HEAD"] == nil {
+ node.setHandler("HEAD", handler, true)
+ }
+ }
+
+ checkPath(path)
+ path = g.path + path
+ if len(path) == 0 {
+ panic("Cannot map an empty path")
+ }
+
+ if len(path) > 1 && path[len(path)-1] == '/' && g.mux.RedirectTrailingSlash {
+ addSlash = true
+ path = path[:len(path)-1]
+ }
+
+ if g.mux.EscapeAddedRoutes {
+ u, err := url.ParseRequestURI(path)
+ if err != nil {
+ panic("URL parsing error " + err.Error() + " on url " + path)
+ }
+ escapedPath := unescapeSpecial(u.String())
+
+ if escapedPath != path {
+ addOne(escapedPath)
+ }
+ }
+
+ addOne(path)
+}
+
+// Syntactic sugar for Handle("GET", path, handler)
+func (g *Group) GET(path string, handler HandlerFunc) {
+ g.Handle("GET", path, handler)
+}
+
+// Syntactic sugar for Handle("POST", path, handler)
+func (g *Group) POST(path string, handler HandlerFunc) {
+ g.Handle("POST", path, handler)
+}
+
+// Syntactic sugar for Handle("PUT", path, handler)
+func (g *Group) PUT(path string, handler HandlerFunc) {
+ g.Handle("PUT", path, handler)
+}
+
+// Syntactic sugar for Handle("DELETE", path, handler)
+func (g *Group) DELETE(path string, handler HandlerFunc) {
+ g.Handle("DELETE", path, handler)
+}
+
+// Syntactic sugar for Handle("PATCH", path, handler)
+func (g *Group) PATCH(path string, handler HandlerFunc) {
+ g.Handle("PATCH", path, handler)
+}
+
+// Syntactic sugar for Handle("HEAD", path, handler)
+func (g *Group) HEAD(path string, handler HandlerFunc) {
+ g.Handle("HEAD", path, handler)
+}
+
+// Syntactic sugar for Handle("OPTIONS", path, handler)
+func (g *Group) OPTIONS(path string, handler HandlerFunc) {
+ g.Handle("OPTIONS", path, handler)
+}
+
+func checkPath(path string) {
+ // All non-empty paths must start with a slash
+ if len(path) > 0 && path[0] != '/' {
+ panic(fmt.Sprintf("Path %s must start with slash", path))
+ }
+}
+
+func unescapeSpecial(s string) string {
+ // Look for sequences of \*, *, and \: that were escaped, and undo some of that escaping.
+
+ // Unescape /* since it references a wildcard token.
+ s = strings.Replace(s, "/%2A", "/*", -1)
+
+ // Unescape /\: since it references a literal colon
+ s = strings.Replace(s, "/%5C:", "/\\:", -1)
+
+ // Replace escaped /\\: with /\:
+ s = strings.Replace(s, "/%5C%5C:", "/%5C:", -1)
+
+ // Replace escaped /\* with /*
+ s = strings.Replace(s, "/%5C%2A", "/%2A", -1)
+
+ // Replace escaped /\\* with /\*
+ s = strings.Replace(s, "/%5C%5C%2A", "/%5C%2A", -1)
+
+ return s
+}
diff --git a/vendor/github.com/dimfeld/httptreemux/panichandler.go b/vendor/github.com/dimfeld/httptreemux/panichandler.go
new file mode 100644
index 000000000..cebb661bc
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/panichandler.go
@@ -0,0 +1,211 @@
+package httptreemux
+
+import (
+ "bufio"
+ "encoding/json"
+ "html/template"
+ "net/http"
+ "os"
+ "runtime"
+ "strings"
+)
+
+// SimplePanicHandler just returns error 500.
+func SimplePanicHandler(w http.ResponseWriter, r *http.Request, err interface{}) {
+ w.WriteHeader(http.StatusInternalServerError)
+}
+
+// ShowErrorsPanicHandler prints a nice representation of an error to the browser.
+// This was taken from github.com/gocraft/web, which adapted it from the Traffic project.
+func ShowErrorsPanicHandler(w http.ResponseWriter, r *http.Request, err interface{}) {
+ const size = 4096
+ stack := make([]byte, size)
+ stack = stack[:runtime.Stack(stack, false)]
+ renderPrettyError(w, r, err, stack)
+}
+
+func makeErrorData(r *http.Request, err interface{}, stack []byte, filePath string, line int) map[string]interface{} {
+
+ data := map[string]interface{}{
+ "Stack": string(stack),
+ "Params": r.URL.Query(),
+ "Method": r.Method,
+ "FilePath": filePath,
+ "Line": line,
+ "Lines": readErrorFileLines(filePath, line),
+ }
+
+ if e, ok := err.(error); ok {
+ data["Error"] = e.Error()
+ } else {
+ data["Error"] = err
+ }
+
+ return data
+}
+
+func renderPrettyError(rw http.ResponseWriter, req *http.Request, err interface{}, stack []byte) {
+ _, filePath, line, _ := runtime.Caller(5)
+
+ data := makeErrorData(req, err, stack, filePath, line)
+ rw.Header().Set("Content-Type", "text/html")
+ rw.WriteHeader(http.StatusInternalServerError)
+
+ tpl := template.Must(template.New("ErrorPage").Parse(panicPageTpl))
+ tpl.Execute(rw, data)
+}
+
+func ShowErrorsJsonPanicHandler(w http.ResponseWriter, r *http.Request, err interface{}) {
+ const size = 4096
+ stack := make([]byte, size)
+ stack = stack[:runtime.Stack(stack, false)]
+
+ _, filePath, line, _ := runtime.Caller(4)
+ data := makeErrorData(r, err, stack, filePath, line)
+
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusInternalServerError)
+ json.NewEncoder(w).Encode(data)
+}
+
+func readErrorFileLines(filePath string, errorLine int) map[int]string {
+ lines := make(map[int]string)
+
+ file, err := os.Open(filePath)
+ if err != nil {
+ return lines
+ }
+
+ defer file.Close()
+
+ reader := bufio.NewReader(file)
+ currentLine := 0
+ for {
+ line, err := reader.ReadString('\n')
+ if err != nil || currentLine > errorLine+5 {
+ break
+ }
+
+ currentLine++
+
+ if currentLine >= errorLine-5 {
+ lines[currentLine] = strings.Replace(line, "\n", "", -1)
+ }
+ }
+
+ return lines
+}
+
+const panicPageTpl string = `
+
+
+ Panic
+
+
+
+
+
+
+
+
+
+
+ In {{ .FilePath }}:{{ .Line }}
+
+
+
+
+
+ {{ range $lineNumber, $line := .Lines }}{{ $lineNumber }}{{ end }}
+ |
+
+ {{ range $lineNumber, $line := .Lines }}{{ $line }} {{ end }}
+ |
+
+
+
Stack
+
{{ .Stack }}
+
Request
+
Method: {{ .Method }}
+
Parameters:
+
+ {{ range $key, $value := .Params }}
+ - {{ $key }}: {{ $value }}
+ {{ end }}
+
+
+
+
+ `
diff --git a/vendor/github.com/dimfeld/httptreemux/path.go b/vendor/github.com/dimfeld/httptreemux/path.go
new file mode 100644
index 000000000..506ac3846
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/path.go
@@ -0,0 +1,127 @@
+// Copyright 2013 Julien Schmidt. All rights reserved.
+// Based on the path package, Copyright 2009 The Go Authors.
+// Use of this source code is governed by a BSD-style license that can be found
+// in the LICENSE file.
+
+package httptreemux
+
+// Clean is the URL version of path.Clean, it returns a canonical URL path
+// for p, eliminating . and .. elements.
+//
+// The following rules are applied iteratively until no further processing can
+// be done:
+// 1. Replace multiple slashes with a single slash.
+// 2. Eliminate each . path name element (the current directory).
+// 3. Eliminate each inner .. path name element (the parent directory)
+// along with the non-.. element that precedes it.
+// 4. Eliminate .. elements that begin a rooted path:
+// that is, replace "/.." by "/" at the beginning of a path.
+//
+// If the result of this process is an empty string, "/" is returned
+func Clean(p string) string {
+ if p == "" {
+ return "/"
+ }
+
+ n := len(p)
+ var buf []byte
+
+ // Invariants:
+ // reading from path; r is index of next byte to process.
+ // writing to buf; w is index of next byte to write.
+
+ // path must start with '/'
+ r := 1
+ w := 1
+
+ if p[0] != '/' {
+ r = 0
+ buf = make([]byte, n+1)
+ buf[0] = '/'
+ }
+
+ trailing := n > 2 && p[n-1] == '/'
+
+ // A bit more clunky without a 'lazybuf' like the path package, but the loop
+ // gets completely inlined (bufApp). So in contrast to the path package this
+ // loop has no expensive function calls (except 1x make)
+
+ for r < n {
+ switch {
+ case p[r] == '/':
+ // empty path element, trailing slash is added after the end
+ r++
+
+ case p[r] == '.' && r+1 == n:
+ trailing = true
+ r++
+
+ case p[r] == '.' && p[r+1] == '/':
+ // . element
+ r++
+
+ case p[r] == '.' && p[r+1] == '.' && (r+2 == n || p[r+2] == '/'):
+ // .. element: remove to last /
+ r += 2
+
+ if w > 1 {
+ // can backtrack
+ w--
+
+ if buf == nil {
+ for w > 1 && p[w] != '/' {
+ w--
+ }
+ } else {
+ for w > 1 && buf[w] != '/' {
+ w--
+ }
+ }
+ }
+
+ default:
+ // real path element.
+ // add slash if needed
+ if w > 1 {
+ bufApp(&buf, p, w, '/')
+ w++
+ }
+
+ // copy element
+ for r < n && p[r] != '/' {
+ bufApp(&buf, p, w, p[r])
+ w++
+ r++
+ }
+ }
+ }
+
+ // re-append trailing slash
+ if trailing && w > 1 {
+ bufApp(&buf, p, w, '/')
+ w++
+ }
+
+ // Turn empty string into "/"
+ if w == 0 {
+ return "/"
+ }
+
+ if buf == nil {
+ return p[:w]
+ }
+ return string(buf[:w])
+}
+
+// internal helper to lazily create a buffer if necessary
+func bufApp(buf *[]byte, s string, w int, c byte) {
+ if *buf == nil {
+ if s[w] == c {
+ return
+ }
+
+ *buf = make([]byte, len(s))
+ copy(*buf, s[:w])
+ }
+ (*buf)[w] = c
+}
diff --git a/vendor/github.com/dimfeld/httptreemux/router.go b/vendor/github.com/dimfeld/httptreemux/router.go
new file mode 100644
index 000000000..b8063e4d2
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/router.go
@@ -0,0 +1,300 @@
+// This is inspired by Julien Schmidt's httprouter, in that it uses a patricia tree, but the
+// implementation is rather different. Specifically, the routing rules are relaxed so that a
+// single path segment may be a wildcard in one route and a static token in another. This gives a
+// nice combination of high performance with a lot of convenience in designing the routing patterns.
+package httptreemux
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+)
+
+// The params argument contains the parameters parsed from wildcards and catch-alls in the URL.
+type HandlerFunc func(http.ResponseWriter, *http.Request, map[string]string)
+type PanicHandler func(http.ResponseWriter, *http.Request, interface{})
+
+// RedirectBehavior sets the behavior when the router redirects the request to the
+// canonical version of the requested URL using RedirectTrailingSlash or RedirectClean.
+// The default behavior is to return a 301 status, redirecting the browser to the version
+// of the URL that matches the given pattern.
+//
+// On a POST request, most browsers that receive a 301 will submit a GET request to
+// the redirected URL, meaning that any data will likely be lost. If you want to handle
+// and avoid this behavior, you may use Redirect307, which causes most browsers to
+// resubmit the request using the original method and request body.
+//
+// Since 307 is supposed to be a temporary redirect, the new 308 status code has been
+// proposed, which is treated the same, except it indicates correctly that the redirection
+// is permanent. The big caveat here is that the RFC is relatively recent, and older
+// browsers will not know what to do with it. Therefore its use is not recommended
+// unless you really know what you're doing.
+//
+// Finally, the UseHandler value will simply call the handler function for the pattern.
+type RedirectBehavior int
+
+type PathSource int
+
+const (
+ Redirect301 RedirectBehavior = iota // Return 301 Moved Permanently
+ Redirect307 // Return 307 HTTP/1.1 Temporary Redirect
+ Redirect308 // Return a 308 RFC7538 Permanent Redirect
+ UseHandler // Just call the handler function
+
+ RequestURI PathSource = iota // Use r.RequestURI
+ URLPath // Use r.URL.Path
+)
+
+// LookupResult contains information about a route lookup, which is returned from Lookup and
+// can be passed to ServeLookupResult if the request should be served.
+type LookupResult struct {
+ // StatusCode informs the caller about the result of the lookup.
+ // This will generally be `http.StatusNotFound` or `http.StatusMethodNotAllowed` for an
+ // error case. On a normal success, the statusCode will be `http.StatusOK`. A redirect code
+ // will also be used in the case
+ StatusCode int
+ handler HandlerFunc
+ params map[string]string
+ leafHandler map[string]HandlerFunc // Only has a value when StatusCode is MethodNotAllowed.
+}
+
+// Dump returns a text representation of the routing tree.
+func (t *TreeMux) Dump() string {
+ return t.root.dumpTree("", "")
+}
+
+func (t *TreeMux) serveHTTPPanic(w http.ResponseWriter, r *http.Request) {
+ if err := recover(); err != nil {
+ t.PanicHandler(w, r, err)
+ }
+}
+
+func (t *TreeMux) redirectStatusCode(method string) (int, bool) {
+ var behavior RedirectBehavior
+ var ok bool
+ if behavior, ok = t.RedirectMethodBehavior[method]; !ok {
+ behavior = t.RedirectBehavior
+ }
+ switch behavior {
+ case Redirect301:
+ return http.StatusMovedPermanently, true
+ case Redirect307:
+ return http.StatusTemporaryRedirect, true
+ case Redirect308:
+ // Go doesn't have a constant for this yet. Yet another sign
+ // that you probably shouldn't use it.
+ return 308, true
+ case UseHandler:
+ return 0, false
+ default:
+ return http.StatusMovedPermanently, true
+ }
+}
+
+func redirectHandler(newPath string, statusCode int) HandlerFunc {
+ return func(w http.ResponseWriter, r *http.Request, params map[string]string) {
+ redirect(w, r, newPath, statusCode)
+ }
+}
+
+func redirect(w http.ResponseWriter, r *http.Request, newPath string, statusCode int) {
+ newURL := url.URL{
+ Path: newPath,
+ RawQuery: r.URL.RawQuery,
+ Fragment: r.URL.Fragment,
+ }
+ http.Redirect(w, r, newURL.String(), statusCode)
+}
+
+func (t *TreeMux) lookup(w http.ResponseWriter, r *http.Request) (result LookupResult, found bool) {
+ result.StatusCode = http.StatusNotFound
+ path := r.RequestURI
+ unescapedPath := r.URL.Path
+ pathLen := len(path)
+ if pathLen > 0 && t.PathSource == RequestURI {
+ rawQueryLen := len(r.URL.RawQuery)
+
+ if rawQueryLen != 0 || path[pathLen-1] == '?' {
+ // Remove any query string and the ?.
+ path = path[:pathLen-rawQueryLen-1]
+ pathLen = len(path)
+ }
+ } else {
+ // In testing with http.NewRequest,
+ // RequestURI is not set so just grab URL.Path instead.
+ path = r.URL.Path
+ pathLen = len(path)
+ }
+
+ trailingSlash := path[pathLen-1] == '/' && pathLen > 1
+ if trailingSlash && t.RedirectTrailingSlash {
+ path = path[:pathLen-1]
+ unescapedPath = unescapedPath[:len(unescapedPath)-1]
+ }
+
+ n, handler, params := t.root.search(r.Method, path[1:])
+ if n == nil {
+ if t.RedirectCleanPath {
+ // Path was not found. Try cleaning it up and search again.
+ // TODO Test this
+ cleanPath := Clean(unescapedPath)
+ n, handler, params = t.root.search(r.Method, cleanPath[1:])
+ if n == nil {
+ // Still nothing found.
+ return
+ }
+ if statusCode, ok := t.redirectStatusCode(r.Method); ok {
+ // Redirect to the actual path
+ return LookupResult{statusCode, redirectHandler(cleanPath, statusCode), nil, nil}, true
+ }
+ } else {
+ // Not found.
+ return
+ }
+ }
+
+ if handler == nil {
+ if r.Method == "OPTIONS" && t.OptionsHandler != nil {
+ handler = t.OptionsHandler
+ }
+
+ if handler == nil {
+ result.leafHandler = n.leafHandler
+ result.StatusCode = http.StatusMethodNotAllowed
+ return
+ }
+ }
+
+ if !n.isCatchAll || t.RemoveCatchAllTrailingSlash {
+ if trailingSlash != n.addSlash && t.RedirectTrailingSlash {
+ if statusCode, ok := t.redirectStatusCode(r.Method); ok {
+ var h HandlerFunc
+ if n.addSlash {
+ // Need to add a slash.
+ h = redirectHandler(unescapedPath+"/", statusCode)
+ } else if path != "/" {
+ // We need to remove the slash. This was already done at the
+ // beginning of the function.
+ h = redirectHandler(unescapedPath, statusCode)
+ }
+
+ if h != nil {
+ return LookupResult{statusCode, h, nil, nil}, true
+ }
+ }
+ }
+ }
+
+ var paramMap map[string]string
+ if len(params) != 0 {
+ if len(params) != len(n.leafWildcardNames) {
+ // Need better behavior here. Should this be a panic?
+ panic(fmt.Sprintf("httptreemux parameter list length mismatch: %v, %v",
+ params, n.leafWildcardNames))
+ }
+
+ paramMap = make(map[string]string)
+ numParams := len(params)
+ for index := 0; index < numParams; index++ {
+ paramMap[n.leafWildcardNames[numParams-index-1]] = params[index]
+ }
+ }
+
+ return LookupResult{http.StatusOK, handler, paramMap, nil}, true
+}
+
+// Lookup performs a lookup without actually serving the request or mutating the request or response.
+// The return values are a LookupResult and a boolean. The boolean will be true when a handler
+// was found or the lookup resulted in a redirect which will point to a real handler. It is false
+// for requests which would result in a `StatusNotFound` or `StatusMethodNotAllowed`.
+//
+// Regardless of the returned boolean's value, the LookupResult may be passed to ServeLookupResult
+// to be served appropriately.
+func (t *TreeMux) Lookup(w http.ResponseWriter, r *http.Request) (LookupResult, bool) {
+ if t.SafeAddRoutesWhileRunning {
+ // In concurrency safe mode, we acquire a read lock on the mutex for any access.
+ // This is optional to avoid potential performance loss in high-usage scenarios.
+ t.mutex.RLock()
+ }
+
+ result, found := t.lookup(w, r)
+
+ if t.SafeAddRoutesWhileRunning {
+ t.mutex.RUnlock()
+ }
+
+ return result, found
+}
+
+// ServeLookupResult serves a request, given a lookup result from the Lookup function.
+func (t *TreeMux) ServeLookupResult(w http.ResponseWriter, r *http.Request, lr LookupResult) {
+ if lr.handler == nil {
+ if lr.StatusCode == http.StatusMethodNotAllowed && lr.leafHandler != nil {
+ if t.SafeAddRoutesWhileRunning {
+ t.mutex.RLock()
+ }
+
+ t.MethodNotAllowedHandler(w, r, lr.leafHandler)
+
+ if t.SafeAddRoutesWhileRunning {
+ t.mutex.RUnlock()
+ }
+ } else {
+ t.NotFoundHandler(w, r)
+ }
+ } else {
+ r = t.setDefaultRequestContext(r)
+ lr.handler(w, r, lr.params)
+ }
+}
+
+func (t *TreeMux) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+ if t.PanicHandler != nil {
+ defer t.serveHTTPPanic(w, r)
+ }
+
+ if t.SafeAddRoutesWhileRunning {
+ // In concurrency safe mode, we acquire a read lock on the mutex for any access.
+ // This is optional to avoid potential performance loss in high-usage scenarios.
+ t.mutex.RLock()
+ }
+
+ result, _ := t.lookup(w, r)
+
+ if t.SafeAddRoutesWhileRunning {
+ t.mutex.RUnlock()
+ }
+
+ t.ServeLookupResult(w, r, result)
+}
+
+// MethodNotAllowedHandler is the default handler for TreeMux.MethodNotAllowedHandler,
+// which is called for patterns that match, but do not have a handler installed for the
+// requested method. It simply writes the status code http.StatusMethodNotAllowed and fills
+// in the `Allow` header value appropriately.
+func MethodNotAllowedHandler(w http.ResponseWriter, r *http.Request,
+ methods map[string]HandlerFunc) {
+
+ for m := range methods {
+ w.Header().Add("Allow", m)
+ }
+
+ w.WriteHeader(http.StatusMethodNotAllowed)
+}
+
+func New() *TreeMux {
+ tm := &TreeMux{
+ root: &node{path: "/"},
+ NotFoundHandler: http.NotFound,
+ MethodNotAllowedHandler: MethodNotAllowedHandler,
+ HeadCanUseGet: true,
+ RedirectTrailingSlash: true,
+ RedirectCleanPath: true,
+ RedirectBehavior: Redirect301,
+ RedirectMethodBehavior: make(map[string]RedirectBehavior),
+ PathSource: RequestURI,
+ EscapeAddedRoutes: false,
+ }
+ tm.Group.mux = tm
+ return tm
+}
diff --git a/vendor/github.com/dimfeld/httptreemux/tree.go b/vendor/github.com/dimfeld/httptreemux/tree.go
new file mode 100644
index 000000000..530d42784
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/tree.go
@@ -0,0 +1,340 @@
+package httptreemux
+
+import (
+ "fmt"
+ "strings"
+)
+
+type node struct {
+ path string
+
+ priority int
+
+ // The list of static children to check.
+ staticIndices []byte
+ staticChild []*node
+
+ // If none of the above match, check the wildcard children
+ wildcardChild *node
+
+ // If none of the above match, then we use the catch-all, if applicable.
+ catchAllChild *node
+
+ // Data for the node is below.
+
+ addSlash bool
+ isCatchAll bool
+ // If true, the head handler was set implicitly, so let it also be set explicitly.
+ implicitHead bool
+ // If this node is the end of the URL, then call the handler, if applicable.
+ leafHandler map[string]HandlerFunc
+
+ // The names of the parameters to apply.
+ leafWildcardNames []string
+}
+
+func (n *node) sortStaticChild(i int) {
+ for i > 0 && n.staticChild[i].priority > n.staticChild[i-1].priority {
+ n.staticChild[i], n.staticChild[i-1] = n.staticChild[i-1], n.staticChild[i]
+ n.staticIndices[i], n.staticIndices[i-1] = n.staticIndices[i-1], n.staticIndices[i]
+ i -= 1
+ }
+}
+
+func (n *node) setHandler(verb string, handler HandlerFunc, implicitHead bool) {
+ if n.leafHandler == nil {
+ n.leafHandler = make(map[string]HandlerFunc)
+ }
+ _, ok := n.leafHandler[verb]
+ if ok && (verb != "HEAD" || !n.implicitHead) {
+ panic(fmt.Sprintf("%s already handles %s", n.path, verb))
+ }
+ n.leafHandler[verb] = handler
+
+ if verb == "HEAD" {
+ n.implicitHead = implicitHead
+ }
+}
+
+func (n *node) addPath(path string, wildcards []string, inStaticToken bool) *node {
+ leaf := len(path) == 0
+ if leaf {
+ if wildcards != nil {
+ // Make sure the current wildcards are the same as the old ones.
+ // If not then we have an ambiguous path.
+ if n.leafWildcardNames != nil {
+ if len(n.leafWildcardNames) != len(wildcards) {
+ // This should never happen.
+ panic("Reached leaf node with differing wildcard array length. Please report this as a bug.")
+ }
+
+ for i := 0; i < len(wildcards); i++ {
+ if n.leafWildcardNames[i] != wildcards[i] {
+ panic(fmt.Sprintf("Wildcards %v are ambiguous with wildcards %v",
+ n.leafWildcardNames, wildcards))
+ }
+ }
+ } else {
+ // No wildcards yet, so just add the existing set.
+ n.leafWildcardNames = wildcards
+ }
+ }
+
+ return n
+ }
+
+ c := path[0]
+ nextSlash := strings.Index(path, "/")
+ var thisToken string
+ var tokenEnd int
+
+ if c == '/' {
+ // Done processing the previous token, so reset inStaticToken to false.
+ thisToken = "/"
+ tokenEnd = 1
+ } else if nextSlash == -1 {
+ thisToken = path
+ tokenEnd = len(path)
+ } else {
+ thisToken = path[0:nextSlash]
+ tokenEnd = nextSlash
+ }
+ remainingPath := path[tokenEnd:]
+
+ if c == '*' && !inStaticToken {
+ // Token starts with a *, so it's a catch-all
+ thisToken = thisToken[1:]
+ if n.catchAllChild == nil {
+ n.catchAllChild = &node{path: thisToken, isCatchAll: true}
+ }
+
+ if path[1:] != n.catchAllChild.path {
+ panic(fmt.Sprintf("Catch-all name in %s doesn't match %s. You probably tried to define overlapping catchalls",
+ path, n.catchAllChild.path))
+ }
+
+ if nextSlash != -1 {
+ panic("/ after catch-all found in " + path)
+ }
+
+ if wildcards == nil {
+ wildcards = []string{thisToken}
+ } else {
+ wildcards = append(wildcards, thisToken)
+ }
+ n.catchAllChild.leafWildcardNames = wildcards
+
+ return n.catchAllChild
+ } else if c == ':' && !inStaticToken {
+ // Token starts with a :
+ thisToken = thisToken[1:]
+
+ if wildcards == nil {
+ wildcards = []string{thisToken}
+ } else {
+ wildcards = append(wildcards, thisToken)
+ }
+
+ if n.wildcardChild == nil {
+ n.wildcardChild = &node{path: "wildcard"}
+ }
+
+ return n.wildcardChild.addPath(remainingPath, wildcards, false)
+
+ } else {
+ // if strings.ContainsAny(thisToken, ":*") {
+ // panic("* or : in middle of path component " + path)
+ // }
+
+ unescaped := false
+ if len(thisToken) >= 2 && !inStaticToken {
+ if thisToken[0] == '\\' && (thisToken[1] == '*' || thisToken[1] == ':' || thisToken[1] == '\\') {
+ // The token starts with a character escaped by a backslash. Drop the backslash.
+ c = thisToken[1]
+ thisToken = thisToken[1:]
+ unescaped = true
+ }
+ }
+
+ // Set inStaticToken to ensure that the rest of this token is not mistaken
+ // for a wildcard if a prefix split occurs at a '*' or ':'.
+ inStaticToken = (c != '/')
+
+ // Do we have an existing node that starts with the same letter?
+ for i, index := range n.staticIndices {
+ if c == index {
+ // Yes. Split it based on the common prefix of the existing
+ // node and the new one.
+ child, prefixSplit := n.splitCommonPrefix(i, thisToken)
+
+ child.priority++
+ n.sortStaticChild(i)
+ if unescaped {
+ // Account for the removed backslash.
+ prefixSplit++
+ }
+ return child.addPath(path[prefixSplit:], wildcards, inStaticToken)
+ }
+ }
+
+ // No existing node starting with this letter, so create it.
+ child := &node{path: thisToken}
+
+ if n.staticIndices == nil {
+ n.staticIndices = []byte{c}
+ n.staticChild = []*node{child}
+ } else {
+ n.staticIndices = append(n.staticIndices, c)
+ n.staticChild = append(n.staticChild, child)
+ }
+ return child.addPath(remainingPath, wildcards, inStaticToken)
+ }
+}
+
+func (n *node) splitCommonPrefix(existingNodeIndex int, path string) (*node, int) {
+ childNode := n.staticChild[existingNodeIndex]
+
+ if strings.HasPrefix(path, childNode.path) {
+ // No split needs to be done. Rather, the new path shares the entire
+ // prefix with the existing node, so the new node is just a child of
+ // the existing one. Or the new path is the same as the existing path,
+ // which means that we just move on to the next token. Either way,
+ // this return accomplishes that
+ return childNode, len(childNode.path)
+ }
+
+ var i int
+ // Find the length of the common prefix of the child node and the new path.
+ for i = range childNode.path {
+ if i == len(path) {
+ break
+ }
+ if path[i] != childNode.path[i] {
+ break
+ }
+ }
+
+ commonPrefix := path[0:i]
+ childNode.path = childNode.path[i:]
+
+ // Create a new intermediary node in the place of the existing node, with
+ // the existing node as a child.
+ newNode := &node{
+ path: commonPrefix,
+ priority: childNode.priority,
+ // Index is the first letter of the non-common part of the path.
+ staticIndices: []byte{childNode.path[0]},
+ staticChild: []*node{childNode},
+ }
+ n.staticChild[existingNodeIndex] = newNode
+
+ return newNode, i
+}
+
+func (n *node) search(method, path string) (found *node, handler HandlerFunc, params []string) {
+ // if test != nil {
+ // test.Logf("Searching for %s in %s", path, n.dumpTree("", ""))
+ // }
+ pathLen := len(path)
+ if pathLen == 0 {
+ if len(n.leafHandler) == 0 {
+ return nil, nil, nil
+ } else {
+ return n, n.leafHandler[method], nil
+ }
+ }
+
+ // First see if this matches a static token.
+ firstChar := path[0]
+ for i, staticIndex := range n.staticIndices {
+ if staticIndex == firstChar {
+ child := n.staticChild[i]
+ childPathLen := len(child.path)
+ if pathLen >= childPathLen && child.path == path[:childPathLen] {
+ nextPath := path[childPathLen:]
+ found, handler, params = child.search(method, nextPath)
+ }
+ break
+ }
+ }
+
+ // If we found a node and it had a valid handler, then return here. Otherwise
+ // let's remember that we found this one, but look for a better match.
+ if handler != nil {
+ return
+ }
+
+ if n.wildcardChild != nil {
+ // Didn't find a static token, so check for a wildcard.
+ nextSlash := strings.IndexByte(path, '/')
+ if nextSlash < 0 {
+ nextSlash = pathLen
+ }
+
+ thisToken := path[0:nextSlash]
+ nextToken := path[nextSlash:]
+
+ if len(thisToken) > 0 { // Don't match on empty tokens.
+ wcNode, wcHandler, wcParams := n.wildcardChild.search(method, nextToken)
+ if wcHandler != nil || (found == nil && wcNode != nil) {
+ unescaped, err := unescape(thisToken)
+ if err != nil {
+ unescaped = thisToken
+ }
+
+ if wcParams == nil {
+ wcParams = []string{unescaped}
+ } else {
+ wcParams = append(wcParams, unescaped)
+ }
+
+ if wcHandler != nil {
+ return wcNode, wcHandler, wcParams
+ }
+
+ // Didn't actually find a handler here, so remember that we
+ // found a node but also see if we can fall through to the
+ // catchall.
+ found = wcNode
+ handler = wcHandler
+ params = wcParams
+ }
+ }
+ }
+
+ catchAllChild := n.catchAllChild
+ if catchAllChild != nil {
+ // Hit the catchall, so just assign the whole remaining path if it
+ // has a matching handler.
+ handler = catchAllChild.leafHandler[method]
+ // Found a handler, or we found a catchall node without a handler.
+ // Either way, return it since there's nothing left to check after this.
+ if handler != nil || found == nil {
+ unescaped, err := unescape(path)
+ if err != nil {
+ unescaped = path
+ }
+
+ return catchAllChild, handler, []string{unescaped}
+ }
+
+ }
+
+ return found, handler, params
+}
+
+func (n *node) dumpTree(prefix, nodeType string) string {
+ line := fmt.Sprintf("%s %02d %s%s [%d] %v wildcards %v\n", prefix, n.priority, nodeType, n.path,
+ len(n.staticChild), n.leafHandler, n.leafWildcardNames)
+ prefix += " "
+ for _, node := range n.staticChild {
+ line += node.dumpTree(prefix, "")
+ }
+ if n.wildcardChild != nil {
+ line += n.wildcardChild.dumpTree(prefix, ":")
+ }
+ if n.catchAllChild != nil {
+ line += n.catchAllChild.dumpTree(prefix, "*")
+ }
+ return line
+}
diff --git a/vendor/github.com/dimfeld/httptreemux/treemux_16.go b/vendor/github.com/dimfeld/httptreemux/treemux_16.go
new file mode 100644
index 000000000..6bd5f2e97
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/treemux_16.go
@@ -0,0 +1,86 @@
+// +build !go1.7
+
+package httptreemux
+
+import (
+ "net/http"
+ "sync"
+)
+
+type TreeMux struct {
+ root *node
+ mutex sync.RWMutex
+
+ Group
+
+ // The default PanicHandler just returns a 500 code.
+ PanicHandler PanicHandler
+
+ // The default NotFoundHandler is http.NotFound.
+ NotFoundHandler func(w http.ResponseWriter, r *http.Request)
+
+ // Any OPTIONS request that matches a path without its own OPTIONS handler will use this handler,
+ // if set, instead of calling MethodNotAllowedHandler.
+ OptionsHandler HandlerFunc
+
+ // MethodNotAllowedHandler is called when a pattern matches, but that
+ // pattern does not have a handler for the requested method. The default
+ // handler just writes the status code http.StatusMethodNotAllowed and adds
+ // the required Allowed header.
+ // The methods parameter contains the map of each method to the corresponding
+ // handler function.
+ MethodNotAllowedHandler func(w http.ResponseWriter, r *http.Request,
+ methods map[string]HandlerFunc)
+
+ // HeadCanUseGet allows the router to use the GET handler to respond to
+ // HEAD requests if no explicit HEAD handler has been added for the
+ // matching pattern. This is true by default.
+ HeadCanUseGet bool
+
+ // RedirectCleanPath allows the router to try clean the current request path,
+ // if no handler is registered for it, using CleanPath from github.com/dimfeld/httppath.
+ // This is true by default.
+ RedirectCleanPath bool
+
+ // RedirectTrailingSlash enables automatic redirection in case router doesn't find a matching route
+ // for the current request path but a handler for the path with or without the trailing
+ // slash exists. This is true by default.
+ RedirectTrailingSlash bool
+
+ // RemoveCatchAllTrailingSlash removes the trailing slash when a catch-all pattern
+ // is matched, if set to true. By default, catch-all paths are never redirected.
+ RemoveCatchAllTrailingSlash bool
+
+ // RedirectBehavior sets the default redirect behavior when RedirectTrailingSlash or
+ // RedirectCleanPath are true. The default value is Redirect301.
+ RedirectBehavior RedirectBehavior
+
+ // RedirectMethodBehavior overrides the default behavior for a particular HTTP method.
+ // The key is the method name, and the value is the behavior to use for that method.
+ RedirectMethodBehavior map[string]RedirectBehavior
+
+ // PathSource determines from where the router gets its path to search.
+ // By default it pulls the data from the RequestURI member, but this can
+ // be overridden to use URL.Path instead.
+ //
+ // There is a small tradeoff here. Using RequestURI allows the router to handle
+ // encoded slashes (i.e. %2f) in the URL properly, while URL.Path provides
+ // better compatibility with some utility functions in the http
+ // library that modify the Request before passing it to the router.
+ PathSource PathSource
+
+ // EscapeAddedRoutes controls URI escaping behavior when adding a route to the tree.
+ // If set to true, the router will add both the route as originally passed, and
+ // a version passed through URL.EscapedPath. This behavior is disabled by default.
+ EscapeAddedRoutes bool
+
+ // SafeAddRoutesWhileRunning tells the router to protect all accesses to the tree with an RWMutex. This is only needed
+ // if you are going to add routes after the router has already begun serving requests. There is a potential
+ // performance penalty at high load.
+ SafeAddRoutesWhileRunning bool
+}
+
+func (t *TreeMux) setDefaultRequestContext(r *http.Request) *http.Request {
+ // Nothing to do on Go 1.6 and before
+ return r
+}
diff --git a/vendor/github.com/dimfeld/httptreemux/treemux_17.go b/vendor/github.com/dimfeld/httptreemux/treemux_17.go
new file mode 100644
index 000000000..a80a500f3
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/treemux_17.go
@@ -0,0 +1,149 @@
+// +build go1.7
+
+package httptreemux
+
+import (
+ "context"
+ "net/http"
+ "sync"
+)
+
+type TreeMux struct {
+ root *node
+ mutex sync.RWMutex
+
+ Group
+
+ // The default PanicHandler just returns a 500 code.
+ PanicHandler PanicHandler
+
+ // The default NotFoundHandler is http.NotFound.
+ NotFoundHandler func(w http.ResponseWriter, r *http.Request)
+
+ // Any OPTIONS request that matches a path without its own OPTIONS handler will use this handler,
+ // if set, instead of calling MethodNotAllowedHandler.
+ OptionsHandler HandlerFunc
+
+ // MethodNotAllowedHandler is called when a pattern matches, but that
+ // pattern does not have a handler for the requested method. The default
+ // handler just writes the status code http.StatusMethodNotAllowed and adds
+ // the required Allowed header.
+ // The methods parameter contains the map of each method to the corresponding
+ // handler function.
+ MethodNotAllowedHandler func(w http.ResponseWriter, r *http.Request,
+ methods map[string]HandlerFunc)
+
+ // HeadCanUseGet allows the router to use the GET handler to respond to
+ // HEAD requests if no explicit HEAD handler has been added for the
+ // matching pattern. This is true by default.
+ HeadCanUseGet bool
+
+ // RedirectCleanPath allows the router to try clean the current request path,
+ // if no handler is registered for it, using CleanPath from github.com/dimfeld/httppath.
+ // This is true by default.
+ RedirectCleanPath bool
+
+ // RedirectTrailingSlash enables automatic redirection in case router doesn't find a matching route
+ // for the current request path but a handler for the path with or without the trailing
+ // slash exists. This is true by default.
+ RedirectTrailingSlash bool
+
+ // RemoveCatchAllTrailingSlash removes the trailing slash when a catch-all pattern
+ // is matched, if set to true. By default, catch-all paths are never redirected.
+ RemoveCatchAllTrailingSlash bool
+
+ // RedirectBehavior sets the default redirect behavior when RedirectTrailingSlash or
+ // RedirectCleanPath are true. The default value is Redirect301.
+ RedirectBehavior RedirectBehavior
+
+ // RedirectMethodBehavior overrides the default behavior for a particular HTTP method.
+ // The key is the method name, and the value is the behavior to use for that method.
+ RedirectMethodBehavior map[string]RedirectBehavior
+
+ // PathSource determines from where the router gets its path to search.
+ // By default it pulls the data from the RequestURI member, but this can
+ // be overridden to use URL.Path instead.
+ //
+ // There is a small tradeoff here. Using RequestURI allows the router to handle
+ // encoded slashes (i.e. %2f) in the URL properly, while URL.Path provides
+ // better compatibility with some utility functions in the http
+ // library that modify the Request before passing it to the router.
+ PathSource PathSource
+
+ // EscapeAddedRoutes controls URI escaping behavior when adding a route to the tree.
+ // If set to true, the router will add both the route as originally passed, and
+ // a version passed through URL.EscapedPath. This behavior is disabled by default.
+ EscapeAddedRoutes bool
+
+ // If present, override the default context with this one.
+ DefaultContext context.Context
+
+ // SafeAddRoutesWhileRunning tells the router to protect all accesses to the tree with an RWMutex. This is only needed
+ // if you are going to add routes after the router has already begun serving requests. There is a potential
+ // performance penalty at high load.
+ SafeAddRoutesWhileRunning bool
+}
+
+func (t *TreeMux) setDefaultRequestContext(r *http.Request) *http.Request {
+ if t.DefaultContext != nil {
+ r = r.WithContext(t.DefaultContext)
+ }
+
+ return r
+}
+
+type ContextMux struct {
+ *TreeMux
+ *ContextGroup
+}
+
+// NewContextMux returns a TreeMux preconfigured to work with standard http
+// Handler functions and context objects.
+func NewContextMux() *ContextMux {
+ mux := New()
+ cg := mux.UsingContext()
+
+ return &ContextMux{
+ TreeMux: mux,
+ ContextGroup: cg,
+ }
+}
+
+func (cm *ContextMux) NewGroup(path string) *ContextGroup {
+ return cm.ContextGroup.NewGroup(path)
+}
+
+// GET is convenience method for handling GET requests on a context group.
+func (cm *ContextMux) GET(path string, handler http.HandlerFunc) {
+ cm.ContextGroup.Handle("GET", path, handler)
+}
+
+// POST is convenience method for handling POST requests on a context group.
+func (cm *ContextMux) POST(path string, handler http.HandlerFunc) {
+ cm.ContextGroup.Handle("POST", path, handler)
+}
+
+// PUT is convenience method for handling PUT requests on a context group.
+func (cm *ContextMux) PUT(path string, handler http.HandlerFunc) {
+ cm.ContextGroup.Handle("PUT", path, handler)
+}
+
+// DELETE is convenience method for handling DELETE requests on a context group.
+func (cm *ContextMux) DELETE(path string, handler http.HandlerFunc) {
+ cm.ContextGroup.Handle("DELETE", path, handler)
+}
+
+// PATCH is convenience method for handling PATCH requests on a context group.
+func (cm *ContextMux) PATCH(path string, handler http.HandlerFunc) {
+ cm.ContextGroup.Handle("PATCH", path, handler)
+}
+
+// HEAD is convenience method for handling HEAD requests on a context group.
+func (cm *ContextMux) HEAD(path string, handler http.HandlerFunc) {
+ cm.ContextGroup.Handle("HEAD", path, handler)
+}
+
+// OPTIONS is convenience method for handling OPTIONS requests on a context group.
+func (cm *ContextMux) OPTIONS(path string, handler http.HandlerFunc) {
+ cm.ContextGroup.Handle("OPTIONS", path, handler)
+}
diff --git a/vendor/github.com/dimfeld/httptreemux/unescape_17.go b/vendor/github.com/dimfeld/httptreemux/unescape_17.go
new file mode 100644
index 000000000..5d6d08787
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/unescape_17.go
@@ -0,0 +1,9 @@
+// +build !go1.8
+
+package httptreemux
+
+import "net/url"
+
+func unescape(path string) (string, error) {
+ return url.QueryUnescape(path)
+}
diff --git a/vendor/github.com/dimfeld/httptreemux/unescape_18.go b/vendor/github.com/dimfeld/httptreemux/unescape_18.go
new file mode 100644
index 000000000..254dfcdd7
--- /dev/null
+++ b/vendor/github.com/dimfeld/httptreemux/unescape_18.go
@@ -0,0 +1,9 @@
+// +build go1.8
+
+package httptreemux
+
+import "net/url"
+
+func unescape(path string) (string, error) {
+ return url.PathUnescape(path)
+}
diff --git a/vendor/github.com/juju/errors/.gitignore b/vendor/github.com/juju/errors/.gitignore
new file mode 100644
index 000000000..836562412
--- /dev/null
+++ b/vendor/github.com/juju/errors/.gitignore
@@ -0,0 +1,23 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
diff --git a/vendor/github.com/juju/errors/LICENSE b/vendor/github.com/juju/errors/LICENSE
new file mode 100644
index 000000000..ade9307b3
--- /dev/null
+++ b/vendor/github.com/juju/errors/LICENSE
@@ -0,0 +1,191 @@
+All files in this repository are licensed as follows. If you contribute
+to this repository, it is assumed that you license your contribution
+under the same license unless you state otherwise.
+
+All files Copyright (C) 2015 Canonical Ltd. unless otherwise specified in the file.
+
+This software is licensed under the LGPLv3, included below.
+
+As a special exception to the GNU Lesser General Public License version 3
+("LGPL3"), the copyright holders of this Library give you permission to
+convey to a third party a Combined Work that links statically or dynamically
+to this Library without providing any Minimal Corresponding Source or
+Minimal Application Code as set out in 4d or providing the installation
+information set out in section 4e, provided that you comply with the other
+provisions of LGPL3 and provided that you meet, for the Application the
+terms and conditions of the license(s) which apply to the Application.
+
+Except as stated in this special exception, the provisions of LGPL3 will
+continue to comply in full to this Library. If you modify this Library, you
+may apply this exception to your version of this Library, but you are not
+obliged to do so. If you do not wish to do so, delete this exception
+statement from your version. This exception does not (and cannot) modify any
+license terms which apply to the Application, with which you must still
+comply.
+
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc.
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+ This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+ 0. Additional Definitions.
+
+ As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+ "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+ An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+ A "Combined Work" is a work produced by combining or linking an
+Application with the Library. The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+ The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+ The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+ 1. Exception to Section 3 of the GNU GPL.
+
+ You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+ 2. Conveying Modified Versions.
+
+ If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+ a) under this License, provided that you make a good faith effort to
+ ensure that, in the event an Application does not supply the
+ function or data, the facility still operates, and performs
+ whatever part of its purpose remains meaningful, or
+
+ b) under the GNU GPL, with none of the additional permissions of
+ this License applicable to that copy.
+
+ 3. Object Code Incorporating Material from Library Header Files.
+
+ The object code form of an Application may incorporate material from
+a header file that is part of the Library. You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+ a) Give prominent notice with each copy of the object code that the
+ Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the object code with a copy of the GNU GPL and this license
+ document.
+
+ 4. Combined Works.
+
+ You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+ a) Give prominent notice with each copy of the Combined Work that
+ the Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the Combined Work with a copy of the GNU GPL and this license
+ document.
+
+ c) For a Combined Work that displays copyright notices during
+ execution, include the copyright notice for the Library among
+ these notices, as well as a reference directing the user to the
+ copies of the GNU GPL and this license document.
+
+ d) Do one of the following:
+
+ 0) Convey the Minimal Corresponding Source under the terms of this
+ License, and the Corresponding Application Code in a form
+ suitable for, and under terms that permit, the user to
+ recombine or relink the Application with a modified version of
+ the Linked Version to produce a modified Combined Work, in the
+ manner specified by section 6 of the GNU GPL for conveying
+ Corresponding Source.
+
+ 1) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (a) uses at run time
+ a copy of the Library already present on the user's computer
+ system, and (b) will operate properly with a modified version
+ of the Library that is interface-compatible with the Linked
+ Version.
+
+ e) Provide Installation Information, but only if you would otherwise
+ be required to provide such information under section 6 of the
+ GNU GPL, and only to the extent that such information is
+ necessary to install and execute a modified version of the
+ Combined Work produced by recombining or relinking the
+ Application with a modified version of the Linked Version. (If
+ you use option 4d0, the Installation Information must accompany
+ the Minimal Corresponding Source and Corresponding Application
+ Code. If you use option 4d1, you must provide the Installation
+ Information in the manner specified by section 6 of the GNU GPL
+ for conveying Corresponding Source.)
+
+ 5. Combined Libraries.
+
+ You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+ a) Accompany the combined library with a copy of the same work based
+ on the Library, uncombined with any other library facilities,
+ conveyed under the terms of this License.
+
+ b) Give prominent notice with the combined library that part of it
+ is a work based on the Library, and explaining where to find the
+ accompanying uncombined form of the same work.
+
+ 6. Revised Versions of the GNU Lesser General Public License.
+
+ The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+ If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
diff --git a/vendor/github.com/juju/errors/Makefile b/vendor/github.com/juju/errors/Makefile
new file mode 100644
index 000000000..41836d684
--- /dev/null
+++ b/vendor/github.com/juju/errors/Makefile
@@ -0,0 +1,24 @@
+PROJECT := github.com/juju/errors
+
+.PHONY: check-licence check-go check docs
+
+check: check-licence check-go
+ go test $(PROJECT)/...
+
+check-licence:
+ @(fgrep -rl "Licensed under the LGPLv3" --exclude *.s .;\
+ fgrep -rl "MACHINE GENERATED BY THE COMMAND ABOVE; DO NOT EDIT" --exclude *.s .;\
+ find . -name "*.go") | sed -e 's,\./,,' | sort | uniq -u | \
+ xargs -I {} echo FAIL: licence missed: {}
+
+check-go:
+ $(eval GOFMT := $(strip $(shell gofmt -l .| sed -e "s/^/ /g")))
+ @(if [ x$(GOFMT) != x"" ]; then \
+ echo go fmt is sad: $(GOFMT); \
+ exit 1; \
+ fi )
+ @(go tool vet -all -composites=false -copylocks=false .)
+
+docs:
+ godoc2md github.com/juju/errors > README.md
+ sed -i 's|\[godoc-link-here\]|[](https://godoc.org/github.com/juju/errors)|' README.md
diff --git a/vendor/github.com/juju/errors/README.md b/vendor/github.com/juju/errors/README.md
new file mode 100644
index 000000000..4584d100e
--- /dev/null
+++ b/vendor/github.com/juju/errors/README.md
@@ -0,0 +1,707 @@
+
+# errors
+ import "github.com/juju/errors"
+
+[](https://godoc.org/github.com/juju/errors)
+
+The juju/errors provides an easy way to annotate errors without losing the
+orginal error context.
+
+The exported `New` and `Errorf` functions are designed to replace the
+`errors.New` and `fmt.Errorf` functions respectively. The same underlying
+error is there, but the package also records the location at which the error
+was created.
+
+A primary use case for this library is to add extra context any time an
+error is returned from a function.
+
+
+ if err := SomeFunc(); err != nil {
+ return err
+ }
+
+This instead becomes:
+
+
+ if err := SomeFunc(); err != nil {
+ return errors.Trace(err)
+ }
+
+which just records the file and line number of the Trace call, or
+
+
+ if err := SomeFunc(); err != nil {
+ return errors.Annotate(err, "more context")
+ }
+
+which also adds an annotation to the error.
+
+When you want to check to see if an error is of a particular type, a helper
+function is normally exported by the package that returned the error, like the
+`os` package does. The underlying cause of the error is available using the
+`Cause` function.
+
+
+ os.IsNotExist(errors.Cause(err))
+
+The result of the `Error()` call on an annotated error is the annotations joined
+with colons, then the result of the `Error()` method for the underlying error
+that was the cause.
+
+
+ err := errors.Errorf("original")
+ err = errors.Annotatef(err, "context")
+ err = errors.Annotatef(err, "more context")
+ err.Error() -> "more context: context: original"
+
+Obviously recording the file, line and functions is not very useful if you
+cannot get them back out again.
+
+
+ errors.ErrorStack(err)
+
+will return something like:
+
+
+ first error
+ github.com/juju/errors/annotation_test.go:193:
+ github.com/juju/errors/annotation_test.go:194: annotation
+ github.com/juju/errors/annotation_test.go:195:
+ github.com/juju/errors/annotation_test.go:196: more context
+ github.com/juju/errors/annotation_test.go:197:
+
+The first error was generated by an external system, so there was no location
+associated. The second, fourth, and last lines were generated with Trace calls,
+and the other two through Annotate.
+
+Sometimes when responding to an error you want to return a more specific error
+for the situation.
+
+
+ if err := FindField(field); err != nil {
+ return errors.Wrap(err, errors.NotFoundf(field))
+ }
+
+This returns an error where the complete error stack is still available, and
+`errors.Cause()` will return the `NotFound` error.
+
+
+
+
+
+
+## func AlreadyExistsf
+``` go
+func AlreadyExistsf(format string, args ...interface{}) error
+```
+AlreadyExistsf returns an error which satisfies IsAlreadyExists().
+
+
+## func Annotate
+``` go
+func Annotate(other error, message string) error
+```
+Annotate is used to add extra context to an existing error. The location of
+the Annotate call is recorded with the annotations. The file, line and
+function are also recorded.
+
+For example:
+
+
+ if err := SomeFunc(); err != nil {
+ return errors.Annotate(err, "failed to frombulate")
+ }
+
+
+## func Annotatef
+``` go
+func Annotatef(other error, format string, args ...interface{}) error
+```
+Annotatef is used to add extra context to an existing error. The location of
+the Annotate call is recorded with the annotations. The file, line and
+function are also recorded.
+
+For example:
+
+
+ if err := SomeFunc(); err != nil {
+ return errors.Annotatef(err, "failed to frombulate the %s", arg)
+ }
+
+
+## func BadRequestf
+``` go
+func BadRequestf(format string, args ...interface{}) error
+```
+BadRequestf returns an error which satisfies IsBadRequest().
+
+
+## func Cause
+``` go
+func Cause(err error) error
+```
+Cause returns the cause of the given error. This will be either the
+original error, or the result of a Wrap or Mask call.
+
+Cause is the usual way to diagnose errors that may have been wrapped by
+the other errors functions.
+
+
+## func DeferredAnnotatef
+``` go
+func DeferredAnnotatef(err *error, format string, args ...interface{})
+```
+DeferredAnnotatef annotates the given error (when it is not nil) with the given
+format string and arguments (like fmt.Sprintf). If *err is nil, DeferredAnnotatef
+does nothing. This method is used in a defer statement in order to annotate any
+resulting error with the same message.
+
+For example:
+
+
+ defer DeferredAnnotatef(&err, "failed to frombulate the %s", arg)
+
+
+## func Details
+``` go
+func Details(err error) string
+```
+Details returns information about the stack of errors wrapped by err, in
+the format:
+
+
+ [{filename:99: error one} {otherfile:55: cause of error one}]
+
+This is a terse alternative to ErrorStack as it returns a single line.
+
+
+## func ErrorStack
+``` go
+func ErrorStack(err error) string
+```
+ErrorStack returns a string representation of the annotated error. If the
+error passed as the parameter is not an annotated error, the result is
+simply the result of the Error() method on that error.
+
+If the error is an annotated error, a multi-line string is returned where
+each line represents one entry in the annotation stack. The full filename
+from the call stack is used in the output.
+
+
+ first error
+ github.com/juju/errors/annotation_test.go:193:
+ github.com/juju/errors/annotation_test.go:194: annotation
+ github.com/juju/errors/annotation_test.go:195:
+ github.com/juju/errors/annotation_test.go:196: more context
+ github.com/juju/errors/annotation_test.go:197:
+
+
+## func Errorf
+``` go
+func Errorf(format string, args ...interface{}) error
+```
+Errorf creates a new annotated error and records the location that the
+error is created. This should be a drop in replacement for fmt.Errorf.
+
+For example:
+
+
+ return errors.Errorf("validation failed: %s", message)
+
+
+## func Forbiddenf
+``` go
+func Forbiddenf(format string, args ...interface{}) error
+```
+Forbiddenf returns an error which satistifes IsForbidden()
+
+
+## func IsAlreadyExists
+``` go
+func IsAlreadyExists(err error) bool
+```
+IsAlreadyExists reports whether the error was created with
+AlreadyExistsf() or NewAlreadyExists().
+
+
+## func IsBadRequest
+``` go
+func IsBadRequest(err error) bool
+```
+IsBadRequest reports whether err was created with BadRequestf() or
+NewBadRequest().
+
+
+## func IsForbidden
+``` go
+func IsForbidden(err error) bool
+```
+IsForbidden reports whether err was created with Forbiddenf() or
+NewForbidden().
+
+
+## func IsMethodNotAllowed
+``` go
+func IsMethodNotAllowed(err error) bool
+```
+IsMethodNotAllowed reports whether err was created with MethodNotAllowedf() or
+NewMethodNotAllowed().
+
+
+## func IsNotAssigned
+``` go
+func IsNotAssigned(err error) bool
+```
+IsNotAssigned reports whether err was created with NotAssignedf() or
+NewNotAssigned().
+
+
+## func IsNotFound
+``` go
+func IsNotFound(err error) bool
+```
+IsNotFound reports whether err was created with NotFoundf() or
+NewNotFound().
+
+
+## func IsNotImplemented
+``` go
+func IsNotImplemented(err error) bool
+```
+IsNotImplemented reports whether err was created with
+NotImplementedf() or NewNotImplemented().
+
+
+## func IsNotProvisioned
+``` go
+func IsNotProvisioned(err error) bool
+```
+IsNotProvisioned reports whether err was created with NotProvisionedf() or
+NewNotProvisioned().
+
+
+## func IsNotSupported
+``` go
+func IsNotSupported(err error) bool
+```
+IsNotSupported reports whether the error was created with
+NotSupportedf() or NewNotSupported().
+
+
+## func IsNotValid
+``` go
+func IsNotValid(err error) bool
+```
+IsNotValid reports whether the error was created with NotValidf() or
+NewNotValid().
+
+
+## func IsUnauthorized
+``` go
+func IsUnauthorized(err error) bool
+```
+IsUnauthorized reports whether err was created with Unauthorizedf() or
+NewUnauthorized().
+
+
+## func IsUserNotFound
+``` go
+func IsUserNotFound(err error) bool
+```
+IsUserNotFound reports whether err was created with UserNotFoundf() or
+NewUserNotFound().
+
+
+## func Mask
+``` go
+func Mask(other error) error
+```
+Mask hides the underlying error type, and records the location of the masking.
+
+
+## func Maskf
+``` go
+func Maskf(other error, format string, args ...interface{}) error
+```
+Mask masks the given error with the given format string and arguments (like
+fmt.Sprintf), returning a new error that maintains the error stack, but
+hides the underlying error type. The error string still contains the full
+annotations. If you want to hide the annotations, call Wrap.
+
+
+## func MethodNotAllowedf
+``` go
+func MethodNotAllowedf(format string, args ...interface{}) error
+```
+MethodNotAllowedf returns an error which satisfies IsMethodNotAllowed().
+
+
+## func New
+``` go
+func New(message string) error
+```
+New is a drop in replacement for the standard library errors module that records
+the location that the error is created.
+
+For example:
+
+
+ return errors.New("validation failed")
+
+
+## func NewAlreadyExists
+``` go
+func NewAlreadyExists(err error, msg string) error
+```
+NewAlreadyExists returns an error which wraps err and satisfies
+IsAlreadyExists().
+
+
+## func NewBadRequest
+``` go
+func NewBadRequest(err error, msg string) error
+```
+NewBadRequest returns an error which wraps err that satisfies
+IsBadRequest().
+
+
+## func NewForbidden
+``` go
+func NewForbidden(err error, msg string) error
+```
+NewForbidden returns an error which wraps err that satisfies
+IsForbidden().
+
+
+## func NewMethodNotAllowed
+``` go
+func NewMethodNotAllowed(err error, msg string) error
+```
+NewMethodNotAllowed returns an error which wraps err that satisfies
+IsMethodNotAllowed().
+
+
+## func NewNotAssigned
+``` go
+func NewNotAssigned(err error, msg string) error
+```
+NewNotAssigned returns an error which wraps err that satisfies
+IsNotAssigned().
+
+
+## func NewNotFound
+``` go
+func NewNotFound(err error, msg string) error
+```
+NewNotFound returns an error which wraps err that satisfies
+IsNotFound().
+
+
+## func NewNotImplemented
+``` go
+func NewNotImplemented(err error, msg string) error
+```
+NewNotImplemented returns an error which wraps err and satisfies
+IsNotImplemented().
+
+
+## func NewNotProvisioned
+``` go
+func NewNotProvisioned(err error, msg string) error
+```
+NewNotProvisioned returns an error which wraps err that satisfies
+IsNotProvisioned().
+
+
+## func NewNotSupported
+``` go
+func NewNotSupported(err error, msg string) error
+```
+NewNotSupported returns an error which wraps err and satisfies
+IsNotSupported().
+
+
+## func NewNotValid
+``` go
+func NewNotValid(err error, msg string) error
+```
+NewNotValid returns an error which wraps err and satisfies IsNotValid().
+
+
+## func NewUnauthorized
+``` go
+func NewUnauthorized(err error, msg string) error
+```
+NewUnauthorized returns an error which wraps err and satisfies
+IsUnauthorized().
+
+
+## func NewUserNotFound
+``` go
+func NewUserNotFound(err error, msg string) error
+```
+NewUserNotFound returns an error which wraps err and satisfies
+IsUserNotFound().
+
+
+## func NotAssignedf
+``` go
+func NotAssignedf(format string, args ...interface{}) error
+```
+NotAssignedf returns an error which satisfies IsNotAssigned().
+
+
+## func NotFoundf
+``` go
+func NotFoundf(format string, args ...interface{}) error
+```
+NotFoundf returns an error which satisfies IsNotFound().
+
+
+## func NotImplementedf
+``` go
+func NotImplementedf(format string, args ...interface{}) error
+```
+NotImplementedf returns an error which satisfies IsNotImplemented().
+
+
+## func NotProvisionedf
+``` go
+func NotProvisionedf(format string, args ...interface{}) error
+```
+NotProvisionedf returns an error which satisfies IsNotProvisioned().
+
+
+## func NotSupportedf
+``` go
+func NotSupportedf(format string, args ...interface{}) error
+```
+NotSupportedf returns an error which satisfies IsNotSupported().
+
+
+## func NotValidf
+``` go
+func NotValidf(format string, args ...interface{}) error
+```
+NotValidf returns an error which satisfies IsNotValid().
+
+
+## func Trace
+``` go
+func Trace(other error) error
+```
+Trace adds the location of the Trace call to the stack. The Cause of the
+resulting error is the same as the error parameter. If the other error is
+nil, the result will be nil.
+
+For example:
+
+
+ if err := SomeFunc(); err != nil {
+ return errors.Trace(err)
+ }
+
+
+## func Unauthorizedf
+``` go
+func Unauthorizedf(format string, args ...interface{}) error
+```
+Unauthorizedf returns an error which satisfies IsUnauthorized().
+
+
+## func UserNotFoundf
+``` go
+func UserNotFoundf(format string, args ...interface{}) error
+```
+UserNotFoundf returns an error which satisfies IsUserNotFound().
+
+
+## func Wrap
+``` go
+func Wrap(other, newDescriptive error) error
+```
+Wrap changes the Cause of the error. The location of the Wrap call is also
+stored in the error stack.
+
+For example:
+
+
+ if err := SomeFunc(); err != nil {
+ newErr := &packageError{"more context", private_value}
+ return errors.Wrap(err, newErr)
+ }
+
+
+## func Wrapf
+``` go
+func Wrapf(other, newDescriptive error, format string, args ...interface{}) error
+```
+Wrapf changes the Cause of the error, and adds an annotation. The location
+of the Wrap call is also stored in the error stack.
+
+For example:
+
+
+ if err := SomeFunc(); err != nil {
+ return errors.Wrapf(err, simpleErrorType, "invalid value %q", value)
+ }
+
+
+
+## type Err
+``` go
+type Err struct {
+ // contains filtered or unexported fields
+}
+```
+Err holds a description of an error along with information about
+where the error was created.
+
+It may be embedded in custom error types to add extra information that
+this errors package can understand.
+
+
+
+
+
+
+
+
+
+### func NewErr
+``` go
+func NewErr(format string, args ...interface{}) Err
+```
+NewErr is used to return an Err for the purpose of embedding in other
+structures. The location is not specified, and needs to be set with a call
+to SetLocation.
+
+For example:
+
+
+ type FooError struct {
+ errors.Err
+ code int
+ }
+
+ func NewFooError(code int) error {
+ err := &FooError{errors.NewErr("foo"), code}
+ err.SetLocation(1)
+ return err
+ }
+
+
+### func NewErrWithCause
+``` go
+func NewErrWithCause(other error, format string, args ...interface{}) Err
+```
+NewErrWithCause is used to return an Err with cause by other error for the purpose of embedding in other
+structures. The location is not specified, and needs to be set with a call
+to SetLocation.
+
+For example:
+
+
+ type FooError struct {
+ errors.Err
+ code int
+ }
+
+ func (e *FooError) Annotate(format string, args ...interface{}) error {
+ err := &FooError{errors.NewErrWithCause(e.Err, format, args...), e.code}
+ err.SetLocation(1)
+ return err
+ })
+
+
+
+
+### func (\*Err) Cause
+``` go
+func (e *Err) Cause() error
+```
+The Cause of an error is the most recent error in the error stack that
+meets one of these criteria: the original error that was raised; the new
+error that was passed into the Wrap function; the most recently masked
+error; or nil if the error itself is considered the Cause. Normally this
+method is not invoked directly, but instead through the Cause stand alone
+function.
+
+
+
+### func (\*Err) Error
+``` go
+func (e *Err) Error() string
+```
+Error implements error.Error.
+
+
+
+### func (\*Err) Format
+``` go
+func (e *Err) Format(s fmt.State, verb rune)
+```
+Format implements fmt.Formatter
+When printing errors with %+v it also prints the stack trace.
+%#v unsurprisingly will print the real underlying type.
+
+
+
+### func (\*Err) Location
+``` go
+func (e *Err) Location() (filename string, line int)
+```
+Location is the file and line of where the error was most recently
+created or annotated.
+
+
+
+### func (\*Err) Message
+``` go
+func (e *Err) Message() string
+```
+Message returns the message stored with the most recent location. This is
+the empty string if the most recent call was Trace, or the message stored
+with Annotate or Mask.
+
+
+
+### func (\*Err) SetLocation
+``` go
+func (e *Err) SetLocation(callDepth int)
+```
+SetLocation records the source location of the error at callDepth stack
+frames above the call.
+
+
+
+### func (\*Err) StackTrace
+``` go
+func (e *Err) StackTrace() []string
+```
+StackTrace returns one string for each location recorded in the stack of
+errors. The first value is the originating error, with a line for each
+other annotation or tracing of the error.
+
+
+
+### func (\*Err) Underlying
+``` go
+func (e *Err) Underlying() error
+```
+Underlying returns the previous error in the error stack, if any. A client
+should not ever really call this method. It is used to build the error
+stack and should not be introspected by client calls. Or more
+specifically, clients should not depend on anything but the `Cause` of an
+error.
+
+
+
+
+
+
+
+
+
+- - -
+Generated by [godoc2md](http://godoc.org/github.com/davecheney/godoc2md)
\ No newline at end of file
diff --git a/vendor/github.com/juju/errors/dependencies.tsv b/vendor/github.com/juju/errors/dependencies.tsv
new file mode 100644
index 000000000..e32434494
--- /dev/null
+++ b/vendor/github.com/juju/errors/dependencies.tsv
@@ -0,0 +1,5 @@
+github.com/juju/loggo git 8232ab8918d91c72af1a9fb94d3edbe31d88b790 2017-06-05T01:46:07Z
+github.com/juju/testing git 72703b1e95eb8ce4737fd8a3d8496c6b0be280a6 2018-05-17T13:41:05Z
+gopkg.in/check.v1 git 4f90aeace3a26ad7021961c297b22c42160c7b25 2016-01-05T16:49:36Z
+gopkg.in/mgo.v2 git f2b6f6c918c452ad107eec89615f074e3bd80e33 2016-08-18T01:52:18Z
+gopkg.in/yaml.v2 git 1be3d31502d6eabc0dd7ce5b0daab022e14a5538 2017-07-12T05:45:46Z
diff --git a/vendor/github.com/juju/errors/doc.go b/vendor/github.com/juju/errors/doc.go
new file mode 100644
index 000000000..35b119aa3
--- /dev/null
+++ b/vendor/github.com/juju/errors/doc.go
@@ -0,0 +1,81 @@
+// Copyright 2013, 2014 Canonical Ltd.
+// Licensed under the LGPLv3, see LICENCE file for details.
+
+/*
+[godoc-link-here]
+
+The juju/errors package provides an easy way to annotate errors without losing the
+original error context.
+
+The exported `New` and `Errorf` functions are designed to replace the
+`errors.New` and `fmt.Errorf` functions respectively. The same underlying
+error is there, but the package also records the location at which the error
+was created.
+
+A primary use case for this library is to add extra context any time an
+error is returned from a function.
+
+ if err := SomeFunc(); err != nil {
+ return err
+ }
+
+This instead becomes:
+
+ if err := SomeFunc(); err != nil {
+ return errors.Trace(err)
+ }
+
+which just records the file and line number of the Trace call, or
+
+ if err := SomeFunc(); err != nil {
+ return errors.Annotate(err, "more context")
+ }
+
+which also adds an annotation to the error.
+
+When you want to check to see if an error is of a particular type, a helper
+function is normally exported by the package that returned the error, like the
+`os` package does. The underlying cause of the error is available using the
+`Cause` function.
+
+ os.IsNotExist(errors.Cause(err))
+
+The result of the `Error()` call on an annotated error is the annotations joined
+with colons, then the result of the `Error()` method for the underlying error
+that was the cause.
+
+ err := errors.Errorf("original")
+ err = errors.Annotatef(err, "context")
+ err = errors.Annotatef(err, "more context")
+ err.Error() -> "more context: context: original"
+
+Obviously recording the file, line and functions is not very useful if you
+cannot get them back out again.
+
+ errors.ErrorStack(err)
+
+will return something like:
+
+ first error
+ github.com/juju/errors/annotation_test.go:193:
+ github.com/juju/errors/annotation_test.go:194: annotation
+ github.com/juju/errors/annotation_test.go:195:
+ github.com/juju/errors/annotation_test.go:196: more context
+ github.com/juju/errors/annotation_test.go:197:
+
+The first error was generated by an external system, so there was no location
+associated. The second, fourth, and last lines were generated with Trace calls,
+and the other two through Annotate.
+
+Sometimes when responding to an error you want to return a more specific error
+for the situation.
+
+ if err := FindField(field); err != nil {
+ return errors.Wrap(err, errors.NotFoundf(field))
+ }
+
+This returns an error where the complete error stack is still available, and
+`errors.Cause()` will return the `NotFound` error.
+
+*/
+package errors
diff --git a/vendor/github.com/juju/errors/error.go b/vendor/github.com/juju/errors/error.go
new file mode 100644
index 000000000..169e3a4e3
--- /dev/null
+++ b/vendor/github.com/juju/errors/error.go
@@ -0,0 +1,176 @@
+// Copyright 2014 Canonical Ltd.
+// Licensed under the LGPLv3, see LICENCE file for details.
+
+package errors
+
+import (
+ "fmt"
+ "reflect"
+ "runtime"
+)
+
+// Err holds a description of an error along with information about
+// where the error was created.
+//
+// It may be embedded in custom error types to add extra information that
+// this errors package can understand.
+type Err struct {
+ // message holds an annotation of the error.
+ message string
+
+ // cause holds the cause of the error as returned
+ // by the Cause method.
+ cause error
+
+ // previous holds the previous error in the error stack, if any.
+ previous error
+
+ // file and line hold the source code location where the error was
+ // created.
+ file string
+ line int
+}
+
+// NewErr is used to return an Err for the purpose of embedding in other
+// structures. The location is not specified, and needs to be set with a call
+// to SetLocation.
+//
+// For example:
+// type FooError struct {
+// errors.Err
+// code int
+// }
+//
+// func NewFooError(code int) error {
+// err := &FooError{errors.NewErr("foo"), code}
+// err.SetLocation(1)
+// return err
+// }
+func NewErr(format string, args ...interface{}) Err {
+ return Err{
+ message: fmt.Sprintf(format, args...),
+ }
+}
+
+// NewErrWithCause is used to return an Err with cause by other error for the purpose of embedding in other
+// structures. The location is not specified, and needs to be set with a call
+// to SetLocation.
+//
+// For example:
+// type FooError struct {
+// errors.Err
+// code int
+// }
+//
+// func (e *FooError) Annotate(format string, args ...interface{}) error {
+// err := &FooError{errors.NewErrWithCause(e.Err, format, args...), e.code}
+// err.SetLocation(1)
+// return err
+// })
+func NewErrWithCause(other error, format string, args ...interface{}) Err {
+ return Err{
+ message: fmt.Sprintf(format, args...),
+ cause: Cause(other),
+ previous: other,
+ }
+}
+
+// Location is the file and line of where the error was most recently
+// created or annotated.
+func (e *Err) Location() (filename string, line int) {
+ return e.file, e.line
+}
+
+// Underlying returns the previous error in the error stack, if any. A client
+// should not ever really call this method. It is used to build the error
+// stack and should not be introspected by client calls. Or more
+// specifically, clients should not depend on anything but the `Cause` of an
+// error.
+func (e *Err) Underlying() error {
+ return e.previous
+}
+
+// The Cause of an error is the most recent error in the error stack that
+// meets one of these criteria: the original error that was raised; the new
+// error that was passed into the Wrap function; the most recently masked
+// error; or nil if the error itself is considered the Cause. Normally this
+// method is not invoked directly, but instead through the Cause stand alone
+// function.
+func (e *Err) Cause() error {
+ return e.cause
+}
+
+// Message returns the message stored with the most recent location. This is
+// the empty string if the most recent call was Trace, or the message stored
+// with Annotate or Mask.
+func (e *Err) Message() string {
+ return e.message
+}
+
+// Error implements error.Error.
+func (e *Err) Error() string {
+ // We want to walk up the stack of errors showing the annotations
+ // as long as the cause is the same.
+ err := e.previous
+ if !sameError(Cause(err), e.cause) && e.cause != nil {
+ err = e.cause
+ }
+ switch {
+ case err == nil:
+ return e.message
+ case e.message == "":
+ return err.Error()
+ }
+ return fmt.Sprintf("%s: %v", e.message, err)
+}
+
+// Format implements fmt.Formatter
+// When printing errors with %+v it also prints the stack trace.
+// %#v unsurprisingly will print the real underlying type.
+func (e *Err) Format(s fmt.State, verb rune) {
+ switch verb {
+ case 'v':
+ switch {
+ case s.Flag('+'):
+ fmt.Fprintf(s, "%s", ErrorStack(e))
+ return
+ case s.Flag('#'):
+ // avoid infinite recursion by wrapping e into a type
+ // that doesn't implement Formatter.
+ fmt.Fprintf(s, "%#v", (*unformatter)(e))
+ return
+ }
+ fallthrough
+ case 's':
+ fmt.Fprintf(s, "%s", e.Error())
+ case 'q':
+ fmt.Fprintf(s, "%q", e.Error())
+ default:
+ fmt.Fprintf(s, "%%!%c(%T=%s)", verb, e, e.Error())
+ }
+}
+
+// helper for Format
+type unformatter Err
+
+func (unformatter) Format() { /* break the fmt.Formatter interface */ }
+
+// SetLocation records the source location of the error at callDepth stack
+// frames above the call.
+func (e *Err) SetLocation(callDepth int) {
+ _, file, line, _ := runtime.Caller(callDepth + 1)
+ e.file = trimGoPath(file)
+ e.line = line
+}
+
+// StackTrace returns one string for each location recorded in the stack of
+// errors. The first value is the originating error, with a line for each
+// other annotation or tracing of the error.
+func (e *Err) StackTrace() []string {
+ return errorStack(e)
+}
+
+// Ideally we'd have a way to check identity, but deep equals will do.
+func sameError(e1, e2 error) bool {
+ return reflect.DeepEqual(e1, e2)
+}
diff --git a/vendor/github.com/juju/errors/errortypes.go b/vendor/github.com/juju/errors/errortypes.go
new file mode 100644
index 000000000..5faf1e22d
--- /dev/null
+++ b/vendor/github.com/juju/errors/errortypes.go
@@ -0,0 +1,333 @@
+// Copyright 2014 Canonical Ltd.
+// Licensed under the LGPLv3, see LICENCE file for details.
+
+package errors
+
+import (
+ "fmt"
+)
+
+// wrap is a helper to construct an *wrapper.
+func wrap(err error, format, suffix string, args ...interface{}) Err {
+ newErr := Err{
+ message: fmt.Sprintf(format+suffix, args...),
+ previous: err,
+ }
+ newErr.SetLocation(2)
+ return newErr
+}
+
+// timeout represents an error on timeout.
+type timeout struct {
+ Err
+}
+
+// Timeoutf returns an error which satisfies IsTimeout().
+func Timeoutf(format string, args ...interface{}) error {
+ return &timeout{wrap(nil, format, " timeout", args...)}
+}
+
+// NewTimeout returns an error which wraps err that satisfies
+// IsTimeout().
+func NewTimeout(err error, msg string) error {
+ return &timeout{wrap(err, msg, "")}
+}
+
+// IsTimeout reports whether err was created with Timeoutf() or
+// NewTimeout().
+func IsTimeout(err error) bool {
+ err = Cause(err)
+ _, ok := err.(*timeout)
+ return ok
+}
+
+// notFound represents an error when something has not been found.
+type notFound struct {
+ Err
+}
+
+// NotFoundf returns an error which satisfies IsNotFound().
+func NotFoundf(format string, args ...interface{}) error {
+ return ¬Found{wrap(nil, format, " not found", args...)}
+}
+
+// NewNotFound returns an error which wraps err that satisfies
+// IsNotFound().
+func NewNotFound(err error, msg string) error {
+ return ¬Found{wrap(err, msg, "")}
+}
+
+// IsNotFound reports whether err was created with NotFoundf() or
+// NewNotFound().
+func IsNotFound(err error) bool {
+ err = Cause(err)
+ _, ok := err.(*notFound)
+ return ok
+}
+
+// userNotFound represents an error when an inexistent user is looked up.
+type userNotFound struct {
+ Err
+}
+
+// UserNotFoundf returns an error which satisfies IsUserNotFound().
+func UserNotFoundf(format string, args ...interface{}) error {
+ return &userNotFound{wrap(nil, format, " user not found", args...)}
+}
+
+// NewUserNotFound returns an error which wraps err and satisfies
+// IsUserNotFound().
+func NewUserNotFound(err error, msg string) error {
+ return &userNotFound{wrap(err, msg, "")}
+}
+
+// IsUserNotFound reports whether err was created with UserNotFoundf() or
+// NewUserNotFound().
+func IsUserNotFound(err error) bool {
+ err = Cause(err)
+ _, ok := err.(*userNotFound)
+ return ok
+}
+
+// unauthorized represents an error when an operation is unauthorized.
+type unauthorized struct {
+ Err
+}
+
+// Unauthorizedf returns an error which satisfies IsUnauthorized().
+func Unauthorizedf(format string, args ...interface{}) error {
+ return &unauthorized{wrap(nil, format, "", args...)}
+}
+
+// NewUnauthorized returns an error which wraps err and satisfies
+// IsUnauthorized().
+func NewUnauthorized(err error, msg string) error {
+ return &unauthorized{wrap(err, msg, "")}
+}
+
+// IsUnauthorized reports whether err was created with Unauthorizedf() or
+// NewUnauthorized().
+func IsUnauthorized(err error) bool {
+ err = Cause(err)
+ _, ok := err.(*unauthorized)
+ return ok
+}
+
+// notImplemented represents an error when something is not
+// implemented.
+type notImplemented struct {
+ Err
+}
+
+// NotImplementedf returns an error which satisfies IsNotImplemented().
+func NotImplementedf(format string, args ...interface{}) error {
+ return ¬Implemented{wrap(nil, format, " not implemented", args...)}
+}
+
+// NewNotImplemented returns an error which wraps err and satisfies
+// IsNotImplemented().
+func NewNotImplemented(err error, msg string) error {
+ return ¬Implemented{wrap(err, msg, "")}
+}
+
+// IsNotImplemented reports whether err was created with
+// NotImplementedf() or NewNotImplemented().
+func IsNotImplemented(err error) bool {
+ err = Cause(err)
+ _, ok := err.(*notImplemented)
+ return ok
+}
+
+// alreadyExists represents an error when something already exists.
+type alreadyExists struct {
+ Err
+}
+
+// AlreadyExistsf returns an error which satisfies IsAlreadyExists().
+func AlreadyExistsf(format string, args ...interface{}) error {
+ return &alreadyExists{wrap(nil, format, " already exists", args...)}
+}
+
+// NewAlreadyExists returns an error which wraps err and satisfies
+// IsAlreadyExists().
+func NewAlreadyExists(err error, msg string) error {
+ return &alreadyExists{wrap(err, msg, "")}
+}
+
+// IsAlreadyExists reports whether the error was created with
+// AlreadyExistsf() or NewAlreadyExists().
+func IsAlreadyExists(err error) bool {
+ err = Cause(err)
+ _, ok := err.(*alreadyExists)
+ return ok
+}
+
+// notSupported represents an error when something is not supported.
+type notSupported struct {
+ Err
+}
+
+// NotSupportedf returns an error which satisfies IsNotSupported().
+func NotSupportedf(format string, args ...interface{}) error {
+ return ¬Supported{wrap(nil, format, " not supported", args...)}
+}
+
+// NewNotSupported returns an error which wraps err and satisfies
+// IsNotSupported().
+func NewNotSupported(err error, msg string) error {
+ return ¬Supported{wrap(err, msg, "")}
+}
+
+// IsNotSupported reports whether the error was created with
+// NotSupportedf() or NewNotSupported().
+func IsNotSupported(err error) bool {
+ err = Cause(err)
+ _, ok := err.(*notSupported)
+ return ok
+}
+
+// notValid represents an error when something is not valid.
+type notValid struct {
+ Err
+}
+
+// NotValidf returns an error which satisfies IsNotValid().
+func NotValidf(format string, args ...interface{}) error {
+ return ¬Valid{wrap(nil, format, " not valid", args...)}
+}
+
+// NewNotValid returns an error which wraps err and satisfies IsNotValid().
+func NewNotValid(err error, msg string) error {
+ return ¬Valid{wrap(err, msg, "")}
+}
+
+// IsNotValid reports whether the error was created with NotValidf() or
+// NewNotValid().
+func IsNotValid(err error) bool {
+ err = Cause(err)
+ _, ok := err.(*notValid)
+ return ok
+}
+
+// notProvisioned represents an error when something is not yet provisioned.
+type notProvisioned struct {
+ Err
+}
+
+// NotProvisionedf returns an error which satisfies IsNotProvisioned().
+func NotProvisionedf(format string, args ...interface{}) error {
+ return ¬Provisioned{wrap(nil, format, " not provisioned", args...)}
+}
+
+// NewNotProvisioned returns an error which wraps err that satisfies
+// IsNotProvisioned().
+func NewNotProvisioned(err error, msg string) error {
+ return ¬Provisioned{wrap(err, msg, "")}
+}
+
+// IsNotProvisioned reports whether err was created with NotProvisionedf() or
+// NewNotProvisioned().
+func IsNotProvisioned(err error) bool {
+ err = Cause(err)
+ _, ok := err.(*notProvisioned)
+ return ok
+}
+
+// notAssigned represents an error when something is not yet assigned to
+// something else.
+type notAssigned struct {
+ Err
+}
+
+// NotAssignedf returns an error which satisfies IsNotAssigned().
+func NotAssignedf(format string, args ...interface{}) error {
+ return ¬Assigned{wrap(nil, format, " not assigned", args...)}
+}
+
+// NewNotAssigned returns an error which wraps err that satisfies
+// IsNotAssigned().
+func NewNotAssigned(err error, msg string) error {
+ return ¬Assigned{wrap(err, msg, "")}
+}
+
+// IsNotAssigned reports whether err was created with NotAssignedf() or
+// NewNotAssigned().
+func IsNotAssigned(err error) bool {
+ err = Cause(err)
+ _, ok := err.(*notAssigned)
+ return ok
+}
+
+// badRequest represents an error when a request has bad parameters.
+type badRequest struct {
+ Err
+}
+
+// BadRequestf returns an error which satisfies IsBadRequest().
+func BadRequestf(format string, args ...interface{}) error {
+ return &badRequest{wrap(nil, format, "", args...)}
+}
+
+// NewBadRequest returns an error which wraps err that satisfies
+// IsBadRequest().
+func NewBadRequest(err error, msg string) error {
+ return &badRequest{wrap(err, msg, "")}
+}
+
+// IsBadRequest reports whether err was created with BadRequestf() or
+// NewBadRequest().
+func IsBadRequest(err error) bool {
+ err = Cause(err)
+ _, ok := err.(*badRequest)
+ return ok
+}
+
+// methodNotAllowed represents an error when an HTTP request
+// is made with an inappropriate method.
+type methodNotAllowed struct {
+ Err
+}
+
+// MethodNotAllowedf returns an error which satisfies IsMethodNotAllowed().
+func MethodNotAllowedf(format string, args ...interface{}) error {
+ return &methodNotAllowed{wrap(nil, format, "", args...)}
+}
+
+// NewMethodNotAllowed returns an error which wraps err that satisfies
+// IsMethodNotAllowed().
+func NewMethodNotAllowed(err error, msg string) error {
+ return &methodNotAllowed{wrap(err, msg, "")}
+}
+
+// IsMethodNotAllowed reports whether err was created with MethodNotAllowedf() or
+// NewMethodNotAllowed().
+func IsMethodNotAllowed(err error) bool {
+ err = Cause(err)
+ _, ok := err.(*methodNotAllowed)
+ return ok
+}
+
+// forbidden represents an error when a request cannot be completed because of
+// missing privileges
+type forbidden struct {
+ Err
+}
+
+// Forbiddenf returns an error which satisfies IsForbidden().
+func Forbiddenf(format string, args ...interface{}) error {
+ return &forbidden{wrap(nil, format, "", args...)}
+}
+
+// NewForbidden returns an error which wraps err that satisfies
+// IsForbidden().
+func NewForbidden(err error, msg string) error {
+ return &forbidden{wrap(err, msg, "")}
+}
+
+// IsForbidden reports whether err was created with Forbiddenf() or
+// NewForbidden().
+func IsForbidden(err error) bool {
+ err = Cause(err)
+ _, ok := err.(*forbidden)
+ return ok
+}
diff --git a/vendor/github.com/juju/errors/functions.go b/vendor/github.com/juju/errors/functions.go
new file mode 100644
index 000000000..f86b09b2d
--- /dev/null
+++ b/vendor/github.com/juju/errors/functions.go
@@ -0,0 +1,330 @@
+// Copyright 2014 Canonical Ltd.
+// Licensed under the LGPLv3, see LICENCE file for details.
+
+package errors
+
+import (
+ "fmt"
+ "strings"
+)
+
+// New is a drop in replacement for the standard library errors module that records
+// the location that the error is created.
+//
+// For example:
+// return errors.New("validation failed")
+//
+func New(message string) error {
+ err := &Err{message: message}
+ err.SetLocation(1)
+ return err
+}
+
+// Errorf creates a new annotated error and records the location that the
+// error is created. This should be a drop in replacement for fmt.Errorf.
+//
+// For example:
+// return errors.Errorf("validation failed: %s", message)
+//
+func Errorf(format string, args ...interface{}) error {
+ err := &Err{message: fmt.Sprintf(format, args...)}
+ err.SetLocation(1)
+ return err
+}
+
+// Trace adds the location of the Trace call to the stack. The Cause of the
+// resulting error is the same as the error parameter. If the other error is
+// nil, the result will be nil.
+//
+// For example:
+// if err := SomeFunc(); err != nil {
+// return errors.Trace(err)
+// }
+//
+func Trace(other error) error {
+ if other == nil {
+ return nil
+ }
+ err := &Err{previous: other, cause: Cause(other)}
+ err.SetLocation(1)
+ return err
+}
+
+// Annotate is used to add extra context to an existing error. The location of
+// the Annotate call is recorded with the annotations. The file, line and
+// function are also recorded.
+//
+// For example:
+// if err := SomeFunc(); err != nil {
+// return errors.Annotate(err, "failed to frombulate")
+// }
+//
+func Annotate(other error, message string) error {
+ if other == nil {
+ return nil
+ }
+ err := &Err{
+ previous: other,
+ cause: Cause(other),
+ message: message,
+ }
+ err.SetLocation(1)
+ return err
+}
+
+// Annotatef is used to add extra context to an existing error. The location of
+// the Annotate call is recorded with the annotations. The file, line and
+// function are also recorded.
+//
+// For example:
+// if err := SomeFunc(); err != nil {
+// return errors.Annotatef(err, "failed to frombulate the %s", arg)
+// }
+//
+func Annotatef(other error, format string, args ...interface{}) error {
+ if other == nil {
+ return nil
+ }
+ err := &Err{
+ previous: other,
+ cause: Cause(other),
+ message: fmt.Sprintf(format, args...),
+ }
+ err.SetLocation(1)
+ return err
+}
+
+// DeferredAnnotatef annotates the given error (when it is not nil) with the given
+// format string and arguments (like fmt.Sprintf). If *err is nil, DeferredAnnotatef
+// does nothing. This method is used in a defer statement in order to annotate any
+// resulting error with the same message.
+//
+// For example:
+//
+// defer DeferredAnnotatef(&err, "failed to frombulate the %s", arg)
+//
+func DeferredAnnotatef(err *error, format string, args ...interface{}) {
+ if *err == nil {
+ return
+ }
+ newErr := &Err{
+ message: fmt.Sprintf(format, args...),
+ cause: Cause(*err),
+ previous: *err,
+ }
+ newErr.SetLocation(1)
+ *err = newErr
+}
+
+// Wrap changes the Cause of the error. The location of the Wrap call is also
+// stored in the error stack.
+//
+// For example:
+// if err := SomeFunc(); err != nil {
+// newErr := &packageError{"more context", private_value}
+// return errors.Wrap(err, newErr)
+// }
+//
+func Wrap(other, newDescriptive error) error {
+ err := &Err{
+ previous: other,
+ cause: newDescriptive,
+ }
+ err.SetLocation(1)
+ return err
+}
+
+// Wrapf changes the Cause of the error, and adds an annotation. The location
+// of the Wrap call is also stored in the error stack.
+//
+// For example:
+// if err := SomeFunc(); err != nil {
+// return errors.Wrapf(err, simpleErrorType, "invalid value %q", value)
+// }
+//
+func Wrapf(other, newDescriptive error, format string, args ...interface{}) error {
+ err := &Err{
+ message: fmt.Sprintf(format, args...),
+ previous: other,
+ cause: newDescriptive,
+ }
+ err.SetLocation(1)
+ return err
+}
+
+// Mask masks the given error with the given format string and arguments (like
+// fmt.Sprintf), returning a new error that maintains the error stack, but
+// hides the underlying error type. The error string still contains the full
+// annotations. If you want to hide the annotations, call Wrap.
+func Maskf(other error, format string, args ...interface{}) error {
+ if other == nil {
+ return nil
+ }
+ err := &Err{
+ message: fmt.Sprintf(format, args...),
+ previous: other,
+ }
+ err.SetLocation(1)
+ return err
+}
+
+// Mask hides the underlying error type, and records the location of the masking.
+func Mask(other error) error {
+ if other == nil {
+ return nil
+ }
+ err := &Err{
+ previous: other,
+ }
+ err.SetLocation(1)
+ return err
+}
+
+// Cause returns the cause of the given error. This will be either the
+// original error, or the result of a Wrap or Mask call.
+//
+// Cause is the usual way to diagnose errors that may have been wrapped by
+// the other errors functions.
+func Cause(err error) error {
+ var diag error
+ if err, ok := err.(causer); ok {
+ diag = err.Cause()
+ }
+ if diag != nil {
+ return diag
+ }
+ return err
+}
+
+type causer interface {
+ Cause() error
+}
+
+type wrapper interface {
+ // Message returns the top level error message,
+ // not including the message from the Previous
+ // error.
+ Message() string
+
+ // Underlying returns the Previous error, or nil
+ // if there is none.
+ Underlying() error
+}
+
+type locationer interface {
+ Location() (string, int)
+}
+
+var (
+ _ wrapper = (*Err)(nil)
+ _ locationer = (*Err)(nil)
+ _ causer = (*Err)(nil)
+)
+
+// Details returns information about the stack of errors wrapped by err, in
+// the format:
+//
+// [{filename:99: error one} {otherfile:55: cause of error one}]
+//
+// This is a terse alternative to ErrorStack as it returns a single line.
+func Details(err error) string {
+ if err == nil {
+ return "[]"
+ }
+ var s []byte
+ s = append(s, '[')
+ for {
+ s = append(s, '{')
+ if err, ok := err.(locationer); ok {
+ file, line := err.Location()
+ if file != "" {
+ s = append(s, fmt.Sprintf("%s:%d", file, line)...)
+ s = append(s, ": "...)
+ }
+ }
+ if cerr, ok := err.(wrapper); ok {
+ s = append(s, cerr.Message()...)
+ err = cerr.Underlying()
+ } else {
+ s = append(s, err.Error()...)
+ err = nil
+ }
+ s = append(s, '}')
+ if err == nil {
+ break
+ }
+ s = append(s, ' ')
+ }
+ s = append(s, ']')
+ return string(s)
+}
+
+// ErrorStack returns a string representation of the annotated error. If the
+// error passed as the parameter is not an annotated error, the result is
+// simply the result of the Error() method on that error.
+//
+// If the error is an annotated error, a multi-line string is returned where
+// each line represents one entry in the annotation stack. The full filename
+// from the call stack is used in the output.
+//
+// first error
+// github.com/juju/errors/annotation_test.go:193:
+// github.com/juju/errors/annotation_test.go:194: annotation
+// github.com/juju/errors/annotation_test.go:195:
+// github.com/juju/errors/annotation_test.go:196: more context
+// github.com/juju/errors/annotation_test.go:197:
+func ErrorStack(err error) string {
+ return strings.Join(errorStack(err), "\n")
+}
+
+func errorStack(err error) []string {
+ if err == nil {
+ return nil
+ }
+
+ // We want the first error first
+ var lines []string
+ for {
+ var buff []byte
+ if err, ok := err.(locationer); ok {
+ file, line := err.Location()
+ // Strip off the leading GOPATH/src path elements.
+ file = trimGoPath(file)
+ if file != "" {
+ buff = append(buff, fmt.Sprintf("%s:%d", file, line)...)
+ buff = append(buff, ": "...)
+ }
+ }
+ if cerr, ok := err.(wrapper); ok {
+ message := cerr.Message()
+ buff = append(buff, message...)
+ // If there is a cause for this error, and it is different to the cause
+ // of the underlying error, then output the error string in the stack trace.
+ var cause error
+ if err1, ok := err.(causer); ok {
+ cause = err1.Cause()
+ }
+ err = cerr.Underlying()
+ if cause != nil && !sameError(Cause(err), cause) {
+ if message != "" {
+ buff = append(buff, ": "...)
+ }
+ buff = append(buff, cause.Error()...)
+ }
+ } else {
+ buff = append(buff, err.Error()...)
+ err = nil
+ }
+ lines = append(lines, string(buff))
+ if err == nil {
+ break
+ }
+ }
+ // reverse the lines to get the original error, which was at the end of
+ // the list, back to the start.
+ var result []string
+ for i := len(lines); i > 0; i-- {
+ result = append(result, lines[i-1])
+ }
+ return result
+}
diff --git a/vendor/github.com/juju/errors/path.go b/vendor/github.com/juju/errors/path.go
new file mode 100644
index 000000000..e216eb8ff
--- /dev/null
+++ b/vendor/github.com/juju/errors/path.go
@@ -0,0 +1,19 @@
+// Copyright 2013, 2014 Canonical Ltd.
+// Licensed under the LGPLv3, see LICENCE file for details.
+
+package errors
+
+import (
+ "fmt"
+ "go/build"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+var goPath = build.Default.GOPATH
+var srcDir = filepath.Join(goPath, "src")
+
+func trimGoPath(filename string) string {
+ return strings.TrimPrefix(filename, fmt.Sprintf("%s%s", srcDir, string(os.PathSeparator)))
+}
diff --git a/vendor/github.com/manveru/faker/.travis.yml b/vendor/github.com/manveru/faker/.travis.yml
new file mode 100644
index 000000000..b8e958969
--- /dev/null
+++ b/vendor/github.com/manveru/faker/.travis.yml
@@ -0,0 +1,13 @@
+language: go
+
+go:
+ - 1.6
+ - 1.7
+ - tip
+
+install:
+ - go get
+ - go get github.com/manveru/gobdd
+
+script:
+ - go test
diff --git a/vendor/github.com/manveru/faker/LICENSE b/vendor/github.com/manveru/faker/LICENSE
new file mode 100644
index 000000000..7bbef04b7
--- /dev/null
+++ b/vendor/github.com/manveru/faker/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Michael Fellinger
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/manveru/faker/README.md b/vendor/github.com/manveru/faker/README.md
new file mode 100644
index 000000000..573cb577d
--- /dev/null
+++ b/vendor/github.com/manveru/faker/README.md
@@ -0,0 +1,22 @@
+# Faker for Go
+
+
+
+## Usage
+
+ package main
+
+ import (
+ "github.com/manveru/faker"
+ )
+
+ func main() {
+ fake, err := faker.New("en")
+ if err != nil {
+ panic(err)
+ }
+ println(fake.Name()) //> "Adriana Crona"
+ println(fake.Email()) //> charity.brown@fritschbotsford.biz
+ }
+
+Inspired by the ruby faker gem, which is a port of the Perl Data::Faker library.
diff --git a/vendor/github.com/manveru/faker/dict.go b/vendor/github.com/manveru/faker/dict.go
new file mode 100644
index 000000000..05234f387
--- /dev/null
+++ b/vendor/github.com/manveru/faker/dict.go
@@ -0,0 +1,14453 @@
+package faker
+
+// You can add your own translations and words to this dictionary.
+// It's a map[string]map[string][]string.
+// For example:
+//
+// faker.Dict["en"]["company.suffix"] = []string{
+// "Inc",
+// "and Sons",
+// "LLC",
+// "Group",
+// }
+var Dict = dict
+
+// this just messes up godoc very badly, so keep it private.
+var dict = map[string]map[string][]string{
+ "de": map[string][]string{
+ "internet.free_email": []string{
+ "gmail.com",
+ "yahoo.com",
+ "hotmail.com",
+ },
+ "lorem.words": []string{
+ "alias",
+ "consequatur",
+ "aut",
+ "perferendis",
+ "sit",
+ "voluptatem",
+ "accusantium",
+ "doloremque",
+ "aperiam",
+ "eaque",
+ "ipsa",
+ "quae",
+ "ab",
+ "illo",
+ "inventore",
+ "veritatis",
+ "et",
+ "quasi",
+ "architecto",
+ "beatae",
+ "vitae",
+ "dicta",
+ "sunt",
+ "explicabo",
+ "aspernatur",
+ "aut",
+ "odit",
+ "aut",
+ "fugit",
+ "sed",
+ "quia",
+ "consequuntur",
+ "magni",
+ "dolores",
+ "eos",
+ "qui",
+ "ratione",
+ "voluptatem",
+ "sequi",
+ "nesciunt",
+ "neque",
+ "dolorem",
+ "ipsum",
+ "quia",
+ "dolor",
+ "sit",
+ "amet",
+ "consectetur",
+ "adipisci",
+ "velit",
+ "sed",
+ "quia",
+ "non",
+ "numquam",
+ "eius",
+ "modi",
+ "tempora",
+ "incidunt",
+ "ut",
+ "labore",
+ "et",
+ "dolore",
+ "magnam",
+ "aliquam",
+ "quaerat",
+ "voluptatem",
+ "ut",
+ "enim",
+ "ad",
+ "minima",
+ "veniam",
+ "quis",
+ "nostrum",
+ "exercitationem",
+ "ullam",
+ "corporis",
+ "nemo",
+ "enim",
+ "ipsam",
+ "voluptatem",
+ "quia",
+ "voluptas",
+ "sit",
+ "suscipit",
+ "laboriosam",
+ "nisi",
+ "ut",
+ "aliquid",
+ "ex",
+ "ea",
+ "commodi",
+ "consequatur",
+ "quis",
+ "autem",
+ "vel",
+ "eum",
+ "iure",
+ "reprehenderit",
+ "qui",
+ "in",
+ "ea",
+ "voluptate",
+ "velit",
+ "esse",
+ "quam",
+ "nihil",
+ "molestiae",
+ "et",
+ "iusto",
+ "odio",
+ "dignissimos",
+ "ducimus",
+ "qui",
+ "blanditiis",
+ "praesentium",
+ "laudantium",
+ "totam",
+ "rem",
+ "voluptatum",
+ "deleniti",
+ "atque",
+ "corrupti",
+ "quos",
+ "dolores",
+ "et",
+ "quas",
+ "molestias",
+ "excepturi",
+ "sint",
+ "occaecati",
+ "cupiditate",
+ "non",
+ "provident",
+ "sed",
+ "ut",
+ "perspiciatis",
+ "unde",
+ "omnis",
+ "iste",
+ "natus",
+ "error",
+ "similique",
+ "sunt",
+ "in",
+ "culpa",
+ "qui",
+ "officia",
+ "deserunt",
+ "mollitia",
+ "animi",
+ "id",
+ "est",
+ "laborum",
+ "et",
+ "dolorum",
+ "fuga",
+ "et",
+ "harum",
+ "quidem",
+ "rerum",
+ "facilis",
+ "est",
+ "et",
+ "expedita",
+ "distinctio",
+ "nam",
+ "libero",
+ "tempore",
+ "cum",
+ "soluta",
+ "nobis",
+ "est",
+ "eligendi",
+ "optio",
+ "cumque",
+ "nihil",
+ "impedit",
+ "quo",
+ "porro",
+ "quisquam",
+ "est",
+ "qui",
+ "minus",
+ "id",
+ "quod",
+ "maxime",
+ "placeat",
+ "facere",
+ "possimus",
+ "omnis",
+ "voluptas",
+ "assumenda",
+ "est",
+ "omnis",
+ "dolor",
+ "repellendus",
+ "temporibus",
+ "autem",
+ "quibusdam",
+ "et",
+ "aut",
+ "consequatur",
+ "vel",
+ "illum",
+ "qui",
+ "dolorem",
+ "eum",
+ "fugiat",
+ "quo",
+ "voluptas",
+ "nulla",
+ "pariatur",
+ "at",
+ "vero",
+ "eos",
+ "et",
+ "accusamus",
+ "officiis",
+ "debitis",
+ "aut",
+ "rerum",
+ "necessitatibus",
+ "saepe",
+ "eveniet",
+ "ut",
+ "et",
+ "voluptates",
+ "repudiandae",
+ "sint",
+ "et",
+ "molestiae",
+ "non",
+ "recusandae",
+ "itaque",
+ "earum",
+ "rerum",
+ "hic",
+ "tenetur",
+ "a",
+ "sapiente",
+ "delectus",
+ "ut",
+ "aut",
+ "reiciendis",
+ "voluptatibus",
+ "maiores",
+ "doloribus",
+ "asperiores",
+ "repellat",
+ },
+ "address.building_number": []string{
+ "####",
+ "###",
+ "##",
+ "#",
+ "##a",
+ "##b",
+ "##c",
+ },
+ "phone_number.formats": []string{
+ "(0###) #########",
+ "(0####) #######",
+ "+49-###-#######",
+ "+49-####-########",
+ },
+ "address.street_name": []string{
+ "#{street_root}",
+ },
+ "company.name": []string{
+ "#{Name.last_name} #{suffix}",
+ "#{Name.last_name}-#{Name.last_name}",
+ "#{Name.last_name}, #{Name.last_name} und #{Name.last_name}",
+ },
+ "address.city_prefix": []string{
+ "Nord",
+ "Ost",
+ "West",
+ "Süd",
+ "Neu",
+ "Alt",
+ "Bad",
+ },
+ "internet.domain_suffix": []string{
+ "com",
+ "info",
+ "name",
+ "net",
+ "org",
+ "de",
+ "ch",
+ },
+ "name.last_name": []string{
+ "Abel",
+ "Abicht",
+ "Abraham",
+ "Abramovic",
+ "Abt",
+ "Achilles",
+ "Achkinadze",
+ "Ackermann",
+ "Adam",
+ "Adams",
+ "Ade",
+ "Agostini",
+ "Ahlke",
+ "Ahrenberg",
+ "Ahrens",
+ "Aigner",
+ "Albert",
+ "Albrecht",
+ "Alexa",
+ "Alexander",
+ "Alizadeh",
+ "Allgeyer",
+ "Amann",
+ "Amberg",
+ "Anding",
+ "Anggreny",
+ "Apitz",
+ "Arendt",
+ "Arens",
+ "Arndt",
+ "Aryee",
+ "Aschenbroich",
+ "Assmus",
+ "Astafei",
+ "Auer",
+ "Axmann",
+ "Baarck",
+ "Bachmann",
+ "Badane",
+ "Bader",
+ "Baganz",
+ "Bahl",
+ "Bak",
+ "Balcer",
+ "Balck",
+ "Balkow",
+ "Balnuweit",
+ "Balzer",
+ "Banse",
+ "Barr",
+ "Bartels",
+ "Barth",
+ "Barylla",
+ "Baseda",
+ "Battke",
+ "Bauer",
+ "Bauermeister",
+ "Baumann",
+ "Baumeister",
+ "Bauschinger",
+ "Bauschke",
+ "Bayer",
+ "Beavogui",
+ "Beck",
+ "Beckel",
+ "Becker",
+ "Beckmann",
+ "Bedewitz",
+ "Beele",
+ "Beer",
+ "Beggerow",
+ "Beh",
+ "Behr",
+ "Behrenbruch",
+ "Belz",
+ "Bender",
+ "Benecke",
+ "Benner",
+ "Benninger",
+ "Benzing",
+ "Berends",
+ "Berger",
+ "Berner",
+ "Berning",
+ "Bertenbreiter",
+ "Best",
+ "Bethke",
+ "Betz",
+ "Beushausen",
+ "Beutelspacher",
+ "Beyer",
+ "Biba",
+ "Bichler",
+ "Bickel",
+ "Biedermann",
+ "Bieler",
+ "Bielert",
+ "Bienasch",
+ "Bienias",
+ "Biesenbach",
+ "Bigdeli",
+ "Birkemeyer",
+ "Bittner",
+ "Blank",
+ "Blaschek",
+ "Blassneck",
+ "Bloch",
+ "Blochwitz",
+ "Blockhaus",
+ "Blum",
+ "Blume",
+ "Bock",
+ "Bode",
+ "Bogdashin",
+ "Bogenrieder",
+ "Bohge",
+ "Bolm",
+ "Borgschulze",
+ "Bork",
+ "Bormann",
+ "Bornscheuer",
+ "Borrmann",
+ "Borsch",
+ "Boruschewski",
+ "Bos",
+ "Bosler",
+ "Bourrouag",
+ "Bouschen",
+ "Boxhammer",
+ "Boyde",
+ "Bozsik",
+ "Brand",
+ "Brandenburg",
+ "Brandis",
+ "Brandt",
+ "Brauer",
+ "Braun",
+ "Brehmer",
+ "Breitenstein",
+ "Bremer",
+ "Bremser",
+ "Brenner",
+ "Brettschneider",
+ "Breu",
+ "Breuer",
+ "Briesenick",
+ "Bringmann",
+ "Brinkmann",
+ "Brix",
+ "Broening",
+ "Brosch",
+ "Bruckmann",
+ "Bruder",
+ "Bruhns",
+ "Brunner",
+ "Bruns",
+ "Bräutigam",
+ "Brömme",
+ "Brüggmann",
+ "Buchholz",
+ "Buchrucker",
+ "Buder",
+ "Bultmann",
+ "Bunjes",
+ "Burger",
+ "Burghagen",
+ "Burkhard",
+ "Burkhardt",
+ "Burmeister",
+ "Busch",
+ "Buschbaum",
+ "Busemann",
+ "Buss",
+ "Busse",
+ "Bussmann",
+ "Byrd",
+ "Bäcker",
+ "Böhm",
+ "Bönisch",
+ "Börgeling",
+ "Börner",
+ "Böttner",
+ "Büchele",
+ "Bühler",
+ "Büker",
+ "Büngener",
+ "Bürger",
+ "Bürklein",
+ "Büscher",
+ "Büttner",
+ "Camara",
+ "Carlowitz",
+ "Carlsohn",
+ "Caspari",
+ "Caspers",
+ "Chapron",
+ "Christ",
+ "Cierpinski",
+ "Clarius",
+ "Cleem",
+ "Cleve",
+ "Co",
+ "Conrad",
+ "Cordes",
+ "Cornelsen",
+ "Cors",
+ "Cotthardt",
+ "Crews",
+ "Cronjäger",
+ "Crosskofp",
+ "Da",
+ "Dahm",
+ "Dahmen",
+ "Daimer",
+ "Damaske",
+ "Danneberg",
+ "Danner",
+ "Daub",
+ "Daubner",
+ "Daudrich",
+ "Dauer",
+ "Daum",
+ "Dauth",
+ "Dautzenberg",
+ "De",
+ "Decker",
+ "Deckert",
+ "Deerberg",
+ "Dehmel",
+ "Deja",
+ "Delonge",
+ "Demut",
+ "Dengler",
+ "Denner",
+ "Denzinger",
+ "Derr",
+ "Dertmann",
+ "Dethloff",
+ "Deuschle",
+ "Dieckmann",
+ "Diedrich",
+ "Diekmann",
+ "Dienel",
+ "Dies",
+ "Dietrich",
+ "Dietz",
+ "Dietzsch",
+ "Diezel",
+ "Dilla",
+ "Dingelstedt",
+ "Dippl",
+ "Dittmann",
+ "Dittmar",
+ "Dittmer",
+ "Dix",
+ "Dobbrunz",
+ "Dobler",
+ "Dohring",
+ "Dolch",
+ "Dold",
+ "Dombrowski",
+ "Donie",
+ "Doskoczynski",
+ "Dragu",
+ "Drechsler",
+ "Drees",
+ "Dreher",
+ "Dreier",
+ "Dreissigacker",
+ "Dressler",
+ "Drews",
+ "Duma",
+ "Dutkiewicz",
+ "Dyett",
+ "Dylus",
+ "Dächert",
+ "Döbel",
+ "Döring",
+ "Dörner",
+ "Dörre",
+ "Dück",
+ "Eberhard",
+ "Eberhardt",
+ "Ecker",
+ "Eckhardt",
+ "Edorh",
+ "Effler",
+ "Eggenmueller",
+ "Ehm",
+ "Ehmann",
+ "Ehrig",
+ "Eich",
+ "Eichmann",
+ "Eifert",
+ "Einert",
+ "Eisenlauer",
+ "Ekpo",
+ "Elbe",
+ "Eleyth",
+ "Elss",
+ "Emert",
+ "Emmelmann",
+ "Ender",
+ "Engel",
+ "Engelen",
+ "Engelmann",
+ "Eplinius",
+ "Erdmann",
+ "Erhardt",
+ "Erlei",
+ "Erm",
+ "Ernst",
+ "Ertl",
+ "Erwes",
+ "Esenwein",
+ "Esser",
+ "Evers",
+ "Everts",
+ "Ewald",
+ "Fahner",
+ "Faller",
+ "Falter",
+ "Farber",
+ "Fassbender",
+ "Faulhaber",
+ "Fehrig",
+ "Feld",
+ "Felke",
+ "Feller",
+ "Fenner",
+ "Fenske",
+ "Feuerbach",
+ "Fietz",
+ "Figl",
+ "Figura",
+ "Filipowski",
+ "Filsinger",
+ "Fincke",
+ "Fink",
+ "Finke",
+ "Fischer",
+ "Fitschen",
+ "Fleischer",
+ "Fleischmann",
+ "Floder",
+ "Florczak",
+ "Flore",
+ "Flottmann",
+ "Forkel",
+ "Forst",
+ "Frahmeke",
+ "Frank",
+ "Franke",
+ "Franta",
+ "Frantz",
+ "Franz",
+ "Franzis",
+ "Franzmann",
+ "Frauen",
+ "Frauendorf",
+ "Freigang",
+ "Freimann",
+ "Freimuth",
+ "Freisen",
+ "Frenzel",
+ "Frey",
+ "Fricke",
+ "Fried",
+ "Friedek",
+ "Friedenberg",
+ "Friedmann",
+ "Friedrich",
+ "Friess",
+ "Frisch",
+ "Frohn",
+ "Frosch",
+ "Fuchs",
+ "Fuhlbrügge",
+ "Fusenig",
+ "Fust",
+ "Förster",
+ "Gaba",
+ "Gabius",
+ "Gabler",
+ "Gadschiew",
+ "Gakstädter",
+ "Galander",
+ "Gamlin",
+ "Gamper",
+ "Gangnus",
+ "Ganzmann",
+ "Garatva",
+ "Gast",
+ "Gastel",
+ "Gatzka",
+ "Gauder",
+ "Gebhardt",
+ "Geese",
+ "Gehre",
+ "Gehrig",
+ "Gehring",
+ "Gehrke",
+ "Geiger",
+ "Geisler",
+ "Geissler",
+ "Gelling",
+ "Gens",
+ "Gerbennow",
+ "Gerdel",
+ "Gerhardt",
+ "Gerschler",
+ "Gerson",
+ "Gesell",
+ "Geyer",
+ "Ghirmai",
+ "Ghosh",
+ "Giehl",
+ "Gierisch",
+ "Giesa",
+ "Giesche",
+ "Gilde",
+ "Glatting",
+ "Goebel",
+ "Goedicke",
+ "Goldbeck",
+ "Goldfuss",
+ "Goldkamp",
+ "Goldkühle",
+ "Goller",
+ "Golling",
+ "Gollnow",
+ "Golomski",
+ "Gombert",
+ "Gotthardt",
+ "Gottschalk",
+ "Gotz",
+ "Goy",
+ "Gradzki",
+ "Graf",
+ "Grams",
+ "Grasse",
+ "Gratzky",
+ "Grau",
+ "Greb",
+ "Green",
+ "Greger",
+ "Greithanner",
+ "Greschner",
+ "Griem",
+ "Griese",
+ "Grimm",
+ "Gromisch",
+ "Gross",
+ "Grosser",
+ "Grossheim",
+ "Grosskopf",
+ "Grothaus",
+ "Grothkopp",
+ "Grotke",
+ "Grube",
+ "Gruber",
+ "Grundmann",
+ "Gruning",
+ "Gruszecki",
+ "Gröss",
+ "Grötzinger",
+ "Grün",
+ "Grüner",
+ "Gummelt",
+ "Gunkel",
+ "Gunther",
+ "Gutjahr",
+ "Gutowicz",
+ "Gutschank",
+ "Göbel",
+ "Göckeritz",
+ "Göhler",
+ "Görlich",
+ "Görmer",
+ "Götz",
+ "Götzelmann",
+ "Güldemeister",
+ "Günther",
+ "Günz",
+ "Gürbig",
+ "Haack",
+ "Haaf",
+ "Habel",
+ "Hache",
+ "Hackbusch",
+ "Hackelbusch",
+ "Hadfield",
+ "Hadwich",
+ "Haferkamp",
+ "Hahn",
+ "Hajek",
+ "Hallmann",
+ "Hamann",
+ "Hanenberger",
+ "Hannecker",
+ "Hanniske",
+ "Hansen",
+ "Hardy",
+ "Hargasser",
+ "Harms",
+ "Harnapp",
+ "Harter",
+ "Harting",
+ "Hartlieb",
+ "Hartmann",
+ "Hartwig",
+ "Hartz",
+ "Haschke",
+ "Hasler",
+ "Hasse",
+ "Hassfeld",
+ "Haug",
+ "Hauke",
+ "Haupt",
+ "Haverney",
+ "Heberstreit",
+ "Hechler",
+ "Hecht",
+ "Heck",
+ "Hedermann",
+ "Hehl",
+ "Heidelmann",
+ "Heidler",
+ "Heinemann",
+ "Heinig",
+ "Heinke",
+ "Heinrich",
+ "Heinze",
+ "Heiser",
+ "Heist",
+ "Hellmann",
+ "Helm",
+ "Helmke",
+ "Helpling",
+ "Hengmith",
+ "Henkel",
+ "Hennes",
+ "Henry",
+ "Hense",
+ "Hensel",
+ "Hentel",
+ "Hentschel",
+ "Hentschke",
+ "Hepperle",
+ "Herberger",
+ "Herbrand",
+ "Hering",
+ "Hermann",
+ "Hermecke",
+ "Herms",
+ "Herold",
+ "Herrmann",
+ "Herschmann",
+ "Hertel",
+ "Herweg",
+ "Herwig",
+ "Herzenberg",
+ "Hess",
+ "Hesse",
+ "Hessek",
+ "Hessler",
+ "Hetzler",
+ "Heuck",
+ "Heydemüller",
+ "Hiebl",
+ "Hildebrand",
+ "Hildenbrand",
+ "Hilgendorf",
+ "Hillard",
+ "Hiller",
+ "Hingsen",
+ "Hingst",
+ "Hinrichs",
+ "Hirsch",
+ "Hirschberg",
+ "Hirt",
+ "Hodea",
+ "Hoffman",
+ "Hoffmann",
+ "Hofmann",
+ "Hohenberger",
+ "Hohl",
+ "Hohn",
+ "Hohnheiser",
+ "Hold",
+ "Holdt",
+ "Holinski",
+ "Holl",
+ "Holtfreter",
+ "Holz",
+ "Holzdeppe",
+ "Holzner",
+ "Hommel",
+ "Honz",
+ "Hooss",
+ "Hoppe",
+ "Horak",
+ "Horn",
+ "Horna",
+ "Hornung",
+ "Hort",
+ "Howard",
+ "Huber",
+ "Huckestein",
+ "Hudak",
+ "Huebel",
+ "Hugo",
+ "Huhn",
+ "Hujo",
+ "Huke",
+ "Huls",
+ "Humbert",
+ "Huneke",
+ "Huth",
+ "Häber",
+ "Häfner",
+ "Höcke",
+ "Höft",
+ "Höhne",
+ "Hönig",
+ "Hördt",
+ "Hübenbecker",
+ "Hübl",
+ "Hübner",
+ "Hügel",
+ "Hüttcher",
+ "Hütter",
+ "Ibe",
+ "Ihly",
+ "Illing",
+ "Isak",
+ "Isekenmeier",
+ "Itt",
+ "Jacob",
+ "Jacobs",
+ "Jagusch",
+ "Jahn",
+ "Jahnke",
+ "Jakobs",
+ "Jakubczyk",
+ "Jambor",
+ "Jamrozy",
+ "Jander",
+ "Janich",
+ "Janke",
+ "Jansen",
+ "Jarets",
+ "Jaros",
+ "Jasinski",
+ "Jasper",
+ "Jegorov",
+ "Jellinghaus",
+ "Jeorga",
+ "Jerschabek",
+ "Jess",
+ "John",
+ "Jonas",
+ "Jossa",
+ "Jucken",
+ "Jung",
+ "Jungbluth",
+ "Jungton",
+ "Just",
+ "Jürgens",
+ "Kaczmarek",
+ "Kaesmacher",
+ "Kahl",
+ "Kahlert",
+ "Kahles",
+ "Kahlmeyer",
+ "Kaiser",
+ "Kalinowski",
+ "Kallabis",
+ "Kallensee",
+ "Kampf",
+ "Kampschulte",
+ "Kappe",
+ "Kappler",
+ "Karhoff",
+ "Karrass",
+ "Karst",
+ "Karsten",
+ "Karus",
+ "Kass",
+ "Kasten",
+ "Kastner",
+ "Katzinski",
+ "Kaufmann",
+ "Kaul",
+ "Kausemann",
+ "Kawohl",
+ "Kazmarek",
+ "Kedzierski",
+ "Keil",
+ "Keiner",
+ "Keller",
+ "Kelm",
+ "Kempe",
+ "Kemper",
+ "Kempter",
+ "Kerl",
+ "Kern",
+ "Kesselring",
+ "Kesselschläger",
+ "Kette",
+ "Kettenis",
+ "Keutel",
+ "Kick",
+ "Kiessling",
+ "Kinadeter",
+ "Kinzel",
+ "Kinzy",
+ "Kirch",
+ "Kirst",
+ "Kisabaka",
+ "Klaas",
+ "Klabuhn",
+ "Klapper",
+ "Klauder",
+ "Klaus",
+ "Kleeberg",
+ "Kleiber",
+ "Klein",
+ "Kleinert",
+ "Kleininger",
+ "Kleinmann",
+ "Kleinsteuber",
+ "Kleiss",
+ "Klemme",
+ "Klimczak",
+ "Klinger",
+ "Klink",
+ "Klopsch",
+ "Klose",
+ "Kloss",
+ "Kluge",
+ "Kluwe",
+ "Knabe",
+ "Kneifel",
+ "Knetsch",
+ "Knies",
+ "Knippel",
+ "Knobel",
+ "Knoblich",
+ "Knoll",
+ "Knorr",
+ "Knorscheidt",
+ "Knut",
+ "Kobs",
+ "Koch",
+ "Kochan",
+ "Kock",
+ "Koczulla",
+ "Koderisch",
+ "Koehl",
+ "Koehler",
+ "Koenig",
+ "Koester",
+ "Kofferschlager",
+ "Koha",
+ "Kohle",
+ "Kohlmann",
+ "Kohnle",
+ "Kohrt",
+ "Koj",
+ "Kolb",
+ "Koleiski",
+ "Kolokas",
+ "Komoll",
+ "Konieczny",
+ "Konig",
+ "Konow",
+ "Konya",
+ "Koob",
+ "Kopf",
+ "Kosenkow",
+ "Koster",
+ "Koszewski",
+ "Koubaa",
+ "Kovacs",
+ "Kowalick",
+ "Kowalinski",
+ "Kozakiewicz",
+ "Krabbe",
+ "Kraft",
+ "Kral",
+ "Kramer",
+ "Krauel",
+ "Kraus",
+ "Krause",
+ "Krauspe",
+ "Kreb",
+ "Krebs",
+ "Kreissig",
+ "Kresse",
+ "Kreutz",
+ "Krieger",
+ "Krippner",
+ "Krodinger",
+ "Krohn",
+ "Krol",
+ "Kron",
+ "Krueger",
+ "Krug",
+ "Kruger",
+ "Krull",
+ "Kruschinski",
+ "Krämer",
+ "Kröckert",
+ "Kröger",
+ "Krüger",
+ "Kubera",
+ "Kufahl",
+ "Kuhlee",
+ "Kuhnen",
+ "Kulimann",
+ "Kulma",
+ "Kumbernuss",
+ "Kummle",
+ "Kunz",
+ "Kupfer",
+ "Kupprion",
+ "Kuprion",
+ "Kurnicki",
+ "Kurrat",
+ "Kurschilgen",
+ "Kuschewitz",
+ "Kuschmann",
+ "Kuske",
+ "Kustermann",
+ "Kutscherauer",
+ "Kutzner",
+ "Kwadwo",
+ "Kähler",
+ "Käther",
+ "Köhler",
+ "Köhrbrück",
+ "Köhre",
+ "Kölotzei",
+ "König",
+ "Köpernick",
+ "Köseoglu",
+ "Kúhn",
+ "Kúhnert",
+ "Kühn",
+ "Kühnel",
+ "Kühnemund",
+ "Kühnert",
+ "Kühnke",
+ "Küsters",
+ "Küter",
+ "Laack",
+ "Lack",
+ "Ladewig",
+ "Lakomy",
+ "Lammert",
+ "Lamos",
+ "Landmann",
+ "Lang",
+ "Lange",
+ "Langfeld",
+ "Langhirt",
+ "Lanig",
+ "Lauckner",
+ "Lauinger",
+ "Laurén",
+ "Lausecker",
+ "Laux",
+ "Laws",
+ "Lax",
+ "Leberer",
+ "Lehmann",
+ "Lehner",
+ "Leibold",
+ "Leide",
+ "Leimbach",
+ "Leipold",
+ "Leist",
+ "Leiter",
+ "Leiteritz",
+ "Leitheim",
+ "Leiwesmeier",
+ "Lenfers",
+ "Lenk",
+ "Lenz",
+ "Lenzen",
+ "Leo",
+ "Lepthin",
+ "Lesch",
+ "Leschnik",
+ "Letzelter",
+ "Lewin",
+ "Lewke",
+ "Leyckes",
+ "Lg",
+ "Lichtenfeld",
+ "Lichtenhagen",
+ "Lichtl",
+ "Liebach",
+ "Liebe",
+ "Liebich",
+ "Liebold",
+ "Lieder",
+ "Lienshöft",
+ "Linden",
+ "Lindenberg",
+ "Lindenmayer",
+ "Lindner",
+ "Linke",
+ "Linnenbaum",
+ "Lippe",
+ "Lipske",
+ "Lipus",
+ "Lischka",
+ "Lobinger",
+ "Logsch",
+ "Lohmann",
+ "Lohre",
+ "Lohse",
+ "Lokar",
+ "Loogen",
+ "Lorenz",
+ "Losch",
+ "Loska",
+ "Lott",
+ "Loy",
+ "Lubina",
+ "Ludolf",
+ "Lufft",
+ "Lukoschek",
+ "Lutje",
+ "Lutz",
+ "Löser",
+ "Löwa",
+ "Lübke",
+ "Maak",
+ "Maczey",
+ "Madetzky",
+ "Madubuko",
+ "Mai",
+ "Maier",
+ "Maisch",
+ "Malek",
+ "Malkus",
+ "Mallmann",
+ "Malucha",
+ "Manns",
+ "Manz",
+ "Marahrens",
+ "Marchewski",
+ "Margis",
+ "Markowski",
+ "Marl",
+ "Marner",
+ "Marquart",
+ "Marschek",
+ "Martel",
+ "Marten",
+ "Martin",
+ "Marx",
+ "Marxen",
+ "Mathes",
+ "Mathies",
+ "Mathiszik",
+ "Matschke",
+ "Mattern",
+ "Matthes",
+ "Matula",
+ "Mau",
+ "Maurer",
+ "Mauroff",
+ "May",
+ "Maybach",
+ "Mayer",
+ "Mebold",
+ "Mehl",
+ "Mehlhorn",
+ "Mehlorn",
+ "Meier",
+ "Meisch",
+ "Meissner",
+ "Meloni",
+ "Melzer",
+ "Menga",
+ "Menne",
+ "Mensah",
+ "Mensing",
+ "Merkel",
+ "Merseburg",
+ "Mertens",
+ "Mesloh",
+ "Metzger",
+ "Metzner",
+ "Mewes",
+ "Meyer",
+ "Michallek",
+ "Michel",
+ "Mielke",
+ "Mikitenko",
+ "Milde",
+ "Minah",
+ "Mintzlaff",
+ "Mockenhaupt",
+ "Moede",
+ "Moedl",
+ "Moeller",
+ "Moguenara",
+ "Mohr",
+ "Mohrhard",
+ "Molitor",
+ "Moll",
+ "Moller",
+ "Molzan",
+ "Montag",
+ "Moormann",
+ "Mordhorst",
+ "Morgenstern",
+ "Morhelfer",
+ "Moritz",
+ "Moser",
+ "Motchebon",
+ "Motzenbbäcker",
+ "Mrugalla",
+ "Muckenthaler",
+ "Mues",
+ "Muller",
+ "Mulrain",
+ "Mächtig",
+ "Mäder",
+ "Möcks",
+ "Mögenburg",
+ "Möhsner",
+ "Möldner",
+ "Möllenbeck",
+ "Möller",
+ "Möllinger",
+ "Mörsch",
+ "Mühleis",
+ "Müller",
+ "Münch",
+ "Nabein",
+ "Nabow",
+ "Nagel",
+ "Nannen",
+ "Nastvogel",
+ "Nau",
+ "Naubert",
+ "Naumann",
+ "Ne",
+ "Neimke",
+ "Nerius",
+ "Neubauer",
+ "Neubert",
+ "Neuendorf",
+ "Neumair",
+ "Neumann",
+ "Neupert",
+ "Neurohr",
+ "Neuschwander",
+ "Newton",
+ "Ney",
+ "Nicolay",
+ "Niedermeier",
+ "Nieklauson",
+ "Niklaus",
+ "Nitzsche",
+ "Noack",
+ "Nodler",
+ "Nolte",
+ "Normann",
+ "Norris",
+ "Northoff",
+ "Nowak",
+ "Nussbeck",
+ "Nwachukwu",
+ "Nytra",
+ "Nöh",
+ "Oberem",
+ "Obergföll",
+ "Obermaier",
+ "Ochs",
+ "Oeser",
+ "Olbrich",
+ "Onnen",
+ "Ophey",
+ "Oppong",
+ "Orth",
+ "Orthmann",
+ "Oschkenat",
+ "Osei",
+ "Osenberg",
+ "Ostendarp",
+ "Ostwald",
+ "Otte",
+ "Otto",
+ "Paesler",
+ "Pajonk",
+ "Pallentin",
+ "Panzig",
+ "Paschke",
+ "Patzwahl",
+ "Paukner",
+ "Peselman",
+ "Peter",
+ "Peters",
+ "Petzold",
+ "Pfeiffer",
+ "Pfennig",
+ "Pfersich",
+ "Pfingsten",
+ "Pflieger",
+ "Pflügner",
+ "Philipp",
+ "Pichlmaier",
+ "Piesker",
+ "Pietsch",
+ "Pingpank",
+ "Pinnock",
+ "Pippig",
+ "Pitschugin",
+ "Plank",
+ "Plass",
+ "Platzer",
+ "Plauk",
+ "Plautz",
+ "Pletsch",
+ "Plotzitzka",
+ "Poehn",
+ "Poeschl",
+ "Pogorzelski",
+ "Pohl",
+ "Pohland",
+ "Pohle",
+ "Polifka",
+ "Polizzi",
+ "Pollmächer",
+ "Pomp",
+ "Ponitzsch",
+ "Porsche",
+ "Porth",
+ "Poschmann",
+ "Poser",
+ "Pottel",
+ "Prah",
+ "Prange",
+ "Prediger",
+ "Pressler",
+ "Preuk",
+ "Preuss",
+ "Prey",
+ "Priemer",
+ "Proske",
+ "Pusch",
+ "Pöche",
+ "Pöge",
+ "Raabe",
+ "Rabenstein",
+ "Rach",
+ "Radtke",
+ "Rahn",
+ "Ranftl",
+ "Rangen",
+ "Ranz",
+ "Rapp",
+ "Rath",
+ "Rau",
+ "Raubuch",
+ "Raukuc",
+ "Rautenkranz",
+ "Rehwagen",
+ "Reiber",
+ "Reichardt",
+ "Reichel",
+ "Reichling",
+ "Reif",
+ "Reifenrath",
+ "Reimann",
+ "Reinberg",
+ "Reinelt",
+ "Reinhardt",
+ "Reinke",
+ "Reitze",
+ "Renk",
+ "Rentz",
+ "Renz",
+ "Reppin",
+ "Restle",
+ "Restorff",
+ "Retzke",
+ "Reuber",
+ "Reumann",
+ "Reus",
+ "Reuss",
+ "Reusse",
+ "Rheder",
+ "Rhoden",
+ "Richards",
+ "Richter",
+ "Riedel",
+ "Riediger",
+ "Rieger",
+ "Riekmann",
+ "Riepl",
+ "Riermeier",
+ "Riester",
+ "Riethmüller",
+ "Rietmüller",
+ "Rietscher",
+ "Ringel",
+ "Ringer",
+ "Rink",
+ "Ripken",
+ "Ritosek",
+ "Ritschel",
+ "Ritter",
+ "Rittweg",
+ "Ritz",
+ "Roba",
+ "Rockmeier",
+ "Rodehau",
+ "Rodowski",
+ "Roecker",
+ "Roggatz",
+ "Rohländer",
+ "Rohrer",
+ "Rokossa",
+ "Roleder",
+ "Roloff",
+ "Roos",
+ "Rosbach",
+ "Roschinsky",
+ "Rose",
+ "Rosenauer",
+ "Rosenbauer",
+ "Rosenthal",
+ "Rosksch",
+ "Rossberg",
+ "Rossler",
+ "Roth",
+ "Rother",
+ "Ruch",
+ "Ruckdeschel",
+ "Rumpf",
+ "Rupprecht",
+ "Ruth",
+ "Ryjikh",
+ "Ryzih",
+ "Rädler",
+ "Räntsch",
+ "Rödiger",
+ "Röse",
+ "Röttger",
+ "Rücker",
+ "Rüdiger",
+ "Rüter",
+ "Sachse",
+ "Sack",
+ "Saflanis",
+ "Sagafe",
+ "Sagonas",
+ "Sahner",
+ "Saile",
+ "Sailer",
+ "Salow",
+ "Salzer",
+ "Salzmann",
+ "Sammert",
+ "Sander",
+ "Sarvari",
+ "Sattelmaier",
+ "Sauer",
+ "Sauerland",
+ "Saumweber",
+ "Savoia",
+ "Scc",
+ "Schacht",
+ "Schaefer",
+ "Schaffarzik",
+ "Schahbasian",
+ "Scharf",
+ "Schedler",
+ "Scheer",
+ "Schelk",
+ "Schellenbeck",
+ "Schembera",
+ "Schenk",
+ "Scherbarth",
+ "Scherer",
+ "Schersing",
+ "Scherz",
+ "Scheurer",
+ "Scheuring",
+ "Scheytt",
+ "Schielke",
+ "Schieskow",
+ "Schildhauer",
+ "Schilling",
+ "Schima",
+ "Schimmer",
+ "Schindzielorz",
+ "Schirmer",
+ "Schirrmeister",
+ "Schlachter",
+ "Schlangen",
+ "Schlawitz",
+ "Schlechtweg",
+ "Schley",
+ "Schlicht",
+ "Schlitzer",
+ "Schmalzle",
+ "Schmid",
+ "Schmidt",
+ "Schmidtchen",
+ "Schmitt",
+ "Schmitz",
+ "Schmuhl",
+ "Schneider",
+ "Schnelting",
+ "Schnieder",
+ "Schniedermeier",
+ "Schnürer",
+ "Schoberg",
+ "Scholz",
+ "Schonberg",
+ "Schondelmaier",
+ "Schorr",
+ "Schott",
+ "Schottmann",
+ "Schouren",
+ "Schrader",
+ "Schramm",
+ "Schreck",
+ "Schreiber",
+ "Schreiner",
+ "Schreiter",
+ "Schroder",
+ "Schröder",
+ "Schuermann",
+ "Schuff",
+ "Schuhaj",
+ "Schuldt",
+ "Schult",
+ "Schulte",
+ "Schultz",
+ "Schultze",
+ "Schulz",
+ "Schulze",
+ "Schumacher",
+ "Schumann",
+ "Schupp",
+ "Schuri",
+ "Schuster",
+ "Schwab",
+ "Schwalm",
+ "Schwanbeck",
+ "Schwandke",
+ "Schwanitz",
+ "Schwarthoff",
+ "Schwartz",
+ "Schwarz",
+ "Schwarzer",
+ "Schwarzkopf",
+ "Schwarzmeier",
+ "Schwatlo",
+ "Schweisfurth",
+ "Schwennen",
+ "Schwerdtner",
+ "Schwidde",
+ "Schwirkschlies",
+ "Schwuchow",
+ "Schäfer",
+ "Schäffel",
+ "Schäffer",
+ "Schäning",
+ "Schöckel",
+ "Schönball",
+ "Schönbeck",
+ "Schönberg",
+ "Schönebeck",
+ "Schönenberger",
+ "Schönfeld",
+ "Schönherr",
+ "Schönlebe",
+ "Schötz",
+ "Schüler",
+ "Schüppel",
+ "Schütz",
+ "Schütze",
+ "Seeger",
+ "Seelig",
+ "Sehls",
+ "Seibold",
+ "Seidel",
+ "Seiders",
+ "Seigel",
+ "Seiler",
+ "Seitz",
+ "Semisch",
+ "Senkel",
+ "Sewald",
+ "Siebel",
+ "Siebert",
+ "Siegling",
+ "Sielemann",
+ "Siemon",
+ "Siener",
+ "Sievers",
+ "Siewert",
+ "Sihler",
+ "Sillah",
+ "Simon",
+ "Sinnhuber",
+ "Sischka",
+ "Skibicki",
+ "Sladek",
+ "Slotta",
+ "Smieja",
+ "Soboll",
+ "Sokolowski",
+ "Soller",
+ "Sollner",
+ "Sommer",
+ "Somssich",
+ "Sonn",
+ "Sonnabend",
+ "Spahn",
+ "Spank",
+ "Spelmeyer",
+ "Spiegelburg",
+ "Spielvogel",
+ "Spinner",
+ "Spitzmüller",
+ "Splinter",
+ "Sporrer",
+ "Sprenger",
+ "Spöttel",
+ "Stahl",
+ "Stang",
+ "Stanger",
+ "Stauss",
+ "Steding",
+ "Steffen",
+ "Steffny",
+ "Steidl",
+ "Steigauf",
+ "Stein",
+ "Steinecke",
+ "Steinert",
+ "Steinkamp",
+ "Steinmetz",
+ "Stelkens",
+ "Stengel",
+ "Stengl",
+ "Stenzel",
+ "Stepanov",
+ "Stephan",
+ "Stern",
+ "Steuk",
+ "Stief",
+ "Stifel",
+ "Stoll",
+ "Stolle",
+ "Stolz",
+ "Storl",
+ "Storp",
+ "Stoutjesdijk",
+ "Stratmann",
+ "Straub",
+ "Strausa",
+ "Streck",
+ "Streese",
+ "Strege",
+ "Streit",
+ "Streller",
+ "Strieder",
+ "Striezel",
+ "Strogies",
+ "Strohschank",
+ "Strunz",
+ "Strutz",
+ "Stube",
+ "Stöckert",
+ "Stöppler",
+ "Stöwer",
+ "Stürmer",
+ "Suffa",
+ "Sujew",
+ "Sussmann",
+ "Suthe",
+ "Sutschet",
+ "Swillims",
+ "Szendrei",
+ "Sören",
+ "Sürth",
+ "Tafelmeier",
+ "Tang",
+ "Tasche",
+ "Taufratshofer",
+ "Tegethof",
+ "Teichmann",
+ "Tepper",
+ "Terheiden",
+ "Terlecki",
+ "Teufel",
+ "Theele",
+ "Thieke",
+ "Thimm",
+ "Thiomas",
+ "Thomas",
+ "Thriene",
+ "Thränhardt",
+ "Thust",
+ "Thyssen",
+ "Thöne",
+ "Tidow",
+ "Tiedtke",
+ "Tietze",
+ "Tilgner",
+ "Tillack",
+ "Timmermann",
+ "Tischler",
+ "Tischmann",
+ "Tittman",
+ "Tivontschik",
+ "Tonat",
+ "Tonn",
+ "Trampeli",
+ "Trauth",
+ "Trautmann",
+ "Travan",
+ "Treff",
+ "Tremmel",
+ "Tress",
+ "Tsamonikian",
+ "Tschiers",
+ "Tschirch",
+ "Tuch",
+ "Tucholke",
+ "Tudow",
+ "Tuschmo",
+ "Tächl",
+ "Többen",
+ "Töpfer",
+ "Uhlemann",
+ "Uhlig",
+ "Uhrig",
+ "Uibel",
+ "Uliczka",
+ "Ullmann",
+ "Ullrich",
+ "Umbach",
+ "Umlauft",
+ "Umminger",
+ "Unger",
+ "Unterpaintner",
+ "Urban",
+ "Urbaniak",
+ "Urbansky",
+ "Urhig",
+ "Vahlensieck",
+ "Van",
+ "Vangermain",
+ "Vater",
+ "Venghaus",
+ "Verniest",
+ "Verzi",
+ "Vey",
+ "Viellehner",
+ "Vieweg",
+ "Voelkel",
+ "Vogel",
+ "Vogelgsang",
+ "Vogt",
+ "Voigt",
+ "Vokuhl",
+ "Volk",
+ "Volker",
+ "Volkmann",
+ "Von",
+ "Vona",
+ "Vontein",
+ "Wachenbrunner",
+ "Wachtel",
+ "Wagner",
+ "Waibel",
+ "Wakan",
+ "Waldmann",
+ "Wallner",
+ "Wallstab",
+ "Walter",
+ "Walther",
+ "Walton",
+ "Walz",
+ "Wanner",
+ "Wartenberg",
+ "Waschbüsch",
+ "Wassilew",
+ "Wassiluk",
+ "Weber",
+ "Wehrsen",
+ "Weidlich",
+ "Weidner",
+ "Weigel",
+ "Weight",
+ "Weiler",
+ "Weimer",
+ "Weis",
+ "Weiss",
+ "Weller",
+ "Welsch",
+ "Welz",
+ "Welzel",
+ "Weniger",
+ "Wenk",
+ "Werle",
+ "Werner",
+ "Werrmann",
+ "Wessel",
+ "Wessinghage",
+ "Weyel",
+ "Wezel",
+ "Wichmann",
+ "Wickert",
+ "Wiebe",
+ "Wiechmann",
+ "Wiegelmann",
+ "Wierig",
+ "Wiese",
+ "Wieser",
+ "Wilhelm",
+ "Wilky",
+ "Will",
+ "Willwacher",
+ "Wilts",
+ "Wimmer",
+ "Winkelmann",
+ "Winkler",
+ "Winter",
+ "Wischek",
+ "Wischer",
+ "Wissing",
+ "Wittich",
+ "Wittl",
+ "Wolf",
+ "Wolfarth",
+ "Wolff",
+ "Wollenberg",
+ "Wollmann",
+ "Woytkowska",
+ "Wujak",
+ "Wurm",
+ "Wyludda",
+ "Wölpert",
+ "Wöschler",
+ "Wühn",
+ "Wünsche",
+ "Zach",
+ "Zaczkiewicz",
+ "Zahn",
+ "Zaituc",
+ "Zandt",
+ "Zanner",
+ "Zapletal",
+ "Zauber",
+ "Zeidler",
+ "Zekl",
+ "Zender",
+ "Zeuch",
+ "Zeyen",
+ "Zeyhle",
+ "Ziegler",
+ "Zimanyi",
+ "Zimmer",
+ "Zimmermann",
+ "Zinser",
+ "Zintl",
+ "Zipp",
+ "Zipse",
+ "Zschunke",
+ "Zuber",
+ "Zwiener",
+ "Zümsande",
+ "Östringer",
+ "Überacker",
+ },
+ "name.suffix": []string{
+ "von",
+ "vom",
+ "von der",
+ },
+ "address.postcode": []string{
+ "#####",
+ "#####",
+ },
+ "address.state_abbr": []string{
+ "BB",
+ "BE",
+ "BW",
+ "BY",
+ "HB",
+ "HE",
+ "HH",
+ "MV",
+ "NI",
+ "NW",
+ "RP",
+ "SH",
+ "SL",
+ "SN",
+ "ST",
+ "TH",
+ },
+ "address.street_address": []string{
+ "#{street_name} #{building_number}",
+ },
+ "company.suffix": []string{
+ "GmbH",
+ "AG",
+ "Gruppe",
+ },
+ "address.secondary_address": []string{
+ "Apt. ###",
+ "Zimmer ###",
+ },
+ "address.city_suffix": []string{
+ "stadt",
+ "dorf",
+ "land",
+ "scheid",
+ "burg",
+ },
+ "address.city": []string{
+ "#{city_prefix} #{Name.first_name}#{city_suffix}",
+ "#{city_prefix} #{Name.first_name}",
+ "#{Name.first_name}#{city_suffix}",
+ "#{Name.last_name}#{city_suffix}",
+ },
+ "address.country": []string{
+ "Ägypten",
+ "Äquatorialguinea",
+ "Äthiopien",
+ "Österreich",
+ "Afghanistan",
+ "Albanien",
+ "Algerien",
+ "Amerikanisch-Samoa",
+ "Amerikanische Jungferninseln",
+ "Andorra",
+ "Angola",
+ "Anguilla",
+ "Antarktis",
+ "Antigua und Barbuda",
+ "Argentinien",
+ "Armenien",
+ "Aruba",
+ "Aserbaidschan",
+ "Australien",
+ "Bahamas",
+ "Bahrain",
+ "Bangladesch",
+ "Barbados",
+ "Belarus",
+ "Belgien",
+ "Belize",
+ "Benin",
+ "die Bermudas",
+ "Bhutan",
+ "Bolivien",
+ "Bosnien und Herzegowina",
+ "Botsuana",
+ "Bouvetinsel",
+ "Brasilien",
+ "Britische Jungferninseln",
+ "Britisches Territorium im Indischen Ozean",
+ "Brunei Darussalam",
+ "Bulgarien",
+ "Burkina Faso",
+ "Burundi",
+ "Chile",
+ "China",
+ "Cookinseln",
+ "Costa Rica",
+ "Dänemark",
+ "Demokratische Republik Kongo",
+ "Demokratische Volksrepublik Korea",
+ "Deutschland",
+ "Dominica",
+ "Dominikanische Republik",
+ "Dschibuti",
+ "Ecuador",
+ "El Salvador",
+ "Eritrea",
+ "Estland",
+ "Färöer",
+ "Falklandinseln",
+ "Fidschi",
+ "Finnland",
+ "Frankreich",
+ "Französisch-Guayana",
+ "Französisch-Polynesien",
+ "Französische Gebiete im südlichen Indischen Ozean",
+ "Gabun",
+ "Gambia",
+ "Georgien",
+ "Ghana",
+ "Gibraltar",
+ "Grönland",
+ "Grenada",
+ "Griechenland",
+ "Guadeloupe",
+ "Guam",
+ "Guatemala",
+ "Guinea",
+ "Guinea-Bissau",
+ "Guyana",
+ "Haiti",
+ "Heard und McDonaldinseln",
+ "Honduras",
+ "Hongkong",
+ "Indien",
+ "Indonesien",
+ "Irak",
+ "Iran",
+ "Irland",
+ "Island",
+ "Israel",
+ "Italien",
+ "Jamaika",
+ "Japan",
+ "Jemen",
+ "Jordanien",
+ "Jugoslawien",
+ "Kaimaninseln",
+ "Kambodscha",
+ "Kamerun",
+ "Kanada",
+ "Kap Verde",
+ "Kasachstan",
+ "Katar",
+ "Kenia",
+ "Kirgisistan",
+ "Kiribati",
+ "Kleinere amerikanische Überseeinseln",
+ "Kokosinseln",
+ "Kolumbien",
+ "Komoren",
+ "Kongo",
+ "Kroatien",
+ "Kuba",
+ "Kuwait",
+ "Laos",
+ "Lesotho",
+ "Lettland",
+ "Libanon",
+ "Liberia",
+ "Libyen",
+ "Liechtenstein",
+ "Litauen",
+ "Luxemburg",
+ "Macau",
+ "Madagaskar",
+ "Malawi",
+ "Malaysia",
+ "Malediven",
+ "Mali",
+ "Malta",
+ "ehemalige jugoslawische Republik Mazedonien",
+ "Marokko",
+ "Marshallinseln",
+ "Martinique",
+ "Mauretanien",
+ "Mauritius",
+ "Mayotte",
+ "Mexiko",
+ "Mikronesien",
+ "Monaco",
+ "Mongolei",
+ "Montserrat",
+ "Mosambik",
+ "Myanmar",
+ "Nördliche Marianen",
+ "Namibia",
+ "Nauru",
+ "Nepal",
+ "Neukaledonien",
+ "Neuseeland",
+ "Nicaragua",
+ "Niederländische Antillen",
+ "Niederlande",
+ "Niger",
+ "Nigeria",
+ "Niue",
+ "Norfolkinsel",
+ "Norwegen",
+ "Oman",
+ "Osttimor",
+ "Pakistan",
+ "Palau",
+ "Panama",
+ "Papua-Neuguinea",
+ "Paraguay",
+ "Peru",
+ "Philippinen",
+ "Pitcairninseln",
+ "Polen",
+ "Portugal",
+ "Puerto Rico",
+ "Réunion",
+ "Republik Korea",
+ "Republik Moldau",
+ "Ruanda",
+ "Rumänien",
+ "Russische Föderation",
+ "São Tomé und Príncipe",
+ "Südafrika",
+ "Südgeorgien und Südliche Sandwichinseln",
+ "Salomonen",
+ "Sambia",
+ "Samoa",
+ "San Marino",
+ "Saudi-Arabien",
+ "Schweden",
+ "Schweiz",
+ "Senegal",
+ "Seychellen",
+ "Sierra Leone",
+ "Simbabwe",
+ "Singapur",
+ "Slowakei",
+ "Slowenien",
+ "Somalien",
+ "Spanien",
+ "Sri Lanka",
+ "St. Helena",
+ "St. Kitts und Nevis",
+ "St. Lucia",
+ "St. Pierre und Miquelon",
+ "St. Vincent und die Grenadinen",
+ "Sudan",
+ "Surinam",
+ "Svalbard und Jan Mayen",
+ "Swasiland",
+ "Syrien",
+ "Türkei",
+ "Tadschikistan",
+ "Taiwan",
+ "Tansania",
+ "Thailand",
+ "Togo",
+ "Tokelau",
+ "Tonga",
+ "Trinidad und Tobago",
+ "Tschad",
+ "Tschechische Republik",
+ "Tunesien",
+ "Turkmenistan",
+ "Turks- und Caicosinseln",
+ "Tuvalu",
+ "Uganda",
+ "Ukraine",
+ "Ungarn",
+ "Uruguay",
+ "Usbekistan",
+ "Vanuatu",
+ "Vatikanstadt",
+ "Venezuela",
+ "Vereinigte Arabische Emirate",
+ "Vereinigte Staaten",
+ "Vereinigtes Königreich",
+ "Vietnam",
+ "Wallis und Futuna",
+ "Weihnachtsinsel",
+ "Westsahara",
+ "Zentralafrikanische Republik",
+ "Zypern",
+ },
+ "address.street_root": []string{
+ "Ackerweg",
+ "Adalbert-Stifter-Str.",
+ "Adalbertstr.",
+ "Adolf-Baeyer-Str.",
+ "Adolf-Kaschny-Str.",
+ "Adolf-Reichwein-Str.",
+ "Adolfsstr.",
+ "Ahornweg",
+ "Ahrstr.",
+ "Akazienweg",
+ "Albert-Einstein-Str.",
+ "Albert-Schweitzer-Str.",
+ "Albertus-Magnus-Str.",
+ "Albert-Zarthe-Weg",
+ "Albin-Edelmann-Str.",
+ "Albrecht-Haushofer-Str.",
+ "Aldegundisstr.",
+ "Alexanderstr.",
+ "Alfred-Delp-Str.",
+ "Alfred-Kubin-Str.",
+ "Alfred-Stock-Str.",
+ "Alkenrather Str.",
+ "Allensteiner Str.",
+ "Alsenstr.",
+ "Alt Steinbücheler Weg",
+ "Alte Garten",
+ "Alte Heide",
+ "Alte Landstr.",
+ "Alte Ziegelei",
+ "Altenberger Str.",
+ "Altenhof",
+ "Alter Grenzweg",
+ "Altstadtstr.",
+ "Am Alten Gaswerk",
+ "Am Alten Schafstall",
+ "Am Arenzberg",
+ "Am Benthal",
+ "Am Birkenberg",
+ "Am Blauen Berg",
+ "Am Borsberg",
+ "Am Brungen",
+ "Am Büchelter Hof",
+ "Am Buttermarkt",
+ "Am Ehrenfriedhof",
+ "Am Eselsdamm",
+ "Am Falkenberg",
+ "Am Frankenberg",
+ "Am Gesundheitspark",
+ "Am Gierlichshof",
+ "Am Graben",
+ "Am Hagelkreuz",
+ "Am Hang",
+ "Am Heidkamp",
+ "Am Hemmelrather Hof",
+ "Am Hofacker",
+ "Am Hohen Ufer",
+ "Am Höllers Eck",
+ "Am Hühnerberg",
+ "Am Jägerhof",
+ "Am Junkernkamp",
+ "Am Kemperstiegel",
+ "Am Kettnersbusch",
+ "Am Kiesberg",
+ "Am Klösterchen",
+ "Am Knechtsgraben",
+ "Am Köllerweg",
+ "Am Köttersbach",
+ "Am Kreispark",
+ "Am Kronefeld",
+ "Am Küchenhof",
+ "Am Kühnsbusch",
+ "Am Lindenfeld",
+ "Am Märchen",
+ "Am Mittelberg",
+ "Am Mönchshof",
+ "Am Mühlenbach",
+ "Am Neuenhof",
+ "Am Nonnenbruch",
+ "Am Plattenbusch",
+ "Am Quettinger Feld",
+ "Am Rosenhügel",
+ "Am Sandberg",
+ "Am Scherfenbrand",
+ "Am Schokker",
+ "Am Silbersee",
+ "Am Sonnenhang",
+ "Am Sportplatz",
+ "Am Stadtpark",
+ "Am Steinberg",
+ "Am Telegraf",
+ "Am Thelenhof",
+ "Am Vogelkreuz",
+ "Am Vogelsang",
+ "Am Vogelsfeldchen",
+ "Am Wambacher Hof",
+ "Am Wasserturm",
+ "Am Weidenbusch",
+ "Am Weiher",
+ "Am Weingarten",
+ "Am Werth",
+ "Amselweg",
+ "An den Irlen",
+ "An den Rheinauen",
+ "An der Bergerweide",
+ "An der Dingbank",
+ "An der Evangelischen Kirche",
+ "An der Evgl. Kirche",
+ "An der Feldgasse",
+ "An der Fettehenne",
+ "An der Kante",
+ "An der Laach",
+ "An der Lehmkuhle",
+ "An der Lichtenburg",
+ "An der Luisenburg",
+ "An der Robertsburg",
+ "An der Schmitten",
+ "An der Schusterinsel",
+ "An der Steinrütsch",
+ "An St. Andreas",
+ "An St. Remigius",
+ "Andreasstr.",
+ "Ankerweg",
+ "Annette-Kolb-Str.",
+ "Apenrader Str.",
+ "Arnold-Ohletz-Str.",
+ "Atzlenbacher Str.",
+ "Auerweg",
+ "Auestr.",
+ "Auf dem Acker",
+ "Auf dem Blahnenhof",
+ "Auf dem Bohnbüchel",
+ "Auf dem Bruch",
+ "Auf dem End",
+ "Auf dem Forst",
+ "Auf dem Herberg",
+ "Auf dem Lehn",
+ "Auf dem Stein",
+ "Auf dem Weierberg",
+ "Auf dem Weiherhahn",
+ "Auf den Reien",
+ "Auf der Donnen",
+ "Auf der Grieße",
+ "Auf der Ohmer",
+ "Auf der Weide",
+ "Auf'm Berg",
+ "Auf'm Kamp",
+ "Augustastr.",
+ "August-Kekulé-Str.",
+ "A.-W.-v.-Hofmann-Str.",
+ "Bahnallee",
+ "Bahnhofstr.",
+ "Baltrumstr.",
+ "Bamberger Str.",
+ "Baumberger Str.",
+ "Bebelstr.",
+ "Beckers Kämpchen",
+ "Beerenstr.",
+ "Beethovenstr.",
+ "Behringstr.",
+ "Bendenweg",
+ "Bensberger Str.",
+ "Benzstr.",
+ "Bergische Landstr.",
+ "Bergstr.",
+ "Berliner Platz",
+ "Berliner Str.",
+ "Bernhard-Letterhaus-Str.",
+ "Bernhard-Lichtenberg-Str.",
+ "Bernhard-Ridder-Str.",
+ "Bernsteinstr.",
+ "Bertha-Middelhauve-Str.",
+ "Bertha-von-Suttner-Str.",
+ "Bertolt-Brecht-Str.",
+ "Berzeliusstr.",
+ "Bielertstr.",
+ "Biesenbach",
+ "Billrothstr.",
+ "Birkenbergstr.",
+ "Birkengartenstr.",
+ "Birkenweg",
+ "Bismarckstr.",
+ "Bitterfelder Str.",
+ "Blankenburg",
+ "Blaukehlchenweg",
+ "Blütenstr.",
+ "Boberstr.",
+ "Böcklerstr.",
+ "Bodelschwinghstr.",
+ "Bodestr.",
+ "Bogenstr.",
+ "Bohnenkampsweg",
+ "Bohofsweg",
+ "Bonifatiusstr.",
+ "Bonner Str.",
+ "Borkumstr.",
+ "Bornheimer Str.",
+ "Borsigstr.",
+ "Borussiastr.",
+ "Bracknellstr.",
+ "Brahmsweg",
+ "Brandenburger Str.",
+ "Breidenbachstr.",
+ "Breslauer Str.",
+ "Bruchhauser Str.",
+ "Brückenstr.",
+ "Brucknerstr.",
+ "Brüder-Bonhoeffer-Str.",
+ "Buchenweg",
+ "Bürgerbuschweg",
+ "Burgloch",
+ "Burgplatz",
+ "Burgstr.",
+ "Burgweg",
+ "Bürriger Weg",
+ "Burscheider Str.",
+ "Buschkämpchen",
+ "Butterheider Str.",
+ "Carl-Duisberg-Platz",
+ "Carl-Duisberg-Str.",
+ "Carl-Leverkus-Str.",
+ "Carl-Maria-von-Weber-Platz",
+ "Carl-Maria-von-Weber-Str.",
+ "Carlo-Mierendorff-Str.",
+ "Carl-Rumpff-Str.",
+ "Carl-von-Ossietzky-Str.",
+ "Charlottenburger Str.",
+ "Christian-Heß-Str.",
+ "Claasbruch",
+ "Clemens-Winkler-Str.",
+ "Concordiastr.",
+ "Cranachstr.",
+ "Dahlemer Str.",
+ "Daimlerstr.",
+ "Damaschkestr.",
+ "Danziger Str.",
+ "Debengasse",
+ "Dechant-Fein-Str.",
+ "Dechant-Krey-Str.",
+ "Deichtorstr.",
+ "Dhünnberg",
+ "Dhünnstr.",
+ "Dianastr.",
+ "Diedenhofener Str.",
+ "Diepental",
+ "Diepenthaler Str.",
+ "Dieselstr.",
+ "Dillinger Str.",
+ "Distelkamp",
+ "Dohrgasse",
+ "Domblick",
+ "Dönhoffstr.",
+ "Dornierstr.",
+ "Drachenfelsstr.",
+ "Dr.-August-Blank-Str.",
+ "Dresdener Str.",
+ "Driescher Hecke",
+ "Drosselweg",
+ "Dudweilerstr.",
+ "Dünenweg",
+ "Dünfelder Str.",
+ "Dünnwalder Grenzweg",
+ "Düppeler Str.",
+ "Dürerstr.",
+ "Dürscheider Weg",
+ "Düsseldorfer Str.",
+ "Edelrather Weg",
+ "Edmund-Husserl-Str.",
+ "Eduard-Spranger-Str.",
+ "Ehrlichstr.",
+ "Eichenkamp",
+ "Eichenweg",
+ "Eidechsenweg",
+ "Eifelstr.",
+ "Eifgenstr.",
+ "Eintrachtstr.",
+ "Elbestr.",
+ "Elisabeth-Langgässer-Str.",
+ "Elisabethstr.",
+ "Elisabeth-von-Thadden-Str.",
+ "Elisenstr.",
+ "Elsa-Brändström-Str.",
+ "Elsbachstr.",
+ "Else-Lasker-Schüler-Str.",
+ "Elsterstr.",
+ "Emil-Fischer-Str.",
+ "Emil-Nolde-Str.",
+ "Engelbertstr.",
+ "Engstenberger Weg",
+ "Entenpfuhl",
+ "Erbelegasse",
+ "Erftstr.",
+ "Erfurter Str.",
+ "Erich-Heckel-Str.",
+ "Erich-Klausener-Str.",
+ "Erich-Ollenhauer-Str.",
+ "Erlenweg",
+ "Ernst-Bloch-Str.",
+ "Ernst-Ludwig-Kirchner-Str.",
+ "Erzbergerstr.",
+ "Eschenallee",
+ "Eschenweg",
+ "Esmarchstr.",
+ "Espenweg",
+ "Euckenstr.",
+ "Eulengasse",
+ "Eulenkamp",
+ "Ewald-Flamme-Str.",
+ "Ewald-Röll-Str.",
+ "Fährstr.",
+ "Farnweg",
+ "Fasanenweg",
+ "Faßbacher Hof",
+ "Felderstr.",
+ "Feldkampstr.",
+ "Feldsiefer Weg",
+ "Feldsiefer Wiesen",
+ "Feldstr.",
+ "Feldtorstr.",
+ "Felix-von-Roll-Str.",
+ "Ferdinand-Lassalle-Str.",
+ "Fester Weg",
+ "Feuerbachstr.",
+ "Feuerdornweg",
+ "Fichtenweg",
+ "Fichtestr.",
+ "Finkelsteinstr.",
+ "Finkenweg",
+ "Fixheider Str.",
+ "Flabbenhäuschen",
+ "Flensburger Str.",
+ "Fliederweg",
+ "Florastr.",
+ "Florianweg",
+ "Flotowstr.",
+ "Flurstr.",
+ "Föhrenweg",
+ "Fontanestr.",
+ "Forellental",
+ "Fortunastr.",
+ "Franz-Esser-Str.",
+ "Franz-Hitze-Str.",
+ "Franz-Kail-Str.",
+ "Franz-Marc-Str.",
+ "Freiburger Str.",
+ "Freiheitstr.",
+ "Freiherr-vom-Stein-Str.",
+ "Freudenthal",
+ "Freudenthaler Weg",
+ "Fridtjof-Nansen-Str.",
+ "Friedenberger Str.",
+ "Friedensstr.",
+ "Friedhofstr.",
+ "Friedlandstr.",
+ "Friedlieb-Ferdinand-Runge-Str.",
+ "Friedrich-Bayer-Str.",
+ "Friedrich-Bergius-Platz",
+ "Friedrich-Ebert-Platz",
+ "Friedrich-Ebert-Str.",
+ "Friedrich-Engels-Str.",
+ "Friedrich-List-Str.",
+ "Friedrich-Naumann-Str.",
+ "Friedrich-Sertürner-Str.",
+ "Friedrichstr.",
+ "Friedrich-Weskott-Str.",
+ "Friesenweg",
+ "Frischenberg",
+ "Fritz-Erler-Str.",
+ "Fritz-Henseler-Str.",
+ "Fröbelstr.",
+ "Fürstenbergplatz",
+ "Fürstenbergstr.",
+ "Gabriele-Münter-Str.",
+ "Gartenstr.",
+ "Gebhardstr.",
+ "Geibelstr.",
+ "Gellertstr.",
+ "Georg-von-Vollmar-Str.",
+ "Gerhard-Domagk-Str.",
+ "Gerhart-Hauptmann-Str.",
+ "Gerichtsstr.",
+ "Geschwister-Scholl-Str.",
+ "Gezelinallee",
+ "Gierener Weg",
+ "Ginsterweg",
+ "Gisbert-Cremer-Str.",
+ "Glücksburger Str.",
+ "Gluckstr.",
+ "Gneisenaustr.",
+ "Goetheplatz",
+ "Goethestr.",
+ "Golo-Mann-Str.",
+ "Görlitzer Str.",
+ "Görresstr.",
+ "Graebestr.",
+ "Graf-Galen-Platz",
+ "Gregor-Mendel-Str.",
+ "Greifswalder Str.",
+ "Grillenweg",
+ "Gronenborner Weg",
+ "Große Kirchstr.",
+ "Grunder Wiesen",
+ "Grundermühle",
+ "Grundermühlenhof",
+ "Grundermühlenweg",
+ "Grüner Weg",
+ "Grunewaldstr.",
+ "Grünstr.",
+ "Günther-Weisenborn-Str.",
+ "Gustav-Freytag-Str.",
+ "Gustav-Heinemann-Str.",
+ "Gustav-Radbruch-Str.",
+ "Gut Reuschenberg",
+ "Gutenbergstr.",
+ "Haberstr.",
+ "Habichtgasse",
+ "Hafenstr.",
+ "Hagenauer Str.",
+ "Hahnenblecher",
+ "Halenseestr.",
+ "Halfenleimbach",
+ "Hallesche Str.",
+ "Halligstr.",
+ "Hamberger Str.",
+ "Hammerweg",
+ "Händelstr.",
+ "Hannah-Höch-Str.",
+ "Hans-Arp-Str.",
+ "Hans-Gerhard-Str.",
+ "Hans-Sachs-Str.",
+ "Hans-Schlehahn-Str.",
+ "Hans-von-Dohnanyi-Str.",
+ "Hardenbergstr.",
+ "Haselweg",
+ "Hauptstr.",
+ "Haus-Vorster-Str.",
+ "Hauweg",
+ "Havelstr.",
+ "Havensteinstr.",
+ "Haydnstr.",
+ "Hebbelstr.",
+ "Heckenweg",
+ "Heerweg",
+ "Hegelstr.",
+ "Heidberg",
+ "Heidehöhe",
+ "Heidestr.",
+ "Heimstättenweg",
+ "Heinrich-Böll-Str.",
+ "Heinrich-Brüning-Str.",
+ "Heinrich-Claes-Str.",
+ "Heinrich-Heine-Str.",
+ "Heinrich-Hörlein-Str.",
+ "Heinrich-Lübke-Str.",
+ "Heinrich-Lützenkirchen-Weg",
+ "Heinrichstr.",
+ "Heinrich-Strerath-Str.",
+ "Heinrich-von-Kleist-Str.",
+ "Heinrich-von-Stephan-Str.",
+ "Heisterbachstr.",
+ "Helenenstr.",
+ "Helmestr.",
+ "Hemmelrather Weg",
+ "Henry-T.-v.-Böttinger-Str.",
+ "Herderstr.",
+ "Heribertstr.",
+ "Hermann-Ehlers-Str.",
+ "Hermann-Hesse-Str.",
+ "Hermann-König-Str.",
+ "Hermann-Löns-Str.",
+ "Hermann-Milde-Str.",
+ "Hermann-Nörrenberg-Str.",
+ "Hermann-von-Helmholtz-Str.",
+ "Hermann-Waibel-Str.",
+ "Herzogstr.",
+ "Heymannstr.",
+ "Hindenburgstr.",
+ "Hirzenberg",
+ "Hitdorfer Kirchweg",
+ "Hitdorfer Str.",
+ "Höfer Mühle",
+ "Höfer Weg",
+ "Hohe Str.",
+ "Höhenstr.",
+ "Höltgestal",
+ "Holunderweg",
+ "Holzer Weg",
+ "Holzer Wiesen",
+ "Hornpottweg",
+ "Hubertusweg",
+ "Hufelandstr.",
+ "Hufer Weg",
+ "Humboldtstr.",
+ "Hummelsheim",
+ "Hummelweg",
+ "Humperdinckstr.",
+ "Hüscheider Gärten",
+ "Hüscheider Str.",
+ "Hütte",
+ "Ilmstr.",
+ "Im Bergischen Heim",
+ "Im Bruch",
+ "Im Buchenhain",
+ "Im Bühl",
+ "Im Burgfeld",
+ "Im Dorf",
+ "Im Eisholz",
+ "Im Friedenstal",
+ "Im Frohental",
+ "Im Grunde",
+ "Im Hederichsfeld",
+ "Im Jücherfeld",
+ "Im Kalkfeld",
+ "Im Kirberg",
+ "Im Kirchfeld",
+ "Im Kreuzbruch",
+ "Im Mühlenfeld",
+ "Im Nesselrader Kamp",
+ "Im Oberdorf",
+ "Im Oberfeld",
+ "Im Rosengarten",
+ "Im Rottland",
+ "Im Scheffengarten",
+ "Im Staderfeld",
+ "Im Steinfeld",
+ "Im Weidenblech",
+ "Im Winkel",
+ "Im Ziegelfeld",
+ "Imbach",
+ "Imbacher Weg",
+ "Immenweg",
+ "In den Blechenhöfen",
+ "In den Dehlen",
+ "In der Birkenau",
+ "In der Dasladen",
+ "In der Felderhütten",
+ "In der Hartmannswiese",
+ "In der Höhle",
+ "In der Schaafsdellen",
+ "In der Wasserkuhl",
+ "In der Wüste",
+ "In Holzhausen",
+ "Insterstr.",
+ "Jacob-Fröhlen-Str.",
+ "Jägerstr.",
+ "Jahnstr.",
+ "Jakob-Eulenberg-Weg",
+ "Jakobistr.",
+ "Jakob-Kaiser-Str.",
+ "Jenaer Str.",
+ "Johannes-Baptist-Str.",
+ "Johannes-Dott-Str.",
+ "Johannes-Popitz-Str.",
+ "Johannes-Wislicenus-Str.",
+ "Johannisburger Str.",
+ "Johann-Janssen-Str.",
+ "Johann-Wirtz-Weg",
+ "Josefstr.",
+ "Jüch",
+ "Julius-Doms-Str.",
+ "Julius-Leber-Str.",
+ "Kaiserplatz",
+ "Kaiserstr.",
+ "Kaiser-Wilhelm-Allee",
+ "Kalkstr.",
+ "Kämpchenstr.",
+ "Kämpenwiese",
+ "Kämper Weg",
+ "Kamptalweg",
+ "Kanalstr.",
+ "Kandinskystr.",
+ "Kantstr.",
+ "Kapellenstr.",
+ "Karl-Arnold-Str.",
+ "Karl-Bosch-Str.",
+ "Karl-Bückart-Str.",
+ "Karl-Carstens-Ring",
+ "Karl-Friedrich-Goerdeler-Str.",
+ "Karl-Jaspers-Str.",
+ "Karl-König-Str.",
+ "Karl-Krekeler-Str.",
+ "Karl-Marx-Str.",
+ "Karlstr.",
+ "Karl-Ulitzka-Str.",
+ "Karl-Wichmann-Str.",
+ "Karl-Wingchen-Str.",
+ "Käsenbrod",
+ "Käthe-Kollwitz-Str.",
+ "Katzbachstr.",
+ "Kerschensteinerstr.",
+ "Kiefernweg",
+ "Kieler Str.",
+ "Kieselstr.",
+ "Kiesweg",
+ "Kinderhausen",
+ "Kleiberweg",
+ "Kleine Kirchstr.",
+ "Kleingansweg",
+ "Kleinheider Weg",
+ "Klief",
+ "Kneippstr.",
+ "Knochenbergsweg",
+ "Kochergarten",
+ "Kocherstr.",
+ "Kockelsberg",
+ "Kolberger Str.",
+ "Kolmarer Str.",
+ "Kölner Gasse",
+ "Kölner Str.",
+ "Kolpingstr.",
+ "Königsberger Platz",
+ "Konrad-Adenauer-Platz",
+ "Köpenicker Str.",
+ "Kopernikusstr.",
+ "Körnerstr.",
+ "Köschenberg",
+ "Köttershof",
+ "Kreuzbroicher Str.",
+ "Kreuzkamp",
+ "Krummer Weg",
+ "Kruppstr.",
+ "Kuhlmannweg",
+ "Kump",
+ "Kumper Weg",
+ "Kunstfeldstr.",
+ "Küppersteger Str.",
+ "Kursiefen",
+ "Kursiefer Weg",
+ "Kurtekottenweg",
+ "Kurt-Schumacher-Ring",
+ "Kyllstr.",
+ "Langenfelder Str.",
+ "Längsleimbach",
+ "Lärchenweg",
+ "Legienstr.",
+ "Lehner Mühle",
+ "Leichlinger Str.",
+ "Leimbacher Hof",
+ "Leinestr.",
+ "Leineweberstr.",
+ "Leipziger Str.",
+ "Lerchengasse",
+ "Lessingstr.",
+ "Libellenweg",
+ "Lichstr.",
+ "Liebigstr.",
+ "Lindenstr.",
+ "Lingenfeld",
+ "Linienstr.",
+ "Lippe",
+ "Löchergraben",
+ "Löfflerstr.",
+ "Loheweg",
+ "Lohrbergstr.",
+ "Lohrstr.",
+ "Löhstr.",
+ "Lortzingstr.",
+ "Lötzener Str.",
+ "Löwenburgstr.",
+ "Lucasstr.",
+ "Ludwig-Erhard-Platz",
+ "Ludwig-Girtler-Str.",
+ "Ludwig-Knorr-Str.",
+ "Luisenstr.",
+ "Lupinenweg",
+ "Lurchenweg",
+ "Lützenkirchener Str.",
+ "Lycker Str.",
+ "Maashofstr.",
+ "Manforter Str.",
+ "Marc-Chagall-Str.",
+ "Maria-Dresen-Str.",
+ "Maria-Terwiel-Str.",
+ "Marie-Curie-Str.",
+ "Marienburger Str.",
+ "Mariendorfer Str.",
+ "Marienwerderstr.",
+ "Marie-Schlei-Str.",
+ "Marktplatz",
+ "Markusweg",
+ "Martin-Buber-Str.",
+ "Martin-Heidegger-Str.",
+ "Martin-Luther-Str.",
+ "Masurenstr.",
+ "Mathildenweg",
+ "Maurinusstr.",
+ "Mauspfad",
+ "Max-Beckmann-Str.",
+ "Max-Delbrück-Str.",
+ "Max-Ernst-Str.",
+ "Max-Holthausen-Platz",
+ "Max-Horkheimer-Str.",
+ "Max-Liebermann-Str.",
+ "Max-Pechstein-Str.",
+ "Max-Planck-Str.",
+ "Max-Scheler-Str.",
+ "Max-Schönenberg-Str.",
+ "Maybachstr.",
+ "Meckhofer Feld",
+ "Meisenweg",
+ "Memelstr.",
+ "Menchendahler Str.",
+ "Mendelssohnstr.",
+ "Merziger Str.",
+ "Mettlacher Str.",
+ "Metzer Str.",
+ "Michaelsweg",
+ "Miselohestr.",
+ "Mittelstr.",
+ "Mohlenstr.",
+ "Moltkestr.",
+ "Monheimer Str.",
+ "Montanusstr.",
+ "Montessoriweg",
+ "Moosweg",
+ "Morsbroicher Str.",
+ "Moselstr.",
+ "Moskauer Str.",
+ "Mozartstr.",
+ "Mühlenweg",
+ "Muhrgasse",
+ "Muldestr.",
+ "Mülhausener Str.",
+ "Mülheimer Str.",
+ "Münsters Gäßchen",
+ "Münzstr.",
+ "Müritzstr.",
+ "Myliusstr.",
+ "Nachtigallenweg",
+ "Nauener Str.",
+ "Neißestr.",
+ "Nelly-Sachs-Str.",
+ "Netzestr.",
+ "Neuendriesch",
+ "Neuenhausgasse",
+ "Neuenkamp",
+ "Neujudenhof",
+ "Neukronenberger Str.",
+ "Neustadtstr.",
+ "Nicolai-Hartmann-Str.",
+ "Niederblecher",
+ "Niederfeldstr.",
+ "Nietzschestr.",
+ "Nikolaus-Groß-Str.",
+ "Nobelstr.",
+ "Norderneystr.",
+ "Nordstr.",
+ "Ober dem Hof",
+ "Obere Lindenstr.",
+ "Obere Str.",
+ "Oberölbach",
+ "Odenthaler Str.",
+ "Oderstr.",
+ "Okerstr.",
+ "Olof-Palme-Str.",
+ "Ophovener Str.",
+ "Opladener Platz",
+ "Opladener Str.",
+ "Ortelsburger Str.",
+ "Oskar-Moll-Str.",
+ "Oskar-Schlemmer-Str.",
+ "Oststr.",
+ "Oswald-Spengler-Str.",
+ "Otto-Dix-Str.",
+ "Otto-Grimm-Str.",
+ "Otto-Hahn-Str.",
+ "Otto-Müller-Str.",
+ "Otto-Stange-Str.",
+ "Ottostr.",
+ "Otto-Varnhagen-Str.",
+ "Otto-Wels-Str.",
+ "Ottweilerstr.",
+ "Oulustr.",
+ "Overfeldweg",
+ "Pappelweg",
+ "Paracelsusstr.",
+ "Parkstr.",
+ "Pastor-Louis-Str.",
+ "Pastor-Scheibler-Str.",
+ "Pastorskamp",
+ "Paul-Klee-Str.",
+ "Paul-Löbe-Str.",
+ "Paulstr.",
+ "Peenestr.",
+ "Pescher Busch",
+ "Peschstr.",
+ "Pestalozzistr.",
+ "Peter-Grieß-Str.",
+ "Peter-Joseph-Lenné-Str.",
+ "Peter-Neuenheuser-Str.",
+ "Petersbergstr.",
+ "Peterstr.",
+ "Pfarrer-Jekel-Str.",
+ "Pfarrer-Klein-Str.",
+ "Pfarrer-Röhr-Str.",
+ "Pfeilshofstr.",
+ "Philipp-Ott-Str.",
+ "Piet-Mondrian-Str.",
+ "Platanenweg",
+ "Pommernstr.",
+ "Porschestr.",
+ "Poststr.",
+ "Potsdamer Str.",
+ "Pregelstr.",
+ "Prießnitzstr.",
+ "Pützdelle",
+ "Quarzstr.",
+ "Quettinger Str.",
+ "Rat-Deycks-Str.",
+ "Rathenaustr.",
+ "Ratherkämp",
+ "Ratiborer Str.",
+ "Raushofstr.",
+ "Regensburger Str.",
+ "Reinickendorfer Str.",
+ "Renkgasse",
+ "Rennbaumplatz",
+ "Rennbaumstr.",
+ "Reuschenberger Str.",
+ "Reusrather Str.",
+ "Reuterstr.",
+ "Rheinallee",
+ "Rheindorfer Str.",
+ "Rheinstr.",
+ "Rhein-Wupper-Platz",
+ "Richard-Wagner-Str.",
+ "Rilkestr.",
+ "Ringstr.",
+ "Robert-Blum-Str.",
+ "Robert-Koch-Str.",
+ "Robert-Medenwald-Str.",
+ "Rolandstr.",
+ "Romberg",
+ "Röntgenstr.",
+ "Roonstr.",
+ "Ropenstall",
+ "Ropenstaller Weg",
+ "Rosenthal",
+ "Rostocker Str.",
+ "Rotdornweg",
+ "Röttgerweg",
+ "Rückertstr.",
+ "Rudolf-Breitscheid-Str.",
+ "Rudolf-Mann-Platz",
+ "Rudolf-Stracke-Str.",
+ "Ruhlachplatz",
+ "Ruhlachstr.",
+ "Rüttersweg",
+ "Saalestr.",
+ "Saarbrücker Str.",
+ "Saarlauterner Str.",
+ "Saarstr.",
+ "Salamanderweg",
+ "Samlandstr.",
+ "Sanddornstr.",
+ "Sandstr.",
+ "Sauerbruchstr.",
+ "Schäfershütte",
+ "Scharnhorststr.",
+ "Scheffershof",
+ "Scheidemannstr.",
+ "Schellingstr.",
+ "Schenkendorfstr.",
+ "Schießbergstr.",
+ "Schillerstr.",
+ "Schlangenhecke",
+ "Schlebuscher Heide",
+ "Schlebuscher Str.",
+ "Schlebuschrath",
+ "Schlehdornstr.",
+ "Schleiermacherstr.",
+ "Schloßstr.",
+ "Schmalenbruch",
+ "Schnepfenflucht",
+ "Schöffenweg",
+ "Schöllerstr.",
+ "Schöne Aussicht",
+ "Schöneberger Str.",
+ "Schopenhauerstr.",
+ "Schubertplatz",
+ "Schubertstr.",
+ "Schulberg",
+ "Schulstr.",
+ "Schumannstr.",
+ "Schwalbenweg",
+ "Schwarzastr.",
+ "Sebastianusweg",
+ "Semmelweisstr.",
+ "Siebelplatz",
+ "Siemensstr.",
+ "Solinger Str.",
+ "Sonderburger Str.",
+ "Spandauer Str.",
+ "Speestr.",
+ "Sperberweg",
+ "Sperlingsweg",
+ "Spitzwegstr.",
+ "Sporrenberger Mühle",
+ "Spreestr.",
+ "St. Ingberter Str.",
+ "Starenweg",
+ "Stauffenbergstr.",
+ "Stefan-Zweig-Str.",
+ "Stegerwaldstr.",
+ "Steglitzer Str.",
+ "Steinbücheler Feld",
+ "Steinbücheler Str.",
+ "Steinstr.",
+ "Steinweg",
+ "Stephan-Lochner-Str.",
+ "Stephanusstr.",
+ "Stettiner Str.",
+ "Stixchesstr.",
+ "Stöckenstr.",
+ "Stralsunder Str.",
+ "Straßburger Str.",
+ "Stresemannplatz",
+ "Strombergstr.",
+ "Stromstr.",
+ "Stüttekofener Str.",
+ "Sudestr.",
+ "Sürderstr.",
+ "Syltstr.",
+ "Talstr.",
+ "Tannenbergstr.",
+ "Tannenweg",
+ "Taubenweg",
+ "Teitscheider Weg",
+ "Telegrafenstr.",
+ "Teltower Str.",
+ "Tempelhofer Str.",
+ "Theodor-Adorno-Str.",
+ "Theodor-Fliedner-Str.",
+ "Theodor-Gierath-Str.",
+ "Theodor-Haubach-Str.",
+ "Theodor-Heuss-Ring",
+ "Theodor-Storm-Str.",
+ "Theodorstr.",
+ "Thomas-Dehler-Str.",
+ "Thomas-Morus-Str.",
+ "Thomas-von-Aquin-Str.",
+ "Tönges Feld",
+ "Torstr.",
+ "Treptower Str.",
+ "Treuburger Str.",
+ "Uhlandstr.",
+ "Ulmenweg",
+ "Ulmer Str.",
+ "Ulrichstr.",
+ "Ulrich-von-Hassell-Str.",
+ "Umlag",
+ "Unstrutstr.",
+ "Unter dem Schildchen",
+ "Unterölbach",
+ "Unterstr.",
+ "Uppersberg",
+ "Van\\'t-Hoff-Str.",
+ "Veit-Stoß-Str.",
+ "Vereinsstr.",
+ "Viktor-Meyer-Str.",
+ "Vincent-van-Gogh-Str.",
+ "Virchowstr.",
+ "Voigtslach",
+ "Volhardstr.",
+ "Völklinger Str.",
+ "Von-Brentano-Str.",
+ "Von-Diergardt-Str.",
+ "Von-Eichendorff-Str.",
+ "Von-Ketteler-Str.",
+ "Von-Knoeringen-Str.",
+ "Von-Pettenkofer-Str.",
+ "Von-Siebold-Str.",
+ "Wacholderweg",
+ "Waldstr.",
+ "Walter-Flex-Str.",
+ "Walter-Hempel-Str.",
+ "Walter-Hochapfel-Str.",
+ "Walter-Nernst-Str.",
+ "Wannseestr.",
+ "Warnowstr.",
+ "Warthestr.",
+ "Weddigenstr.",
+ "Weichselstr.",
+ "Weidenstr.",
+ "Weidfeldstr.",
+ "Weiherfeld",
+ "Weiherstr.",
+ "Weinhäuser Str.",
+ "Weißdornweg",
+ "Weißenseestr.",
+ "Weizkamp",
+ "Werftstr.",
+ "Werkstättenstr.",
+ "Werner-Heisenberg-Str.",
+ "Werrastr.",
+ "Weyerweg",
+ "Widdauener Str.",
+ "Wiebertshof",
+ "Wiehbachtal",
+ "Wiembachallee",
+ "Wiesdorfer Platz",
+ "Wiesenstr.",
+ "Wilhelm-Busch-Str.",
+ "Wilhelm-Hastrich-Str.",
+ "Wilhelm-Leuschner-Str.",
+ "Wilhelm-Liebknecht-Str.",
+ "Wilhelmsgasse",
+ "Wilhelmstr.",
+ "Willi-Baumeister-Str.",
+ "Willy-Brandt-Ring",
+ "Winand-Rossi-Str.",
+ "Windthorststr.",
+ "Winkelweg",
+ "Winterberg",
+ "Wittenbergstr.",
+ "Wolf-Vostell-Str.",
+ "Wolkenburgstr.",
+ "Wupperstr.",
+ "Wuppertalstr.",
+ "Wüstenhof",
+ "Yitzhak-Rabin-Str.",
+ "Zauberkuhle",
+ "Zedernweg",
+ "Zehlendorfer Str.",
+ "Zehntenweg",
+ "Zeisigweg",
+ "Zeppelinstr.",
+ "Zschopaustr.",
+ "Zum Claashäuschen",
+ "Zündhütchenweg",
+ "Zur Alten Brauerei",
+ "Zur alten Fabrik",
+ },
+ "cell_phone.formats": []string{
+ "+49-1##-#######",
+ "+49-1###-########",
+ },
+ "name.first_name": []string{
+ "Aaron",
+ "Abdul",
+ "Abdullah",
+ "Adam",
+ "Adrian",
+ "Adriano",
+ "Ahmad",
+ "Ahmed",
+ "Ahmet",
+ "Alan",
+ "Albert",
+ "Alessandro",
+ "Alessio",
+ "Alex",
+ "Alexander",
+ "Alfred",
+ "Ali",
+ "Amar",
+ "Amir",
+ "Amon",
+ "Andre",
+ "Andreas",
+ "Andrew",
+ "Angelo",
+ "Ansgar",
+ "Anthony",
+ "Anton",
+ "Antonio",
+ "Arda",
+ "Arian",
+ "Armin",
+ "Arne",
+ "Arno",
+ "Arthur",
+ "Artur",
+ "Arved",
+ "Arvid",
+ "Ayman",
+ "Baran",
+ "Baris",
+ "Bastian",
+ "Batuhan",
+ "Bela",
+ "Ben",
+ "Benedikt",
+ "Benjamin",
+ "Bennet",
+ "Bennett",
+ "Benno",
+ "Bent",
+ "Berat",
+ "Berkay",
+ "Bernd",
+ "Bilal",
+ "Bjarne",
+ "Björn",
+ "Bo",
+ "Boris",
+ "Brandon",
+ "Brian",
+ "Bruno",
+ "Bryan",
+ "Burak",
+ "Calvin",
+ "Can",
+ "Carl",
+ "Carlo",
+ "Carlos",
+ "Caspar",
+ "Cedric",
+ "Cedrik",
+ "Cem",
+ "Charlie",
+ "Chris",
+ "Christian",
+ "Christiano",
+ "Christoph",
+ "Christopher",
+ "Claas",
+ "Clemens",
+ "Colin",
+ "Collin",
+ "Conner",
+ "Connor",
+ "Constantin",
+ "Corvin",
+ "Curt",
+ "Damian",
+ "Damien",
+ "Daniel",
+ "Danilo",
+ "Danny",
+ "Darian",
+ "Dario",
+ "Darius",
+ "Darren",
+ "David",
+ "Davide",
+ "Davin",
+ "Dean",
+ "Deniz",
+ "Dennis",
+ "Denny",
+ "Devin",
+ "Diego",
+ "Dion",
+ "Domenic",
+ "Domenik",
+ "Dominic",
+ "Dominik",
+ "Dorian",
+ "Dustin",
+ "Dylan",
+ "Ecrin",
+ "Eddi",
+ "Eddy",
+ "Edgar",
+ "Edwin",
+ "Efe",
+ "Ege",
+ "Elia",
+ "Eliah",
+ "Elias",
+ "Elijah",
+ "Emanuel",
+ "Emil",
+ "Emilian",
+ "Emilio",
+ "Emir",
+ "Emirhan",
+ "Emre",
+ "Enes",
+ "Enno",
+ "Enrico",
+ "Eren",
+ "Eric",
+ "Erik",
+ "Etienne",
+ "Fabian",
+ "Fabien",
+ "Fabio",
+ "Fabrice",
+ "Falk",
+ "Felix",
+ "Ferdinand",
+ "Fiete",
+ "Filip",
+ "Finlay",
+ "Finley",
+ "Finn",
+ "Finnley",
+ "Florian",
+ "Francesco",
+ "Franz",
+ "Frederic",
+ "Frederick",
+ "Frederik",
+ "Friedrich",
+ "Fritz",
+ "Furkan",
+ "Fynn",
+ "Gabriel",
+ "Georg",
+ "Gerrit",
+ "Gian",
+ "Gianluca",
+ "Gino",
+ "Giuliano",
+ "Giuseppe",
+ "Gregor",
+ "Gustav",
+ "Hagen",
+ "Hamza",
+ "Hannes",
+ "Hanno",
+ "Hans",
+ "Hasan",
+ "Hassan",
+ "Hauke",
+ "Hendrik",
+ "Hennes",
+ "Henning",
+ "Henri",
+ "Henrick",
+ "Henrik",
+ "Henry",
+ "Hugo",
+ "Hussein",
+ "Ian",
+ "Ibrahim",
+ "Ilias",
+ "Ilja",
+ "Ilyas",
+ "Immanuel",
+ "Ismael",
+ "Ismail",
+ "Ivan",
+ "Iven",
+ "Jack",
+ "Jacob",
+ "Jaden",
+ "Jakob",
+ "Jamal",
+ "James",
+ "Jamie",
+ "Jan",
+ "Janek",
+ "Janis",
+ "Janne",
+ "Jannek",
+ "Jannes",
+ "Jannik",
+ "Jannis",
+ "Jano",
+ "Janosch",
+ "Jared",
+ "Jari",
+ "Jarne",
+ "Jarno",
+ "Jaron",
+ "Jason",
+ "Jasper",
+ "Jay",
+ "Jayden",
+ "Jayson",
+ "Jean",
+ "Jens",
+ "Jeremias",
+ "Jeremie",
+ "Jeremy",
+ "Jermaine",
+ "Jerome",
+ "Jesper",
+ "Jesse",
+ "Jim",
+ "Jimmy",
+ "Joe",
+ "Joel",
+ "Joey",
+ "Johann",
+ "Johannes",
+ "John",
+ "Johnny",
+ "Jon",
+ "Jona",
+ "Jonah",
+ "Jonas",
+ "Jonathan",
+ "Jonte",
+ "Joost",
+ "Jordan",
+ "Joris",
+ "Joscha",
+ "Joschua",
+ "Josef",
+ "Joseph",
+ "Josh",
+ "Joshua",
+ "Josua",
+ "Juan",
+ "Julian",
+ "Julien",
+ "Julius",
+ "Juri",
+ "Justin",
+ "Justus",
+ "Kaan",
+ "Kai",
+ "Kalle",
+ "Karim",
+ "Karl",
+ "Karlo",
+ "Kay",
+ "Keanu",
+ "Kenan",
+ "Kenny",
+ "Keno",
+ "Kerem",
+ "Kerim",
+ "Kevin",
+ "Kian",
+ "Kilian",
+ "Kim",
+ "Kimi",
+ "Kjell",
+ "Klaas",
+ "Klemens",
+ "Konrad",
+ "Konstantin",
+ "Koray",
+ "Korbinian",
+ "Kurt",
+ "Lars",
+ "Lasse",
+ "Laurence",
+ "Laurens",
+ "Laurenz",
+ "Laurin",
+ "Lean",
+ "Leander",
+ "Leandro",
+ "Leif",
+ "Len",
+ "Lenn",
+ "Lennard",
+ "Lennart",
+ "Lennert",
+ "Lennie",
+ "Lennox",
+ "Lenny",
+ "Leo",
+ "Leon",
+ "Leonard",
+ "Leonardo",
+ "Leonhard",
+ "Leonidas",
+ "Leopold",
+ "Leroy",
+ "Levent",
+ "Levi",
+ "Levin",
+ "Lewin",
+ "Lewis",
+ "Liam",
+ "Lian",
+ "Lias",
+ "Lino",
+ "Linus",
+ "Lio",
+ "Lion",
+ "Lionel",
+ "Logan",
+ "Lorenz",
+ "Lorenzo",
+ "Loris",
+ "Louis",
+ "Luan",
+ "Luc",
+ "Luca",
+ "Lucas",
+ "Lucian",
+ "Lucien",
+ "Ludwig",
+ "Luis",
+ "Luiz",
+ "Luk",
+ "Luka",
+ "Lukas",
+ "Luke",
+ "Lutz",
+ "Maddox",
+ "Mads",
+ "Magnus",
+ "Maik",
+ "Maksim",
+ "Malik",
+ "Malte",
+ "Manuel",
+ "Marc",
+ "Marcel",
+ "Marco",
+ "Marcus",
+ "Marek",
+ "Marian",
+ "Mario",
+ "Marius",
+ "Mark",
+ "Marko",
+ "Markus",
+ "Marlo",
+ "Marlon",
+ "Marten",
+ "Martin",
+ "Marvin",
+ "Marwin",
+ "Mateo",
+ "Mathis",
+ "Matis",
+ "Mats",
+ "Matteo",
+ "Mattes",
+ "Matthias",
+ "Matthis",
+ "Matti",
+ "Mattis",
+ "Maurice",
+ "Max",
+ "Maxim",
+ "Maximilian",
+ "Mehmet",
+ "Meik",
+ "Melvin",
+ "Merlin",
+ "Mert",
+ "Michael",
+ "Michel",
+ "Mick",
+ "Miguel",
+ "Mika",
+ "Mikail",
+ "Mike",
+ "Milan",
+ "Milo",
+ "Mio",
+ "Mirac",
+ "Mirco",
+ "Mirko",
+ "Mohamed",
+ "Mohammad",
+ "Mohammed",
+ "Moritz",
+ "Morten",
+ "Muhammed",
+ "Murat",
+ "Mustafa",
+ "Nathan",
+ "Nathanael",
+ "Nelson",
+ "Neo",
+ "Nevio",
+ "Nick",
+ "Niclas",
+ "Nico",
+ "Nicolai",
+ "Nicolas",
+ "Niels",
+ "Nikita",
+ "Niklas",
+ "Niko",
+ "Nikolai",
+ "Nikolas",
+ "Nils",
+ "Nino",
+ "Noah",
+ "Noel",
+ "Norman",
+ "Odin",
+ "Oke",
+ "Ole",
+ "Oliver",
+ "Omar",
+ "Onur",
+ "Oscar",
+ "Oskar",
+ "Pascal",
+ "Patrice",
+ "Patrick",
+ "Paul",
+ "Peer",
+ "Pepe",
+ "Peter",
+ "Phil",
+ "Philip",
+ "Philipp",
+ "Pierre",
+ "Piet",
+ "Pit",
+ "Pius",
+ "Quentin",
+ "Quirin",
+ "Rafael",
+ "Raik",
+ "Ramon",
+ "Raphael",
+ "Rasmus",
+ "Raul",
+ "Rayan",
+ "René",
+ "Ricardo",
+ "Riccardo",
+ "Richard",
+ "Rick",
+ "Rico",
+ "Robert",
+ "Robin",
+ "Rocco",
+ "Roman",
+ "Romeo",
+ "Ron",
+ "Ruben",
+ "Ryan",
+ "Said",
+ "Salih",
+ "Sam",
+ "Sami",
+ "Sammy",
+ "Samuel",
+ "Sandro",
+ "Santino",
+ "Sascha",
+ "Sean",
+ "Sebastian",
+ "Selim",
+ "Semih",
+ "Shawn",
+ "Silas",
+ "Simeon",
+ "Simon",
+ "Sinan",
+ "Sky",
+ "Stefan",
+ "Steffen",
+ "Stephan",
+ "Steve",
+ "Steven",
+ "Sven",
+ "Sönke",
+ "Sören",
+ "Taha",
+ "Tamino",
+ "Tammo",
+ "Tarik",
+ "Tayler",
+ "Taylor",
+ "Teo",
+ "Theo",
+ "Theodor",
+ "Thies",
+ "Thilo",
+ "Thomas",
+ "Thorben",
+ "Thore",
+ "Thorge",
+ "Tiago",
+ "Til",
+ "Till",
+ "Tillmann",
+ "Tim",
+ "Timm",
+ "Timo",
+ "Timon",
+ "Timothy",
+ "Tino",
+ "Titus",
+ "Tizian",
+ "Tjark",
+ "Tobias",
+ "Tom",
+ "Tommy",
+ "Toni",
+ "Tony",
+ "Torben",
+ "Tore",
+ "Tristan",
+ "Tyler",
+ "Tyron",
+ "Umut",
+ "Valentin",
+ "Valentino",
+ "Veit",
+ "Victor",
+ "Viktor",
+ "Vin",
+ "Vincent",
+ "Vito",
+ "Vitus",
+ "Wilhelm",
+ "Willi",
+ "William",
+ "Willy",
+ "Xaver",
+ "Yannic",
+ "Yannick",
+ "Yannik",
+ "Yannis",
+ "Yasin",
+ "Youssef",
+ "Yunus",
+ "Yusuf",
+ "Yven",
+ "Yves",
+ "Ömer",
+ "Aaliyah",
+ "Abby",
+ "Abigail",
+ "Ada",
+ "Adelina",
+ "Adriana",
+ "Aileen",
+ "Aimee",
+ "Alana",
+ "Alea",
+ "Alena",
+ "Alessa",
+ "Alessia",
+ "Alexa",
+ "Alexandra",
+ "Alexia",
+ "Alexis",
+ "Aleyna",
+ "Alia",
+ "Alica",
+ "Alice",
+ "Alicia",
+ "Alina",
+ "Alisa",
+ "Alisha",
+ "Alissa",
+ "Aliya",
+ "Aliyah",
+ "Allegra",
+ "Alma",
+ "Alyssa",
+ "Amalia",
+ "Amanda",
+ "Amelia",
+ "Amelie",
+ "Amina",
+ "Amira",
+ "Amy",
+ "Ana",
+ "Anabel",
+ "Anastasia",
+ "Andrea",
+ "Angela",
+ "Angelina",
+ "Angelique",
+ "Anja",
+ "Ann",
+ "Anna",
+ "Annabel",
+ "Annabell",
+ "Annabelle",
+ "Annalena",
+ "Anne",
+ "Anneke",
+ "Annelie",
+ "Annemarie",
+ "Anni",
+ "Annie",
+ "Annika",
+ "Anny",
+ "Anouk",
+ "Antonia",
+ "Arda",
+ "Ariana",
+ "Ariane",
+ "Arwen",
+ "Ashley",
+ "Asya",
+ "Aurelia",
+ "Aurora",
+ "Ava",
+ "Ayleen",
+ "Aylin",
+ "Ayse",
+ "Azra",
+ "Betty",
+ "Bianca",
+ "Bianka",
+ "Caitlin",
+ "Cara",
+ "Carina",
+ "Carla",
+ "Carlotta",
+ "Carmen",
+ "Carolin",
+ "Carolina",
+ "Caroline",
+ "Cassandra",
+ "Catharina",
+ "Catrin",
+ "Cecile",
+ "Cecilia",
+ "Celia",
+ "Celina",
+ "Celine",
+ "Ceyda",
+ "Ceylin",
+ "Chantal",
+ "Charleen",
+ "Charlotta",
+ "Charlotte",
+ "Chayenne",
+ "Cheyenne",
+ "Chiara",
+ "Christin",
+ "Christina",
+ "Cindy",
+ "Claire",
+ "Clara",
+ "Clarissa",
+ "Colleen",
+ "Collien",
+ "Cora",
+ "Corinna",
+ "Cosima",
+ "Dana",
+ "Daniela",
+ "Daria",
+ "Darleen",
+ "Defne",
+ "Delia",
+ "Denise",
+ "Diana",
+ "Dilara",
+ "Dina",
+ "Dorothea",
+ "Ecrin",
+ "Eda",
+ "Eileen",
+ "Ela",
+ "Elaine",
+ "Elanur",
+ "Elea",
+ "Elena",
+ "Eleni",
+ "Eleonora",
+ "Eliana",
+ "Elif",
+ "Elina",
+ "Elisa",
+ "Elisabeth",
+ "Ella",
+ "Ellen",
+ "Elli",
+ "Elly",
+ "Elsa",
+ "Emelie",
+ "Emely",
+ "Emilia",
+ "Emilie",
+ "Emily",
+ "Emma",
+ "Emmely",
+ "Emmi",
+ "Emmy",
+ "Enie",
+ "Enna",
+ "Enya",
+ "Esma",
+ "Estelle",
+ "Esther",
+ "Eva",
+ "Evelin",
+ "Evelina",
+ "Eveline",
+ "Evelyn",
+ "Fabienne",
+ "Fatima",
+ "Fatma",
+ "Felicia",
+ "Felicitas",
+ "Felina",
+ "Femke",
+ "Fenja",
+ "Fine",
+ "Finia",
+ "Finja",
+ "Finnja",
+ "Fiona",
+ "Flora",
+ "Florentine",
+ "Francesca",
+ "Franka",
+ "Franziska",
+ "Frederike",
+ "Freya",
+ "Frida",
+ "Frieda",
+ "Friederike",
+ "Giada",
+ "Gina",
+ "Giulia",
+ "Giuliana",
+ "Greta",
+ "Hailey",
+ "Hana",
+ "Hanna",
+ "Hannah",
+ "Heidi",
+ "Helen",
+ "Helena",
+ "Helene",
+ "Helin",
+ "Henriette",
+ "Henrike",
+ "Hermine",
+ "Ida",
+ "Ilayda",
+ "Imke",
+ "Ina",
+ "Ines",
+ "Inga",
+ "Inka",
+ "Irem",
+ "Isa",
+ "Isabel",
+ "Isabell",
+ "Isabella",
+ "Isabelle",
+ "Ivonne",
+ "Jacqueline",
+ "Jamie",
+ "Jamila",
+ "Jana",
+ "Jane",
+ "Janin",
+ "Janina",
+ "Janine",
+ "Janna",
+ "Janne",
+ "Jara",
+ "Jasmin",
+ "Jasmina",
+ "Jasmine",
+ "Jella",
+ "Jenna",
+ "Jennifer",
+ "Jenny",
+ "Jessica",
+ "Jessy",
+ "Jette",
+ "Jil",
+ "Jill",
+ "Joana",
+ "Joanna",
+ "Joelina",
+ "Joeline",
+ "Joelle",
+ "Johanna",
+ "Joleen",
+ "Jolie",
+ "Jolien",
+ "Jolin",
+ "Jolina",
+ "Joline",
+ "Jona",
+ "Jonah",
+ "Jonna",
+ "Josefin",
+ "Josefine",
+ "Josephin",
+ "Josephine",
+ "Josie",
+ "Josy",
+ "Joy",
+ "Joyce",
+ "Judith",
+ "Judy",
+ "Jule",
+ "Julia",
+ "Juliana",
+ "Juliane",
+ "Julie",
+ "Julienne",
+ "Julika",
+ "Julina",
+ "Juna",
+ "Justine",
+ "Kaja",
+ "Karina",
+ "Karla",
+ "Karlotta",
+ "Karolina",
+ "Karoline",
+ "Kassandra",
+ "Katarina",
+ "Katharina",
+ "Kathrin",
+ "Katja",
+ "Katrin",
+ "Kaya",
+ "Kayra",
+ "Kiana",
+ "Kiara",
+ "Kim",
+ "Kimberley",
+ "Kimberly",
+ "Kira",
+ "Klara",
+ "Korinna",
+ "Kristin",
+ "Kyra",
+ "Laila",
+ "Lana",
+ "Lara",
+ "Larissa",
+ "Laura",
+ "Laureen",
+ "Lavinia",
+ "Lea",
+ "Leah",
+ "Leana",
+ "Leandra",
+ "Leann",
+ "Lee",
+ "Leila",
+ "Lena",
+ "Lene",
+ "Leni",
+ "Lenia",
+ "Lenja",
+ "Lenya",
+ "Leona",
+ "Leoni",
+ "Leonie",
+ "Leonora",
+ "Leticia",
+ "Letizia",
+ "Levke",
+ "Leyla",
+ "Lia",
+ "Liah",
+ "Liana",
+ "Lili",
+ "Lilia",
+ "Lilian",
+ "Liliana",
+ "Lilith",
+ "Lilli",
+ "Lillian",
+ "Lilly",
+ "Lily",
+ "Lina",
+ "Linda",
+ "Lindsay",
+ "Line",
+ "Linn",
+ "Linnea",
+ "Lisa",
+ "Lisann",
+ "Lisanne",
+ "Liv",
+ "Livia",
+ "Liz",
+ "Lola",
+ "Loreen",
+ "Lorena",
+ "Lotta",
+ "Lotte",
+ "Louisa",
+ "Louise",
+ "Luana",
+ "Luca",
+ "Lucia",
+ "Lucie",
+ "Lucienne",
+ "Lucy",
+ "Luisa",
+ "Luise",
+ "Luka",
+ "Luna",
+ "Luzie",
+ "Lya",
+ "Lydia",
+ "Lyn",
+ "Lynn",
+ "Madeleine",
+ "Madita",
+ "Madleen",
+ "Madlen",
+ "Magdalena",
+ "Maike",
+ "Mailin",
+ "Maira",
+ "Maja",
+ "Malena",
+ "Malia",
+ "Malin",
+ "Malina",
+ "Mandy",
+ "Mara",
+ "Marah",
+ "Mareike",
+ "Maren",
+ "Maria",
+ "Mariam",
+ "Marie",
+ "Marieke",
+ "Mariella",
+ "Marika",
+ "Marina",
+ "Marisa",
+ "Marissa",
+ "Marit",
+ "Marla",
+ "Marleen",
+ "Marlen",
+ "Marlena",
+ "Marlene",
+ "Marta",
+ "Martha",
+ "Mary",
+ "Maryam",
+ "Mathilda",
+ "Mathilde",
+ "Matilda",
+ "Maxi",
+ "Maxima",
+ "Maxine",
+ "Maya",
+ "Mayra",
+ "Medina",
+ "Medine",
+ "Meike",
+ "Melanie",
+ "Melek",
+ "Melike",
+ "Melina",
+ "Melinda",
+ "Melis",
+ "Melisa",
+ "Melissa",
+ "Merle",
+ "Merve",
+ "Meryem",
+ "Mette",
+ "Mia",
+ "Michaela",
+ "Michelle",
+ "Mieke",
+ "Mila",
+ "Milana",
+ "Milena",
+ "Milla",
+ "Mina",
+ "Mira",
+ "Miray",
+ "Miriam",
+ "Mirja",
+ "Mona",
+ "Monique",
+ "Nadine",
+ "Nadja",
+ "Naemi",
+ "Nancy",
+ "Naomi",
+ "Natalia",
+ "Natalie",
+ "Nathalie",
+ "Neele",
+ "Nela",
+ "Nele",
+ "Nelli",
+ "Nelly",
+ "Nia",
+ "Nicole",
+ "Nika",
+ "Nike",
+ "Nikita",
+ "Nila",
+ "Nina",
+ "Nisa",
+ "Noemi",
+ "Nora",
+ "Olivia",
+ "Patricia",
+ "Patrizia",
+ "Paula",
+ "Paulina",
+ "Pauline",
+ "Penelope",
+ "Philine",
+ "Phoebe",
+ "Pia",
+ "Rahel",
+ "Rania",
+ "Rebecca",
+ "Rebekka",
+ "Riana",
+ "Rieke",
+ "Rike",
+ "Romina",
+ "Romy",
+ "Ronja",
+ "Rosa",
+ "Rosalie",
+ "Ruby",
+ "Sabrina",
+ "Sahra",
+ "Sally",
+ "Salome",
+ "Samantha",
+ "Samia",
+ "Samira",
+ "Sandra",
+ "Sandy",
+ "Sanja",
+ "Saphira",
+ "Sara",
+ "Sarah",
+ "Saskia",
+ "Selin",
+ "Selina",
+ "Selma",
+ "Sena",
+ "Sidney",
+ "Sienna",
+ "Silja",
+ "Sina",
+ "Sinja",
+ "Smilla",
+ "Sofia",
+ "Sofie",
+ "Sonja",
+ "Sophia",
+ "Sophie",
+ "Soraya",
+ "Stefanie",
+ "Stella",
+ "Stephanie",
+ "Stina",
+ "Sude",
+ "Summer",
+ "Susanne",
+ "Svea",
+ "Svenja",
+ "Sydney",
+ "Tabea",
+ "Talea",
+ "Talia",
+ "Tamara",
+ "Tamia",
+ "Tamina",
+ "Tanja",
+ "Tara",
+ "Tarja",
+ "Teresa",
+ "Tessa",
+ "Thalea",
+ "Thalia",
+ "Thea",
+ "Theresa",
+ "Tia",
+ "Tina",
+ "Tomke",
+ "Tuana",
+ "Valentina",
+ "Valeria",
+ "Valerie",
+ "Vanessa",
+ "Vera",
+ "Veronika",
+ "Victoria",
+ "Viktoria",
+ "Viola",
+ "Vivian",
+ "Vivien",
+ "Vivienne",
+ "Wibke",
+ "Wiebke",
+ "Xenia",
+ "Yara",
+ "Yaren",
+ "Yasmin",
+ "Ylvi",
+ "Ylvie",
+ "Yvonne",
+ "Zara",
+ "Zehra",
+ "Zeynep",
+ "Zoe",
+ "Zoey",
+ "Zoé",
+ },
+ "name.prefix": []string{
+ "Hr.",
+ "Fr.",
+ "Dr.",
+ "Prof. Dr.",
+ },
+ },
+ "en-us": map[string][]string{
+ "internet.domain_suffix": []string{
+ "com",
+ "us",
+ "biz",
+ "info",
+ "name",
+ "net",
+ "org",
+ },
+ },
+ "en": map[string][]string{
+ "company.suffix": []string{
+ "Inc",
+ "and Sons",
+ "LLC",
+ "Group",
+ },
+ "address.secondary_address": []string{
+ "Apt. ###",
+ "Suite ###",
+ },
+ "address.city_suffix": []string{
+ "town",
+ "ton",
+ "land",
+ "ville",
+ "berg",
+ "burgh",
+ "borough",
+ "bury",
+ "view",
+ "port",
+ "mouth",
+ "stad",
+ "furt",
+ "chester",
+ "mouth",
+ "fort",
+ "haven",
+ "side",
+ "shire",
+ },
+ "address.city": []string{
+ "#{city_prefix} #{Name.first_name}#{city_suffix}",
+ "#{city_prefix} #{Name.first_name}",
+ "#{Name.first_name}#{city_suffix}",
+ "#{Name.last_name}#{city_suffix}",
+ },
+ "name.title.descriptor": []string{
+ "Central",
+ "Chief",
+ "Corporate",
+ "Customer",
+ "Direct",
+ "District",
+ "Dynamic",
+ "Dynamic",
+ "Forward",
+ "Future",
+ "Global",
+ "Human",
+ "Internal",
+ "International",
+ "Investor",
+ "Lead",
+ "Legacy",
+ "National",
+ "Principal",
+ "Product",
+ "Regional",
+ "Senior",
+ },
+ "address.country": []string{
+ "Afghanistan",
+ "Albania",
+ "Algeria",
+ "American Samoa",
+ "Andorra",
+ "Angola",
+ "Anguilla",
+ "Antarctica (the territory South of 60 deg S)",
+ "Antigua and Barbuda",
+ "Argentina",
+ "Armenia",
+ "Aruba",
+ "Australia",
+ "Austria",
+ "Azerbaijan",
+ "Bahamas",
+ "Bahrain",
+ "Bangladesh",
+ "Barbados",
+ "Belarus",
+ "Belgium",
+ "Belize",
+ "Benin",
+ "Bermuda",
+ "Bhutan",
+ "Bolivia",
+ "Bosnia and Herzegovina",
+ "Botswana",
+ "Bouvet Island (Bouvetoya)",
+ "Brazil",
+ "British Indian Ocean Territory (Chagos Archipelago)",
+ "British Virgin Islands",
+ "Brunei Darussalam",
+ "Bulgaria",
+ "Burkina Faso",
+ "Burundi",
+ "Cambodia",
+ "Cameroon",
+ "Canada",
+ "Cape Verde",
+ "Cayman Islands",
+ "Central African Republic",
+ "Chad",
+ "Chile",
+ "China",
+ "Christmas Island",
+ "Cocos (Keeling) Islands",
+ "Colombia",
+ "Comoros",
+ "Congo",
+ "Congo",
+ "Cook Islands",
+ "Costa Rica",
+ "Cote d'Ivoire",
+ "Croatia",
+ "Cuba",
+ "Cyprus",
+ "Czech Republic",
+ "Denmark",
+ "Djibouti",
+ "Dominica",
+ "Dominican Republic",
+ "Ecuador",
+ "Egypt",
+ "El Salvador",
+ "Equatorial Guinea",
+ "Eritrea",
+ "Estonia",
+ "Ethiopia",
+ "Faroe Islands",
+ "Falkland Islands (Malvinas)",
+ "Fiji",
+ "Finland",
+ "France",
+ "French Guiana",
+ "French Polynesia",
+ "French Southern Territories",
+ "Gabon",
+ "Gambia",
+ "Georgia",
+ "Germany",
+ "Ghana",
+ "Gibraltar",
+ "Greece",
+ "Greenland",
+ "Grenada",
+ "Guadeloupe",
+ "Guam",
+ "Guatemala",
+ "Guernsey",
+ "Guinea",
+ "Guinea-Bissau",
+ "Guyana",
+ "Haiti",
+ "Heard Island and McDonald Islands",
+ "Holy See (Vatican City State)",
+ "Honduras",
+ "Hong Kong",
+ "Hungary",
+ "Iceland",
+ "India",
+ "Indonesia",
+ "Iran",
+ "Iraq",
+ "Ireland",
+ "Isle of Man",
+ "Israel",
+ "Italy",
+ "Jamaica",
+ "Japan",
+ "Jersey",
+ "Jordan",
+ "Kazakhstan",
+ "Kenya",
+ "Kiribati",
+ "Korea",
+ "Korea",
+ "Kuwait",
+ "Kyrgyz Republic",
+ "Lao People's Democratic Republic",
+ "Latvia",
+ "Lebanon",
+ "Lesotho",
+ "Liberia",
+ "Libyan Arab Jamahiriya",
+ "Liechtenstein",
+ "Lithuania",
+ "Luxembourg",
+ "Macao",
+ "Macedonia",
+ "Madagascar",
+ "Malawi",
+ "Malaysia",
+ "Maldives",
+ "Mali",
+ "Malta",
+ "Marshall Islands",
+ "Martinique",
+ "Mauritania",
+ "Mauritius",
+ "Mayotte",
+ "Mexico",
+ "Micronesia",
+ "Moldova",
+ "Monaco",
+ "Mongolia",
+ "Montenegro",
+ "Montserrat",
+ "Morocco",
+ "Mozambique",
+ "Myanmar",
+ "Namibia",
+ "Nauru",
+ "Nepal",
+ "Netherlands Antilles",
+ "Netherlands",
+ "New Caledonia",
+ "New Zealand",
+ "Nicaragua",
+ "Niger",
+ "Nigeria",
+ "Niue",
+ "Norfolk Island",
+ "Northern Mariana Islands",
+ "Norway",
+ "Oman",
+ "Pakistan",
+ "Palau",
+ "Palestinian Territory",
+ "Panama",
+ "Papua New Guinea",
+ "Paraguay",
+ "Peru",
+ "Philippines",
+ "Pitcairn Islands",
+ "Poland",
+ "Portugal",
+ "Puerto Rico",
+ "Qatar",
+ "Reunion",
+ "Romania",
+ "Russian Federation",
+ "Rwanda",
+ "Saint Barthelemy",
+ "Saint Helena",
+ "Saint Kitts and Nevis",
+ "Saint Lucia",
+ "Saint Martin",
+ "Saint Pierre and Miquelon",
+ "Saint Vincent and the Grenadines",
+ "Samoa",
+ "San Marino",
+ "Sao Tome and Principe",
+ "Saudi Arabia",
+ "Senegal",
+ "Serbia",
+ "Seychelles",
+ "Sierra Leone",
+ "Singapore",
+ "Slovakia (Slovak Republic)",
+ "Slovenia",
+ "Solomon Islands",
+ "Somalia",
+ "South Africa",
+ "South Georgia and the South Sandwich Islands",
+ "Spain",
+ "Sri Lanka",
+ "Sudan",
+ "Suriname",
+ "Svalbard & Jan Mayen Islands",
+ "Swaziland",
+ "Sweden",
+ "Switzerland",
+ "Syrian Arab Republic",
+ "Taiwan",
+ "Tajikistan",
+ "Tanzania",
+ "Thailand",
+ "Timor-Leste",
+ "Togo",
+ "Tokelau",
+ "Tonga",
+ "Trinidad and Tobago",
+ "Tunisia",
+ "Turkey",
+ "Turkmenistan",
+ "Turks and Caicos Islands",
+ "Tuvalu",
+ "Uganda",
+ "Ukraine",
+ "United Arab Emirates",
+ "United Kingdom",
+ "United States of America",
+ "United States Minor Outlying Islands",
+ "United States Virgin Islands",
+ "Uruguay",
+ "Uzbekistan",
+ "Vanuatu",
+ "Venezuela",
+ "Vietnam",
+ "Wallis and Futuna",
+ "Western Sahara",
+ "Yemen",
+ "Zambia",
+ "Zimbabwe",
+ },
+ "name.first_name": []string{
+ "Aaliyah",
+ "Aaron",
+ "Abagail",
+ "Abbey",
+ "Abbie",
+ "Abbigail",
+ "Abby",
+ "Abdiel",
+ "Abdul",
+ "Abdullah",
+ "Abe",
+ "Abel",
+ "Abelardo",
+ "Abigail",
+ "Abigale",
+ "Abigayle",
+ "Abner",
+ "Abraham",
+ "Ada",
+ "Adah",
+ "Adalberto",
+ "Adaline",
+ "Adam",
+ "Adan",
+ "Addie",
+ "Addison",
+ "Adela",
+ "Adelbert",
+ "Adele",
+ "Adelia",
+ "Adeline",
+ "Adell",
+ "Adella",
+ "Adelle",
+ "Aditya",
+ "Adolf",
+ "Adolfo",
+ "Adolph",
+ "Adolphus",
+ "Adonis",
+ "Adrain",
+ "Adrian",
+ "Adriana",
+ "Adrianna",
+ "Adriel",
+ "Adrien",
+ "Adrienne",
+ "Afton",
+ "Aglae",
+ "Agnes",
+ "Agustin",
+ "Agustina",
+ "Ahmad",
+ "Ahmed",
+ "Aida",
+ "Aidan",
+ "Aiden",
+ "Aileen",
+ "Aimee",
+ "Aisha",
+ "Aiyana",
+ "Akeem",
+ "Al",
+ "Alaina",
+ "Alan",
+ "Alana",
+ "Alanis",
+ "Alanna",
+ "Alayna",
+ "Alba",
+ "Albert",
+ "Alberta",
+ "Albertha",
+ "Alberto",
+ "Albin",
+ "Albina",
+ "Alda",
+ "Alden",
+ "Alec",
+ "Aleen",
+ "Alejandra",
+ "Alejandrin",
+ "Alek",
+ "Alena",
+ "Alene",
+ "Alessandra",
+ "Alessandro",
+ "Alessia",
+ "Aletha",
+ "Alex",
+ "Alexa",
+ "Alexander",
+ "Alexandra",
+ "Alexandre",
+ "Alexandrea",
+ "Alexandria",
+ "Alexandrine",
+ "Alexandro",
+ "Alexane",
+ "Alexanne",
+ "Alexie",
+ "Alexis",
+ "Alexys",
+ "Alexzander",
+ "Alf",
+ "Alfonso",
+ "Alfonzo",
+ "Alford",
+ "Alfred",
+ "Alfreda",
+ "Alfredo",
+ "Ali",
+ "Alia",
+ "Alice",
+ "Alicia",
+ "Alisa",
+ "Alisha",
+ "Alison",
+ "Alivia",
+ "Aliya",
+ "Aliyah",
+ "Aliza",
+ "Alize",
+ "Allan",
+ "Allen",
+ "Allene",
+ "Allie",
+ "Allison",
+ "Ally",
+ "Alphonso",
+ "Alta",
+ "Althea",
+ "Alva",
+ "Alvah",
+ "Alvena",
+ "Alvera",
+ "Alverta",
+ "Alvina",
+ "Alvis",
+ "Alyce",
+ "Alycia",
+ "Alysa",
+ "Alysha",
+ "Alyson",
+ "Alysson",
+ "Amalia",
+ "Amanda",
+ "Amani",
+ "Amara",
+ "Amari",
+ "Amaya",
+ "Amber",
+ "Ambrose",
+ "Amelia",
+ "Amelie",
+ "Amely",
+ "America",
+ "Americo",
+ "Amie",
+ "Amina",
+ "Amir",
+ "Amira",
+ "Amiya",
+ "Amos",
+ "Amparo",
+ "Amy",
+ "Amya",
+ "Ana",
+ "Anabel",
+ "Anabelle",
+ "Anahi",
+ "Anais",
+ "Anastacio",
+ "Anastasia",
+ "Anderson",
+ "Andre",
+ "Andreane",
+ "Andreanne",
+ "Andres",
+ "Andrew",
+ "Andy",
+ "Angel",
+ "Angela",
+ "Angelica",
+ "Angelina",
+ "Angeline",
+ "Angelita",
+ "Angelo",
+ "Angie",
+ "Angus",
+ "Anibal",
+ "Anika",
+ "Anissa",
+ "Anita",
+ "Aniya",
+ "Aniyah",
+ "Anjali",
+ "Anna",
+ "Annabel",
+ "Annabell",
+ "Annabelle",
+ "Annalise",
+ "Annamae",
+ "Annamarie",
+ "Anne",
+ "Annetta",
+ "Annette",
+ "Annie",
+ "Ansel",
+ "Ansley",
+ "Anthony",
+ "Antoinette",
+ "Antone",
+ "Antonetta",
+ "Antonette",
+ "Antonia",
+ "Antonietta",
+ "Antonina",
+ "Antonio",
+ "Antwan",
+ "Antwon",
+ "Anya",
+ "April",
+ "Ara",
+ "Araceli",
+ "Aracely",
+ "Arch",
+ "Archibald",
+ "Ardella",
+ "Arden",
+ "Ardith",
+ "Arely",
+ "Ari",
+ "Ariane",
+ "Arianna",
+ "Aric",
+ "Ariel",
+ "Arielle",
+ "Arjun",
+ "Arlene",
+ "Arlie",
+ "Arlo",
+ "Armand",
+ "Armando",
+ "Armani",
+ "Arnaldo",
+ "Arne",
+ "Arno",
+ "Arnold",
+ "Arnoldo",
+ "Arnulfo",
+ "Aron",
+ "Art",
+ "Arthur",
+ "Arturo",
+ "Arvel",
+ "Arvid",
+ "Arvilla",
+ "Aryanna",
+ "Asa",
+ "Asha",
+ "Ashlee",
+ "Ashleigh",
+ "Ashley",
+ "Ashly",
+ "Ashlynn",
+ "Ashton",
+ "Ashtyn",
+ "Asia",
+ "Assunta",
+ "Astrid",
+ "Athena",
+ "Aubree",
+ "Aubrey",
+ "Audie",
+ "Audra",
+ "Audreanne",
+ "Audrey",
+ "August",
+ "Augusta",
+ "Augustine",
+ "Augustus",
+ "Aurelia",
+ "Aurelie",
+ "Aurelio",
+ "Aurore",
+ "Austen",
+ "Austin",
+ "Austyn",
+ "Autumn",
+ "Ava",
+ "Avery",
+ "Avis",
+ "Axel",
+ "Ayana",
+ "Ayden",
+ "Ayla",
+ "Aylin",
+ "Baby",
+ "Bailee",
+ "Bailey",
+ "Barbara",
+ "Barney",
+ "Baron",
+ "Barrett",
+ "Barry",
+ "Bart",
+ "Bartholome",
+ "Barton",
+ "Baylee",
+ "Beatrice",
+ "Beau",
+ "Beaulah",
+ "Bell",
+ "Bella",
+ "Belle",
+ "Ben",
+ "Benedict",
+ "Benjamin",
+ "Bennett",
+ "Bennie",
+ "Benny",
+ "Benton",
+ "Berenice",
+ "Bernadette",
+ "Bernadine",
+ "Bernard",
+ "Bernardo",
+ "Berneice",
+ "Bernhard",
+ "Bernice",
+ "Bernie",
+ "Berniece",
+ "Bernita",
+ "Berry",
+ "Bert",
+ "Berta",
+ "Bertha",
+ "Bertram",
+ "Bertrand",
+ "Beryl",
+ "Bessie",
+ "Beth",
+ "Bethany",
+ "Bethel",
+ "Betsy",
+ "Bette",
+ "Bettie",
+ "Betty",
+ "Bettye",
+ "Beulah",
+ "Beverly",
+ "Bianka",
+ "Bill",
+ "Billie",
+ "Billy",
+ "Birdie",
+ "Blair",
+ "Blaise",
+ "Blake",
+ "Blanca",
+ "Blanche",
+ "Blaze",
+ "Bo",
+ "Bobbie",
+ "Bobby",
+ "Bonita",
+ "Bonnie",
+ "Boris",
+ "Boyd",
+ "Brad",
+ "Braden",
+ "Bradford",
+ "Bradley",
+ "Bradly",
+ "Brady",
+ "Braeden",
+ "Brain",
+ "Brandi",
+ "Brando",
+ "Brandon",
+ "Brandt",
+ "Brandy",
+ "Brandyn",
+ "Brannon",
+ "Branson",
+ "Brant",
+ "Braulio",
+ "Braxton",
+ "Brayan",
+ "Breana",
+ "Breanna",
+ "Breanne",
+ "Brenda",
+ "Brendan",
+ "Brenden",
+ "Brendon",
+ "Brenna",
+ "Brennan",
+ "Brennon",
+ "Brent",
+ "Bret",
+ "Brett",
+ "Bria",
+ "Brian",
+ "Briana",
+ "Brianne",
+ "Brice",
+ "Bridget",
+ "Bridgette",
+ "Bridie",
+ "Brielle",
+ "Brigitte",
+ "Brionna",
+ "Brisa",
+ "Britney",
+ "Brittany",
+ "Brock",
+ "Broderick",
+ "Brody",
+ "Brook",
+ "Brooke",
+ "Brooklyn",
+ "Brooks",
+ "Brown",
+ "Bruce",
+ "Bryana",
+ "Bryce",
+ "Brycen",
+ "Bryon",
+ "Buck",
+ "Bud",
+ "Buddy",
+ "Buford",
+ "Bulah",
+ "Burdette",
+ "Burley",
+ "Burnice",
+ "Buster",
+ "Cade",
+ "Caden",
+ "Caesar",
+ "Caitlyn",
+ "Cale",
+ "Caleb",
+ "Caleigh",
+ "Cali",
+ "Calista",
+ "Callie",
+ "Camden",
+ "Cameron",
+ "Camila",
+ "Camilla",
+ "Camille",
+ "Camren",
+ "Camron",
+ "Camryn",
+ "Camylle",
+ "Candace",
+ "Candelario",
+ "Candice",
+ "Candida",
+ "Candido",
+ "Cara",
+ "Carey",
+ "Carissa",
+ "Carlee",
+ "Carleton",
+ "Carley",
+ "Carli",
+ "Carlie",
+ "Carlo",
+ "Carlos",
+ "Carlotta",
+ "Carmel",
+ "Carmela",
+ "Carmella",
+ "Carmelo",
+ "Carmen",
+ "Carmine",
+ "Carol",
+ "Carolanne",
+ "Carole",
+ "Carolina",
+ "Caroline",
+ "Carolyn",
+ "Carolyne",
+ "Carrie",
+ "Carroll",
+ "Carson",
+ "Carter",
+ "Cary",
+ "Casandra",
+ "Casey",
+ "Casimer",
+ "Casimir",
+ "Casper",
+ "Cassandra",
+ "Cassandre",
+ "Cassidy",
+ "Cassie",
+ "Catalina",
+ "Caterina",
+ "Catharine",
+ "Catherine",
+ "Cathrine",
+ "Cathryn",
+ "Cathy",
+ "Cayla",
+ "Ceasar",
+ "Cecelia",
+ "Cecil",
+ "Cecile",
+ "Cecilia",
+ "Cedrick",
+ "Celestine",
+ "Celestino",
+ "Celia",
+ "Celine",
+ "Cesar",
+ "Chad",
+ "Chadd",
+ "Chadrick",
+ "Chaim",
+ "Chance",
+ "Chandler",
+ "Chanel",
+ "Chanelle",
+ "Charity",
+ "Charlene",
+ "Charles",
+ "Charley",
+ "Charlie",
+ "Charlotte",
+ "Chase",
+ "Chasity",
+ "Chauncey",
+ "Chaya",
+ "Chaz",
+ "Chelsea",
+ "Chelsey",
+ "Chelsie",
+ "Chesley",
+ "Chester",
+ "Chet",
+ "Cheyanne",
+ "Cheyenne",
+ "Chloe",
+ "Chris",
+ "Christ",
+ "Christa",
+ "Christelle",
+ "Christian",
+ "Christiana",
+ "Christina",
+ "Christine",
+ "Christop",
+ "Christophe",
+ "Christopher",
+ "Christy",
+ "Chyna",
+ "Ciara",
+ "Cicero",
+ "Cielo",
+ "Cierra",
+ "Cindy",
+ "Citlalli",
+ "Clair",
+ "Claire",
+ "Clara",
+ "Clarabelle",
+ "Clare",
+ "Clarissa",
+ "Clark",
+ "Claud",
+ "Claude",
+ "Claudia",
+ "Claudie",
+ "Claudine",
+ "Clay",
+ "Clemens",
+ "Clement",
+ "Clementina",
+ "Clementine",
+ "Clemmie",
+ "Cleo",
+ "Cleora",
+ "Cleta",
+ "Cletus",
+ "Cleve",
+ "Cleveland",
+ "Clifford",
+ "Clifton",
+ "Clint",
+ "Clinton",
+ "Clotilde",
+ "Clovis",
+ "Cloyd",
+ "Clyde",
+ "Coby",
+ "Cody",
+ "Colby",
+ "Cole",
+ "Coleman",
+ "Colin",
+ "Colleen",
+ "Collin",
+ "Colt",
+ "Colten",
+ "Colton",
+ "Columbus",
+ "Concepcion",
+ "Conner",
+ "Connie",
+ "Connor",
+ "Conor",
+ "Conrad",
+ "Constance",
+ "Constantin",
+ "Consuelo",
+ "Cooper",
+ "Cora",
+ "Coralie",
+ "Corbin",
+ "Cordelia",
+ "Cordell",
+ "Cordia",
+ "Cordie",
+ "Corene",
+ "Corine",
+ "Cornelius",
+ "Cornell",
+ "Corrine",
+ "Cortez",
+ "Cortney",
+ "Cory",
+ "Coty",
+ "Courtney",
+ "Coy",
+ "Craig",
+ "Crawford",
+ "Creola",
+ "Cristal",
+ "Cristian",
+ "Cristina",
+ "Cristobal",
+ "Cristopher",
+ "Cruz",
+ "Crystal",
+ "Crystel",
+ "Cullen",
+ "Curt",
+ "Curtis",
+ "Cydney",
+ "Cynthia",
+ "Cyril",
+ "Cyrus",
+ "Dagmar",
+ "Dahlia",
+ "Daija",
+ "Daisha",
+ "Daisy",
+ "Dakota",
+ "Dale",
+ "Dallas",
+ "Dallin",
+ "Dalton",
+ "Damaris",
+ "Dameon",
+ "Damian",
+ "Damien",
+ "Damion",
+ "Damon",
+ "Dan",
+ "Dana",
+ "Dandre",
+ "Dane",
+ "D'angelo",
+ "Dangelo",
+ "Danial",
+ "Daniela",
+ "Daniella",
+ "Danielle",
+ "Danika",
+ "Dannie",
+ "Danny",
+ "Dante",
+ "Danyka",
+ "Daphne",
+ "Daphnee",
+ "Daphney",
+ "Darby",
+ "Daren",
+ "Darian",
+ "Dariana",
+ "Darien",
+ "Dario",
+ "Darion",
+ "Darius",
+ "Darlene",
+ "Daron",
+ "Darrel",
+ "Darrell",
+ "Darren",
+ "Darrick",
+ "Darrin",
+ "Darrion",
+ "Darron",
+ "Darryl",
+ "Darwin",
+ "Daryl",
+ "Dashawn",
+ "Dasia",
+ "Dave",
+ "David",
+ "Davin",
+ "Davion",
+ "Davon",
+ "Davonte",
+ "Dawn",
+ "Dawson",
+ "Dax",
+ "Dayana",
+ "Dayna",
+ "Dayne",
+ "Dayton",
+ "Dean",
+ "Deangelo",
+ "Deanna",
+ "Deborah",
+ "Declan",
+ "Dedric",
+ "Dedrick",
+ "Dee",
+ "Deion",
+ "Deja",
+ "Dejah",
+ "Dejon",
+ "Dejuan",
+ "Delaney",
+ "Delbert",
+ "Delfina",
+ "Delia",
+ "Delilah",
+ "Dell",
+ "Della",
+ "Delmer",
+ "Delores",
+ "Delpha",
+ "Delphia",
+ "Delphine",
+ "Delta",
+ "Demarco",
+ "Demarcus",
+ "Demario",
+ "Demetris",
+ "Demetrius",
+ "Demond",
+ "Dena",
+ "Denis",
+ "Dennis",
+ "Deon",
+ "Deondre",
+ "Deontae",
+ "Deonte",
+ "Dereck",
+ "Derek",
+ "Derick",
+ "Deron",
+ "Derrick",
+ "Deshaun",
+ "Deshawn",
+ "Desiree",
+ "Desmond",
+ "Dessie",
+ "Destany",
+ "Destin",
+ "Destinee",
+ "Destiney",
+ "Destini",
+ "Destiny",
+ "Devan",
+ "Devante",
+ "Deven",
+ "Devin",
+ "Devon",
+ "Devonte",
+ "Devyn",
+ "Dewayne",
+ "Dewitt",
+ "Dexter",
+ "Diamond",
+ "Diana",
+ "Dianna",
+ "Diego",
+ "Dillan",
+ "Dillon",
+ "Dimitri",
+ "Dina",
+ "Dino",
+ "Dion",
+ "Dixie",
+ "Dock",
+ "Dolly",
+ "Dolores",
+ "Domenic",
+ "Domenica",
+ "Domenick",
+ "Domenico",
+ "Domingo",
+ "Dominic",
+ "Dominique",
+ "Don",
+ "Donald",
+ "Donato",
+ "Donavon",
+ "Donna",
+ "Donnell",
+ "Donnie",
+ "Donny",
+ "Dora",
+ "Dorcas",
+ "Dorian",
+ "Doris",
+ "Dorothea",
+ "Dorothy",
+ "Dorris",
+ "Dortha",
+ "Dorthy",
+ "Doug",
+ "Douglas",
+ "Dovie",
+ "Doyle",
+ "Drake",
+ "Drew",
+ "Duane",
+ "Dudley",
+ "Dulce",
+ "Duncan",
+ "Durward",
+ "Dustin",
+ "Dusty",
+ "Dwight",
+ "Dylan",
+ "Earl",
+ "Earlene",
+ "Earline",
+ "Earnest",
+ "Earnestine",
+ "Easter",
+ "Easton",
+ "Ebba",
+ "Ebony",
+ "Ed",
+ "Eda",
+ "Edd",
+ "Eddie",
+ "Eden",
+ "Edgar",
+ "Edgardo",
+ "Edison",
+ "Edmond",
+ "Edmund",
+ "Edna",
+ "Eduardo",
+ "Edward",
+ "Edwardo",
+ "Edwin",
+ "Edwina",
+ "Edyth",
+ "Edythe",
+ "Effie",
+ "Efrain",
+ "Efren",
+ "Eileen",
+ "Einar",
+ "Eino",
+ "Eladio",
+ "Elaina",
+ "Elbert",
+ "Elda",
+ "Eldon",
+ "Eldora",
+ "Eldred",
+ "Eldridge",
+ "Eleanora",
+ "Eleanore",
+ "Eleazar",
+ "Electa",
+ "Elena",
+ "Elenor",
+ "Elenora",
+ "Eleonore",
+ "Elfrieda",
+ "Eli",
+ "Elian",
+ "Eliane",
+ "Elias",
+ "Eliezer",
+ "Elijah",
+ "Elinor",
+ "Elinore",
+ "Elisa",
+ "Elisabeth",
+ "Elise",
+ "Eliseo",
+ "Elisha",
+ "Elissa",
+ "Eliza",
+ "Elizabeth",
+ "Ella",
+ "Ellen",
+ "Ellie",
+ "Elliot",
+ "Elliott",
+ "Ellis",
+ "Ellsworth",
+ "Elmer",
+ "Elmira",
+ "Elmo",
+ "Elmore",
+ "Elna",
+ "Elnora",
+ "Elody",
+ "Eloisa",
+ "Eloise",
+ "Elouise",
+ "Eloy",
+ "Elroy",
+ "Elsa",
+ "Else",
+ "Elsie",
+ "Elta",
+ "Elton",
+ "Elva",
+ "Elvera",
+ "Elvie",
+ "Elvis",
+ "Elwin",
+ "Elwyn",
+ "Elyse",
+ "Elyssa",
+ "Elza",
+ "Emanuel",
+ "Emelia",
+ "Emelie",
+ "Emely",
+ "Emerald",
+ "Emerson",
+ "Emery",
+ "Emie",
+ "Emil",
+ "Emile",
+ "Emilia",
+ "Emiliano",
+ "Emilie",
+ "Emilio",
+ "Emily",
+ "Emma",
+ "Emmalee",
+ "Emmanuel",
+ "Emmanuelle",
+ "Emmet",
+ "Emmett",
+ "Emmie",
+ "Emmitt",
+ "Emmy",
+ "Emory",
+ "Ena",
+ "Enid",
+ "Enoch",
+ "Enola",
+ "Enos",
+ "Enrico",
+ "Enrique",
+ "Ephraim",
+ "Era",
+ "Eriberto",
+ "Eric",
+ "Erica",
+ "Erich",
+ "Erick",
+ "Ericka",
+ "Erik",
+ "Erika",
+ "Erin",
+ "Erling",
+ "Erna",
+ "Ernest",
+ "Ernestina",
+ "Ernestine",
+ "Ernesto",
+ "Ernie",
+ "Ervin",
+ "Erwin",
+ "Eryn",
+ "Esmeralda",
+ "Esperanza",
+ "Esta",
+ "Esteban",
+ "Estefania",
+ "Estel",
+ "Estell",
+ "Estella",
+ "Estelle",
+ "Estevan",
+ "Esther",
+ "Estrella",
+ "Etha",
+ "Ethan",
+ "Ethel",
+ "Ethelyn",
+ "Ethyl",
+ "Ettie",
+ "Eudora",
+ "Eugene",
+ "Eugenia",
+ "Eula",
+ "Eulah",
+ "Eulalia",
+ "Euna",
+ "Eunice",
+ "Eusebio",
+ "Eva",
+ "Evalyn",
+ "Evan",
+ "Evangeline",
+ "Evans",
+ "Eve",
+ "Eveline",
+ "Evelyn",
+ "Everardo",
+ "Everett",
+ "Everette",
+ "Evert",
+ "Evie",
+ "Ewald",
+ "Ewell",
+ "Ezekiel",
+ "Ezequiel",
+ "Ezra",
+ "Fabian",
+ "Fabiola",
+ "Fae",
+ "Fannie",
+ "Fanny",
+ "Fatima",
+ "Faustino",
+ "Fausto",
+ "Favian",
+ "Fay",
+ "Faye",
+ "Federico",
+ "Felicia",
+ "Felicita",
+ "Felicity",
+ "Felipa",
+ "Felipe",
+ "Felix",
+ "Felton",
+ "Fermin",
+ "Fern",
+ "Fernando",
+ "Ferne",
+ "Fidel",
+ "Filiberto",
+ "Filomena",
+ "Finn",
+ "Fiona",
+ "Flavie",
+ "Flavio",
+ "Fleta",
+ "Fletcher",
+ "Flo",
+ "Florence",
+ "Florencio",
+ "Florian",
+ "Florida",
+ "Florine",
+ "Flossie",
+ "Floy",
+ "Floyd",
+ "Ford",
+ "Forest",
+ "Forrest",
+ "Foster",
+ "Frances",
+ "Francesca",
+ "Francesco",
+ "Francis",
+ "Francisca",
+ "Francisco",
+ "Franco",
+ "Frank",
+ "Frankie",
+ "Franz",
+ "Fred",
+ "Freda",
+ "Freddie",
+ "Freddy",
+ "Frederic",
+ "Frederick",
+ "Frederik",
+ "Frederique",
+ "Fredrick",
+ "Fredy",
+ "Freeda",
+ "Freeman",
+ "Freida",
+ "Frida",
+ "Frieda",
+ "Friedrich",
+ "Fritz",
+ "Furman",
+ "Gabe",
+ "Gabriel",
+ "Gabriella",
+ "Gabrielle",
+ "Gaetano",
+ "Gage",
+ "Gail",
+ "Gardner",
+ "Garett",
+ "Garfield",
+ "Garland",
+ "Garnet",
+ "Garnett",
+ "Garret",
+ "Garrett",
+ "Garrick",
+ "Garrison",
+ "Garry",
+ "Garth",
+ "Gaston",
+ "Gavin",
+ "Gay",
+ "Gayle",
+ "Gaylord",
+ "Gene",
+ "General",
+ "Genesis",
+ "Genevieve",
+ "Gennaro",
+ "Genoveva",
+ "Geo",
+ "Geoffrey",
+ "George",
+ "Georgette",
+ "Georgiana",
+ "Georgianna",
+ "Geovanni",
+ "Geovanny",
+ "Geovany",
+ "Gerald",
+ "Geraldine",
+ "Gerard",
+ "Gerardo",
+ "Gerda",
+ "Gerhard",
+ "Germaine",
+ "German",
+ "Gerry",
+ "Gerson",
+ "Gertrude",
+ "Gia",
+ "Gianni",
+ "Gideon",
+ "Gilbert",
+ "Gilberto",
+ "Gilda",
+ "Giles",
+ "Gillian",
+ "Gina",
+ "Gino",
+ "Giovani",
+ "Giovanna",
+ "Giovanni",
+ "Giovanny",
+ "Gisselle",
+ "Giuseppe",
+ "Gladyce",
+ "Gladys",
+ "Glen",
+ "Glenda",
+ "Glenna",
+ "Glennie",
+ "Gloria",
+ "Godfrey",
+ "Golda",
+ "Golden",
+ "Gonzalo",
+ "Gordon",
+ "Grace",
+ "Gracie",
+ "Graciela",
+ "Grady",
+ "Graham",
+ "Grant",
+ "Granville",
+ "Grayce",
+ "Grayson",
+ "Green",
+ "Greg",
+ "Gregg",
+ "Gregoria",
+ "Gregorio",
+ "Gregory",
+ "Greta",
+ "Gretchen",
+ "Greyson",
+ "Griffin",
+ "Grover",
+ "Guadalupe",
+ "Gudrun",
+ "Guido",
+ "Guillermo",
+ "Guiseppe",
+ "Gunnar",
+ "Gunner",
+ "Gus",
+ "Gussie",
+ "Gust",
+ "Gustave",
+ "Guy",
+ "Gwen",
+ "Gwendolyn",
+ "Hadley",
+ "Hailee",
+ "Hailey",
+ "Hailie",
+ "Hal",
+ "Haleigh",
+ "Haley",
+ "Halie",
+ "Halle",
+ "Hallie",
+ "Hank",
+ "Hanna",
+ "Hannah",
+ "Hans",
+ "Hardy",
+ "Harley",
+ "Harmon",
+ "Harmony",
+ "Harold",
+ "Harrison",
+ "Harry",
+ "Harvey",
+ "Haskell",
+ "Hassan",
+ "Hassie",
+ "Hattie",
+ "Haven",
+ "Hayden",
+ "Haylee",
+ "Hayley",
+ "Haylie",
+ "Hazel",
+ "Hazle",
+ "Heath",
+ "Heather",
+ "Heaven",
+ "Heber",
+ "Hector",
+ "Heidi",
+ "Helen",
+ "Helena",
+ "Helene",
+ "Helga",
+ "Hellen",
+ "Helmer",
+ "Heloise",
+ "Henderson",
+ "Henri",
+ "Henriette",
+ "Henry",
+ "Herbert",
+ "Herman",
+ "Hermann",
+ "Hermina",
+ "Herminia",
+ "Herminio",
+ "Hershel",
+ "Herta",
+ "Hertha",
+ "Hester",
+ "Hettie",
+ "Hilario",
+ "Hilbert",
+ "Hilda",
+ "Hildegard",
+ "Hillard",
+ "Hillary",
+ "Hilma",
+ "Hilton",
+ "Hipolito",
+ "Hiram",
+ "Hobart",
+ "Holden",
+ "Hollie",
+ "Hollis",
+ "Holly",
+ "Hope",
+ "Horace",
+ "Horacio",
+ "Hortense",
+ "Hosea",
+ "Houston",
+ "Howard",
+ "Howell",
+ "Hoyt",
+ "Hubert",
+ "Hudson",
+ "Hugh",
+ "Hulda",
+ "Humberto",
+ "Hunter",
+ "Hyman",
+ "Ian",
+ "Ibrahim",
+ "Icie",
+ "Ida",
+ "Idell",
+ "Idella",
+ "Ignacio",
+ "Ignatius",
+ "Ike",
+ "Ila",
+ "Ilene",
+ "Iliana",
+ "Ima",
+ "Imani",
+ "Imelda",
+ "Immanuel",
+ "Imogene",
+ "Ines",
+ "Irma",
+ "Irving",
+ "Irwin",
+ "Isaac",
+ "Isabel",
+ "Isabell",
+ "Isabella",
+ "Isabelle",
+ "Isac",
+ "Isadore",
+ "Isai",
+ "Isaiah",
+ "Isaias",
+ "Isidro",
+ "Ismael",
+ "Isobel",
+ "Isom",
+ "Israel",
+ "Issac",
+ "Itzel",
+ "Iva",
+ "Ivah",
+ "Ivory",
+ "Ivy",
+ "Izabella",
+ "Izaiah",
+ "Jabari",
+ "Jace",
+ "Jacey",
+ "Jacinthe",
+ "Jacinto",
+ "Jack",
+ "Jackeline",
+ "Jackie",
+ "Jacklyn",
+ "Jackson",
+ "Jacky",
+ "Jaclyn",
+ "Jacquelyn",
+ "Jacques",
+ "Jacynthe",
+ "Jada",
+ "Jade",
+ "Jaden",
+ "Jadon",
+ "Jadyn",
+ "Jaeden",
+ "Jaida",
+ "Jaiden",
+ "Jailyn",
+ "Jaime",
+ "Jairo",
+ "Jakayla",
+ "Jake",
+ "Jakob",
+ "Jaleel",
+ "Jalen",
+ "Jalon",
+ "Jalyn",
+ "Jamaal",
+ "Jamal",
+ "Jamar",
+ "Jamarcus",
+ "Jamel",
+ "Jameson",
+ "Jamey",
+ "Jamie",
+ "Jamil",
+ "Jamir",
+ "Jamison",
+ "Jammie",
+ "Jan",
+ "Jana",
+ "Janae",
+ "Jane",
+ "Janelle",
+ "Janessa",
+ "Janet",
+ "Janice",
+ "Janick",
+ "Janie",
+ "Janis",
+ "Janiya",
+ "Jannie",
+ "Jany",
+ "Jaquan",
+ "Jaquelin",
+ "Jaqueline",
+ "Jared",
+ "Jaren",
+ "Jarod",
+ "Jaron",
+ "Jarred",
+ "Jarrell",
+ "Jarret",
+ "Jarrett",
+ "Jarrod",
+ "Jarvis",
+ "Jasen",
+ "Jasmin",
+ "Jason",
+ "Jasper",
+ "Jaunita",
+ "Javier",
+ "Javon",
+ "Javonte",
+ "Jay",
+ "Jayce",
+ "Jaycee",
+ "Jayda",
+ "Jayde",
+ "Jayden",
+ "Jaydon",
+ "Jaylan",
+ "Jaylen",
+ "Jaylin",
+ "Jaylon",
+ "Jayme",
+ "Jayne",
+ "Jayson",
+ "Jazlyn",
+ "Jazmin",
+ "Jazmyn",
+ "Jazmyne",
+ "Jean",
+ "Jeanette",
+ "Jeanie",
+ "Jeanne",
+ "Jed",
+ "Jedediah",
+ "Jedidiah",
+ "Jeff",
+ "Jefferey",
+ "Jeffery",
+ "Jeffrey",
+ "Jeffry",
+ "Jena",
+ "Jenifer",
+ "Jennie",
+ "Jennifer",
+ "Jennings",
+ "Jennyfer",
+ "Jensen",
+ "Jerad",
+ "Jerald",
+ "Jeramie",
+ "Jeramy",
+ "Jerel",
+ "Jeremie",
+ "Jeremy",
+ "Jermain",
+ "Jermaine",
+ "Jermey",
+ "Jerod",
+ "Jerome",
+ "Jeromy",
+ "Jerrell",
+ "Jerrod",
+ "Jerrold",
+ "Jerry",
+ "Jess",
+ "Jesse",
+ "Jessica",
+ "Jessie",
+ "Jessika",
+ "Jessy",
+ "Jessyca",
+ "Jesus",
+ "Jett",
+ "Jettie",
+ "Jevon",
+ "Jewel",
+ "Jewell",
+ "Jillian",
+ "Jimmie",
+ "Jimmy",
+ "Jo",
+ "Joan",
+ "Joana",
+ "Joanie",
+ "Joanne",
+ "Joannie",
+ "Joanny",
+ "Joany",
+ "Joaquin",
+ "Jocelyn",
+ "Jodie",
+ "Jody",
+ "Joe",
+ "Joel",
+ "Joelle",
+ "Joesph",
+ "Joey",
+ "Johan",
+ "Johann",
+ "Johanna",
+ "Johathan",
+ "John",
+ "Johnathan",
+ "Johnathon",
+ "Johnnie",
+ "Johnny",
+ "Johnpaul",
+ "Johnson",
+ "Jolie",
+ "Jon",
+ "Jonas",
+ "Jonatan",
+ "Jonathan",
+ "Jonathon",
+ "Jordan",
+ "Jordane",
+ "Jordi",
+ "Jordon",
+ "Jordy",
+ "Jordyn",
+ "Jorge",
+ "Jose",
+ "Josefa",
+ "Josefina",
+ "Joseph",
+ "Josephine",
+ "Josh",
+ "Joshua",
+ "Joshuah",
+ "Josiah",
+ "Josiane",
+ "Josianne",
+ "Josie",
+ "Josue",
+ "Jovan",
+ "Jovani",
+ "Jovanny",
+ "Jovany",
+ "Joy",
+ "Joyce",
+ "Juana",
+ "Juanita",
+ "Judah",
+ "Judd",
+ "Jude",
+ "Judge",
+ "Judson",
+ "Judy",
+ "Jules",
+ "Julia",
+ "Julian",
+ "Juliana",
+ "Julianne",
+ "Julie",
+ "Julien",
+ "Juliet",
+ "Julio",
+ "Julius",
+ "June",
+ "Junior",
+ "Junius",
+ "Justen",
+ "Justice",
+ "Justina",
+ "Justine",
+ "Juston",
+ "Justus",
+ "Justyn",
+ "Juvenal",
+ "Juwan",
+ "Kacey",
+ "Kaci",
+ "Kacie",
+ "Kade",
+ "Kaden",
+ "Kadin",
+ "Kaela",
+ "Kaelyn",
+ "Kaia",
+ "Kailee",
+ "Kailey",
+ "Kailyn",
+ "Kaitlin",
+ "Kaitlyn",
+ "Kale",
+ "Kaleb",
+ "Kaleigh",
+ "Kaley",
+ "Kali",
+ "Kallie",
+ "Kameron",
+ "Kamille",
+ "Kamren",
+ "Kamron",
+ "Kamryn",
+ "Kane",
+ "Kara",
+ "Kareem",
+ "Karelle",
+ "Karen",
+ "Kari",
+ "Kariane",
+ "Karianne",
+ "Karina",
+ "Karine",
+ "Karl",
+ "Karlee",
+ "Karley",
+ "Karli",
+ "Karlie",
+ "Karolann",
+ "Karson",
+ "Kasandra",
+ "Kasey",
+ "Kassandra",
+ "Katarina",
+ "Katelin",
+ "Katelyn",
+ "Katelynn",
+ "Katharina",
+ "Katherine",
+ "Katheryn",
+ "Kathleen",
+ "Kathlyn",
+ "Kathryn",
+ "Kathryne",
+ "Katlyn",
+ "Katlynn",
+ "Katrina",
+ "Katrine",
+ "Kattie",
+ "Kavon",
+ "Kay",
+ "Kaya",
+ "Kaycee",
+ "Kayden",
+ "Kayla",
+ "Kaylah",
+ "Kaylee",
+ "Kayleigh",
+ "Kayley",
+ "Kayli",
+ "Kaylie",
+ "Kaylin",
+ "Keagan",
+ "Keanu",
+ "Keara",
+ "Keaton",
+ "Keegan",
+ "Keeley",
+ "Keely",
+ "Keenan",
+ "Keira",
+ "Keith",
+ "Kellen",
+ "Kelley",
+ "Kelli",
+ "Kellie",
+ "Kelly",
+ "Kelsi",
+ "Kelsie",
+ "Kelton",
+ "Kelvin",
+ "Ken",
+ "Kendall",
+ "Kendra",
+ "Kendrick",
+ "Kenna",
+ "Kennedi",
+ "Kennedy",
+ "Kenneth",
+ "Kennith",
+ "Kenny",
+ "Kenton",
+ "Kenya",
+ "Kenyatta",
+ "Kenyon",
+ "Keon",
+ "Keshaun",
+ "Keshawn",
+ "Keven",
+ "Kevin",
+ "Kevon",
+ "Keyon",
+ "Keyshawn",
+ "Khalid",
+ "Khalil",
+ "Kian",
+ "Kiana",
+ "Kianna",
+ "Kiara",
+ "Kiarra",
+ "Kiel",
+ "Kiera",
+ "Kieran",
+ "Kiley",
+ "Kim",
+ "Kimberly",
+ "King",
+ "Kip",
+ "Kira",
+ "Kirk",
+ "Kirsten",
+ "Kirstin",
+ "Kitty",
+ "Kobe",
+ "Koby",
+ "Kody",
+ "Kolby",
+ "Kole",
+ "Korbin",
+ "Korey",
+ "Kory",
+ "Kraig",
+ "Kris",
+ "Krista",
+ "Kristian",
+ "Kristin",
+ "Kristina",
+ "Kristofer",
+ "Kristoffer",
+ "Kristopher",
+ "Kristy",
+ "Krystal",
+ "Krystel",
+ "Krystina",
+ "Kurt",
+ "Kurtis",
+ "Kyla",
+ "Kyle",
+ "Kylee",
+ "Kyleigh",
+ "Kyler",
+ "Kylie",
+ "Kyra",
+ "Lacey",
+ "Lacy",
+ "Ladarius",
+ "Lafayette",
+ "Laila",
+ "Laisha",
+ "Lamar",
+ "Lambert",
+ "Lamont",
+ "Lance",
+ "Landen",
+ "Lane",
+ "Laney",
+ "Larissa",
+ "Laron",
+ "Larry",
+ "Larue",
+ "Laura",
+ "Laurel",
+ "Lauren",
+ "Laurence",
+ "Lauretta",
+ "Lauriane",
+ "Laurianne",
+ "Laurie",
+ "Laurine",
+ "Laury",
+ "Lauryn",
+ "Lavada",
+ "Lavern",
+ "Laverna",
+ "Laverne",
+ "Lavina",
+ "Lavinia",
+ "Lavon",
+ "Lavonne",
+ "Lawrence",
+ "Lawson",
+ "Layla",
+ "Layne",
+ "Lazaro",
+ "Lea",
+ "Leann",
+ "Leanna",
+ "Leanne",
+ "Leatha",
+ "Leda",
+ "Lee",
+ "Leif",
+ "Leila",
+ "Leilani",
+ "Lela",
+ "Lelah",
+ "Leland",
+ "Lelia",
+ "Lempi",
+ "Lemuel",
+ "Lenna",
+ "Lennie",
+ "Lenny",
+ "Lenora",
+ "Lenore",
+ "Leo",
+ "Leola",
+ "Leon",
+ "Leonard",
+ "Leonardo",
+ "Leone",
+ "Leonel",
+ "Leonie",
+ "Leonor",
+ "Leonora",
+ "Leopold",
+ "Leopoldo",
+ "Leora",
+ "Lera",
+ "Lesley",
+ "Leslie",
+ "Lesly",
+ "Lessie",
+ "Lester",
+ "Leta",
+ "Letha",
+ "Letitia",
+ "Levi",
+ "Lew",
+ "Lewis",
+ "Lexi",
+ "Lexie",
+ "Lexus",
+ "Lia",
+ "Liam",
+ "Liana",
+ "Libbie",
+ "Libby",
+ "Lila",
+ "Lilian",
+ "Liliana",
+ "Liliane",
+ "Lilla",
+ "Lillian",
+ "Lilliana",
+ "Lillie",
+ "Lilly",
+ "Lily",
+ "Lilyan",
+ "Lina",
+ "Lincoln",
+ "Linda",
+ "Lindsay",
+ "Lindsey",
+ "Linnea",
+ "Linnie",
+ "Linwood",
+ "Lionel",
+ "Lisa",
+ "Lisandro",
+ "Lisette",
+ "Litzy",
+ "Liza",
+ "Lizeth",
+ "Lizzie",
+ "Llewellyn",
+ "Lloyd",
+ "Logan",
+ "Lois",
+ "Lola",
+ "Lolita",
+ "Loma",
+ "Lon",
+ "London",
+ "Lonie",
+ "Lonnie",
+ "Lonny",
+ "Lonzo",
+ "Lora",
+ "Loraine",
+ "Loren",
+ "Lorena",
+ "Lorenz",
+ "Lorenza",
+ "Lorenzo",
+ "Lori",
+ "Lorine",
+ "Lorna",
+ "Lottie",
+ "Lou",
+ "Louie",
+ "Louisa",
+ "Lourdes",
+ "Louvenia",
+ "Lowell",
+ "Loy",
+ "Loyal",
+ "Loyce",
+ "Lucas",
+ "Luciano",
+ "Lucie",
+ "Lucienne",
+ "Lucile",
+ "Lucinda",
+ "Lucio",
+ "Lucious",
+ "Lucius",
+ "Lucy",
+ "Ludie",
+ "Ludwig",
+ "Lue",
+ "Luella",
+ "Luigi",
+ "Luis",
+ "Luisa",
+ "Lukas",
+ "Lula",
+ "Lulu",
+ "Luna",
+ "Lupe",
+ "Lura",
+ "Lurline",
+ "Luther",
+ "Luz",
+ "Lyda",
+ "Lydia",
+ "Lyla",
+ "Lynn",
+ "Lyric",
+ "Lysanne",
+ "Mabel",
+ "Mabelle",
+ "Mable",
+ "Mac",
+ "Macey",
+ "Maci",
+ "Macie",
+ "Mack",
+ "Mackenzie",
+ "Macy",
+ "Madaline",
+ "Madalyn",
+ "Maddison",
+ "Madeline",
+ "Madelyn",
+ "Madelynn",
+ "Madge",
+ "Madie",
+ "Madilyn",
+ "Madisen",
+ "Madison",
+ "Madisyn",
+ "Madonna",
+ "Madyson",
+ "Mae",
+ "Maegan",
+ "Maeve",
+ "Mafalda",
+ "Magali",
+ "Magdalen",
+ "Magdalena",
+ "Maggie",
+ "Magnolia",
+ "Magnus",
+ "Maia",
+ "Maida",
+ "Maiya",
+ "Major",
+ "Makayla",
+ "Makenna",
+ "Makenzie",
+ "Malachi",
+ "Malcolm",
+ "Malika",
+ "Malinda",
+ "Mallie",
+ "Mallory",
+ "Malvina",
+ "Mandy",
+ "Manley",
+ "Manuel",
+ "Manuela",
+ "Mara",
+ "Marc",
+ "Marcel",
+ "Marcelina",
+ "Marcelino",
+ "Marcella",
+ "Marcelle",
+ "Marcellus",
+ "Marcelo",
+ "Marcia",
+ "Marco",
+ "Marcos",
+ "Marcus",
+ "Margaret",
+ "Margarete",
+ "Margarett",
+ "Margaretta",
+ "Margarette",
+ "Margarita",
+ "Marge",
+ "Margie",
+ "Margot",
+ "Margret",
+ "Marguerite",
+ "Maria",
+ "Mariah",
+ "Mariam",
+ "Marian",
+ "Mariana",
+ "Mariane",
+ "Marianna",
+ "Marianne",
+ "Mariano",
+ "Maribel",
+ "Marie",
+ "Mariela",
+ "Marielle",
+ "Marietta",
+ "Marilie",
+ "Marilou",
+ "Marilyne",
+ "Marina",
+ "Mario",
+ "Marion",
+ "Marisa",
+ "Marisol",
+ "Maritza",
+ "Marjolaine",
+ "Marjorie",
+ "Marjory",
+ "Mark",
+ "Markus",
+ "Marlee",
+ "Marlen",
+ "Marlene",
+ "Marley",
+ "Marlin",
+ "Marlon",
+ "Marques",
+ "Marquis",
+ "Marquise",
+ "Marshall",
+ "Marta",
+ "Martin",
+ "Martina",
+ "Martine",
+ "Marty",
+ "Marvin",
+ "Mary",
+ "Maryam",
+ "Maryjane",
+ "Maryse",
+ "Mason",
+ "Mateo",
+ "Mathew",
+ "Mathias",
+ "Mathilde",
+ "Matilda",
+ "Matilde",
+ "Matt",
+ "Matteo",
+ "Mattie",
+ "Maud",
+ "Maude",
+ "Maudie",
+ "Maureen",
+ "Maurice",
+ "Mauricio",
+ "Maurine",
+ "Maverick",
+ "Mavis",
+ "Max",
+ "Maxie",
+ "Maxime",
+ "Maximilian",
+ "Maximillia",
+ "Maximillian",
+ "Maximo",
+ "Maximus",
+ "Maxine",
+ "Maxwell",
+ "May",
+ "Maya",
+ "Maybell",
+ "Maybelle",
+ "Maye",
+ "Maymie",
+ "Maynard",
+ "Mayra",
+ "Mazie",
+ "Mckayla",
+ "Mckenna",
+ "Mckenzie",
+ "Meagan",
+ "Meaghan",
+ "Meda",
+ "Megane",
+ "Meggie",
+ "Meghan",
+ "Mekhi",
+ "Melany",
+ "Melba",
+ "Melisa",
+ "Melissa",
+ "Mellie",
+ "Melody",
+ "Melvin",
+ "Melvina",
+ "Melyna",
+ "Melyssa",
+ "Mercedes",
+ "Meredith",
+ "Merl",
+ "Merle",
+ "Merlin",
+ "Merritt",
+ "Mertie",
+ "Mervin",
+ "Meta",
+ "Mia",
+ "Micaela",
+ "Micah",
+ "Michael",
+ "Michaela",
+ "Michale",
+ "Micheal",
+ "Michel",
+ "Michele",
+ "Michelle",
+ "Miguel",
+ "Mikayla",
+ "Mike",
+ "Mikel",
+ "Milan",
+ "Miles",
+ "Milford",
+ "Miller",
+ "Millie",
+ "Milo",
+ "Milton",
+ "Mina",
+ "Minerva",
+ "Minnie",
+ "Miracle",
+ "Mireille",
+ "Mireya",
+ "Misael",
+ "Missouri",
+ "Misty",
+ "Mitchel",
+ "Mitchell",
+ "Mittie",
+ "Modesta",
+ "Modesto",
+ "Mohamed",
+ "Mohammad",
+ "Mohammed",
+ "Moises",
+ "Mollie",
+ "Molly",
+ "Mona",
+ "Monica",
+ "Monique",
+ "Monroe",
+ "Monserrat",
+ "Monserrate",
+ "Montana",
+ "Monte",
+ "Monty",
+ "Morgan",
+ "Moriah",
+ "Morris",
+ "Mortimer",
+ "Morton",
+ "Mose",
+ "Moses",
+ "Moshe",
+ "Mossie",
+ "Mozell",
+ "Mozelle",
+ "Muhammad",
+ "Muriel",
+ "Murl",
+ "Murphy",
+ "Murray",
+ "Mustafa",
+ "Mya",
+ "Myah",
+ "Mylene",
+ "Myles",
+ "Myra",
+ "Myriam",
+ "Myrl",
+ "Myrna",
+ "Myron",
+ "Myrtice",
+ "Myrtie",
+ "Myrtis",
+ "Myrtle",
+ "Nadia",
+ "Nakia",
+ "Name",
+ "Nannie",
+ "Naomi",
+ "Naomie",
+ "Napoleon",
+ "Narciso",
+ "Nash",
+ "Nasir",
+ "Nat",
+ "Natalia",
+ "Natalie",
+ "Natasha",
+ "Nathan",
+ "Nathanael",
+ "Nathanial",
+ "Nathaniel",
+ "Nathen",
+ "Nayeli",
+ "Neal",
+ "Ned",
+ "Nedra",
+ "Neha",
+ "Neil",
+ "Nelda",
+ "Nella",
+ "Nelle",
+ "Nellie",
+ "Nels",
+ "Nelson",
+ "Neoma",
+ "Nestor",
+ "Nettie",
+ "Neva",
+ "Newell",
+ "Newton",
+ "Nia",
+ "Nicholas",
+ "Nicholaus",
+ "Nichole",
+ "Nick",
+ "Nicklaus",
+ "Nickolas",
+ "Nico",
+ "Nicola",
+ "Nicolas",
+ "Nicole",
+ "Nicolette",
+ "Nigel",
+ "Nikita",
+ "Nikki",
+ "Nikko",
+ "Niko",
+ "Nikolas",
+ "Nils",
+ "Nina",
+ "Noah",
+ "Noble",
+ "Noe",
+ "Noel",
+ "Noelia",
+ "Noemi",
+ "Noemie",
+ "Noemy",
+ "Nola",
+ "Nolan",
+ "Nona",
+ "Nora",
+ "Norbert",
+ "Norberto",
+ "Norene",
+ "Norma",
+ "Norris",
+ "Norval",
+ "Norwood",
+ "Nova",
+ "Novella",
+ "Nya",
+ "Nyah",
+ "Nyasia",
+ "Obie",
+ "Oceane",
+ "Ocie",
+ "Octavia",
+ "Oda",
+ "Odell",
+ "Odessa",
+ "Odie",
+ "Ofelia",
+ "Okey",
+ "Ola",
+ "Olaf",
+ "Ole",
+ "Olen",
+ "Oleta",
+ "Olga",
+ "Olin",
+ "Oliver",
+ "Ollie",
+ "Oma",
+ "Omari",
+ "Omer",
+ "Ona",
+ "Onie",
+ "Opal",
+ "Ophelia",
+ "Ora",
+ "Oral",
+ "Oran",
+ "Oren",
+ "Orie",
+ "Orin",
+ "Orion",
+ "Orland",
+ "Orlando",
+ "Orlo",
+ "Orpha",
+ "Orrin",
+ "Orval",
+ "Orville",
+ "Osbaldo",
+ "Osborne",
+ "Oscar",
+ "Osvaldo",
+ "Oswald",
+ "Oswaldo",
+ "Otha",
+ "Otho",
+ "Otilia",
+ "Otis",
+ "Ottilie",
+ "Ottis",
+ "Otto",
+ "Ova",
+ "Owen",
+ "Ozella",
+ "Pablo",
+ "Paige",
+ "Palma",
+ "Pamela",
+ "Pansy",
+ "Paolo",
+ "Paris",
+ "Parker",
+ "Pascale",
+ "Pasquale",
+ "Pat",
+ "Patience",
+ "Patricia",
+ "Patrick",
+ "Patsy",
+ "Pattie",
+ "Paul",
+ "Paula",
+ "Pauline",
+ "Paxton",
+ "Payton",
+ "Pearl",
+ "Pearlie",
+ "Pearline",
+ "Pedro",
+ "Peggie",
+ "Penelope",
+ "Percival",
+ "Percy",
+ "Perry",
+ "Pete",
+ "Peter",
+ "Petra",
+ "Peyton",
+ "Philip",
+ "Phoebe",
+ "Phyllis",
+ "Pierce",
+ "Pierre",
+ "Pietro",
+ "Pink",
+ "Pinkie",
+ "Piper",
+ "Polly",
+ "Porter",
+ "Precious",
+ "Presley",
+ "Preston",
+ "Price",
+ "Prince",
+ "Princess",
+ "Priscilla",
+ "Providenci",
+ "Prudence",
+ "Queen",
+ "Queenie",
+ "Quentin",
+ "Quincy",
+ "Quinn",
+ "Quinten",
+ "Quinton",
+ "Rachael",
+ "Rachel",
+ "Rachelle",
+ "Rae",
+ "Raegan",
+ "Rafael",
+ "Rafaela",
+ "Raheem",
+ "Rahsaan",
+ "Rahul",
+ "Raina",
+ "Raleigh",
+ "Ralph",
+ "Ramiro",
+ "Ramon",
+ "Ramona",
+ "Randal",
+ "Randall",
+ "Randi",
+ "Randy",
+ "Ransom",
+ "Raoul",
+ "Raphael",
+ "Raphaelle",
+ "Raquel",
+ "Rashad",
+ "Rashawn",
+ "Rasheed",
+ "Raul",
+ "Raven",
+ "Ray",
+ "Raymond",
+ "Raymundo",
+ "Reagan",
+ "Reanna",
+ "Reba",
+ "Rebeca",
+ "Rebecca",
+ "Rebeka",
+ "Rebekah",
+ "Reece",
+ "Reed",
+ "Reese",
+ "Regan",
+ "Reggie",
+ "Reginald",
+ "Reid",
+ "Reilly",
+ "Reina",
+ "Reinhold",
+ "Remington",
+ "Rene",
+ "Renee",
+ "Ressie",
+ "Reta",
+ "Retha",
+ "Retta",
+ "Reuben",
+ "Reva",
+ "Rex",
+ "Rey",
+ "Reyes",
+ "Reymundo",
+ "Reyna",
+ "Reynold",
+ "Rhea",
+ "Rhett",
+ "Rhianna",
+ "Rhiannon",
+ "Rhoda",
+ "Ricardo",
+ "Richard",
+ "Richie",
+ "Richmond",
+ "Rick",
+ "Rickey",
+ "Rickie",
+ "Ricky",
+ "Rico",
+ "Rigoberto",
+ "Riley",
+ "Rita",
+ "River",
+ "Robb",
+ "Robbie",
+ "Robert",
+ "Roberta",
+ "Roberto",
+ "Robin",
+ "Robyn",
+ "Rocio",
+ "Rocky",
+ "Rod",
+ "Roderick",
+ "Rodger",
+ "Rodolfo",
+ "Rodrick",
+ "Rodrigo",
+ "Roel",
+ "Rogelio",
+ "Roger",
+ "Rogers",
+ "Rolando",
+ "Rollin",
+ "Roma",
+ "Romaine",
+ "Roman",
+ "Ron",
+ "Ronaldo",
+ "Ronny",
+ "Roosevelt",
+ "Rory",
+ "Rosa",
+ "Rosalee",
+ "Rosalia",
+ "Rosalind",
+ "Rosalinda",
+ "Rosalyn",
+ "Rosamond",
+ "Rosanna",
+ "Rosario",
+ "Roscoe",
+ "Rose",
+ "Rosella",
+ "Roselyn",
+ "Rosemarie",
+ "Rosemary",
+ "Rosendo",
+ "Rosetta",
+ "Rosie",
+ "Rosina",
+ "Roslyn",
+ "Ross",
+ "Rossie",
+ "Rowan",
+ "Rowena",
+ "Rowland",
+ "Roxane",
+ "Roxanne",
+ "Roy",
+ "Royal",
+ "Royce",
+ "Rozella",
+ "Ruben",
+ "Rubie",
+ "Ruby",
+ "Rubye",
+ "Rudolph",
+ "Rudy",
+ "Rupert",
+ "Russ",
+ "Russel",
+ "Russell",
+ "Rusty",
+ "Ruth",
+ "Ruthe",
+ "Ruthie",
+ "Ryan",
+ "Ryann",
+ "Ryder",
+ "Rylan",
+ "Rylee",
+ "Ryleigh",
+ "Ryley",
+ "Sabina",
+ "Sabrina",
+ "Sabryna",
+ "Sadie",
+ "Sadye",
+ "Sage",
+ "Saige",
+ "Sallie",
+ "Sally",
+ "Salma",
+ "Salvador",
+ "Salvatore",
+ "Sam",
+ "Samanta",
+ "Samantha",
+ "Samara",
+ "Samir",
+ "Sammie",
+ "Sammy",
+ "Samson",
+ "Sandra",
+ "Sandrine",
+ "Sandy",
+ "Sanford",
+ "Santa",
+ "Santiago",
+ "Santina",
+ "Santino",
+ "Santos",
+ "Sarah",
+ "Sarai",
+ "Sarina",
+ "Sasha",
+ "Saul",
+ "Savanah",
+ "Savanna",
+ "Savannah",
+ "Savion",
+ "Scarlett",
+ "Schuyler",
+ "Scot",
+ "Scottie",
+ "Scotty",
+ "Seamus",
+ "Sean",
+ "Sebastian",
+ "Sedrick",
+ "Selena",
+ "Selina",
+ "Selmer",
+ "Serena",
+ "Serenity",
+ "Seth",
+ "Shad",
+ "Shaina",
+ "Shakira",
+ "Shana",
+ "Shane",
+ "Shanel",
+ "Shanelle",
+ "Shania",
+ "Shanie",
+ "Shaniya",
+ "Shanna",
+ "Shannon",
+ "Shanny",
+ "Shanon",
+ "Shany",
+ "Sharon",
+ "Shaun",
+ "Shawn",
+ "Shawna",
+ "Shaylee",
+ "Shayna",
+ "Shayne",
+ "Shea",
+ "Sheila",
+ "Sheldon",
+ "Shemar",
+ "Sheridan",
+ "Sherman",
+ "Sherwood",
+ "Shirley",
+ "Shyann",
+ "Shyanne",
+ "Sibyl",
+ "Sid",
+ "Sidney",
+ "Sienna",
+ "Sierra",
+ "Sigmund",
+ "Sigrid",
+ "Sigurd",
+ "Silas",
+ "Sim",
+ "Simeon",
+ "Simone",
+ "Sincere",
+ "Sister",
+ "Skye",
+ "Skyla",
+ "Skylar",
+ "Sofia",
+ "Soledad",
+ "Solon",
+ "Sonia",
+ "Sonny",
+ "Sonya",
+ "Sophia",
+ "Sophie",
+ "Spencer",
+ "Stacey",
+ "Stacy",
+ "Stan",
+ "Stanford",
+ "Stanley",
+ "Stanton",
+ "Stefan",
+ "Stefanie",
+ "Stella",
+ "Stephan",
+ "Stephania",
+ "Stephanie",
+ "Stephany",
+ "Stephen",
+ "Stephon",
+ "Sterling",
+ "Steve",
+ "Stevie",
+ "Stewart",
+ "Stone",
+ "Stuart",
+ "Summer",
+ "Sunny",
+ "Susan",
+ "Susana",
+ "Susanna",
+ "Susie",
+ "Suzanne",
+ "Sven",
+ "Syble",
+ "Sydnee",
+ "Sydney",
+ "Sydni",
+ "Sydnie",
+ "Sylvan",
+ "Sylvester",
+ "Sylvia",
+ "Tabitha",
+ "Tad",
+ "Talia",
+ "Talon",
+ "Tamara",
+ "Tamia",
+ "Tania",
+ "Tanner",
+ "Tanya",
+ "Tara",
+ "Taryn",
+ "Tate",
+ "Tatum",
+ "Tatyana",
+ "Taurean",
+ "Tavares",
+ "Taya",
+ "Taylor",
+ "Teagan",
+ "Ted",
+ "Telly",
+ "Terence",
+ "Teresa",
+ "Terrance",
+ "Terrell",
+ "Terrence",
+ "Terrill",
+ "Terry",
+ "Tess",
+ "Tessie",
+ "Tevin",
+ "Thad",
+ "Thaddeus",
+ "Thalia",
+ "Thea",
+ "Thelma",
+ "Theo",
+ "Theodora",
+ "Theodore",
+ "Theresa",
+ "Therese",
+ "Theresia",
+ "Theron",
+ "Thomas",
+ "Thora",
+ "Thurman",
+ "Tia",
+ "Tiana",
+ "Tianna",
+ "Tiara",
+ "Tierra",
+ "Tiffany",
+ "Tillman",
+ "Timmothy",
+ "Timmy",
+ "Timothy",
+ "Tina",
+ "Tito",
+ "Titus",
+ "Tobin",
+ "Toby",
+ "Tod",
+ "Tom",
+ "Tomas",
+ "Tomasa",
+ "Tommie",
+ "Toney",
+ "Toni",
+ "Tony",
+ "Torey",
+ "Torrance",
+ "Torrey",
+ "Toy",
+ "Trace",
+ "Tracey",
+ "Tracy",
+ "Travis",
+ "Travon",
+ "Tre",
+ "Tremaine",
+ "Tremayne",
+ "Trent",
+ "Trenton",
+ "Tressa",
+ "Tressie",
+ "Treva",
+ "Trever",
+ "Trevion",
+ "Trevor",
+ "Trey",
+ "Trinity",
+ "Trisha",
+ "Tristian",
+ "Tristin",
+ "Triston",
+ "Troy",
+ "Trudie",
+ "Trycia",
+ "Trystan",
+ "Turner",
+ "Twila",
+ "Tyler",
+ "Tyra",
+ "Tyree",
+ "Tyreek",
+ "Tyrel",
+ "Tyrell",
+ "Tyrese",
+ "Tyrique",
+ "Tyshawn",
+ "Tyson",
+ "Ubaldo",
+ "Ulices",
+ "Ulises",
+ "Una",
+ "Unique",
+ "Urban",
+ "Uriah",
+ "Uriel",
+ "Ursula",
+ "Vada",
+ "Valentin",
+ "Valentina",
+ "Valentine",
+ "Valerie",
+ "Vallie",
+ "Van",
+ "Vance",
+ "Vanessa",
+ "Vaughn",
+ "Veda",
+ "Velda",
+ "Vella",
+ "Velma",
+ "Velva",
+ "Vena",
+ "Verda",
+ "Verdie",
+ "Vergie",
+ "Verla",
+ "Verlie",
+ "Vern",
+ "Verna",
+ "Verner",
+ "Vernice",
+ "Vernie",
+ "Vernon",
+ "Verona",
+ "Veronica",
+ "Vesta",
+ "Vicenta",
+ "Vicente",
+ "Vickie",
+ "Vicky",
+ "Victor",
+ "Victoria",
+ "Vida",
+ "Vidal",
+ "Vilma",
+ "Vince",
+ "Vincent",
+ "Vincenza",
+ "Vincenzo",
+ "Vinnie",
+ "Viola",
+ "Violet",
+ "Violette",
+ "Virgie",
+ "Virgil",
+ "Virginia",
+ "Virginie",
+ "Vita",
+ "Vito",
+ "Viva",
+ "Vivian",
+ "Viviane",
+ "Vivianne",
+ "Vivien",
+ "Vivienne",
+ "Vladimir",
+ "Wade",
+ "Waino",
+ "Waldo",
+ "Walker",
+ "Wallace",
+ "Walter",
+ "Walton",
+ "Wanda",
+ "Ward",
+ "Warren",
+ "Watson",
+ "Wava",
+ "Waylon",
+ "Wayne",
+ "Webster",
+ "Weldon",
+ "Wellington",
+ "Wendell",
+ "Wendy",
+ "Werner",
+ "Westley",
+ "Weston",
+ "Whitney",
+ "Wilber",
+ "Wilbert",
+ "Wilburn",
+ "Wiley",
+ "Wilford",
+ "Wilfred",
+ "Wilfredo",
+ "Wilfrid",
+ "Wilhelm",
+ "Wilhelmine",
+ "Will",
+ "Willa",
+ "Willard",
+ "William",
+ "Willie",
+ "Willis",
+ "Willow",
+ "Willy",
+ "Wilma",
+ "Wilmer",
+ "Wilson",
+ "Wilton",
+ "Winfield",
+ "Winifred",
+ "Winnifred",
+ "Winona",
+ "Winston",
+ "Woodrow",
+ "Wyatt",
+ "Wyman",
+ "Xander",
+ "Xavier",
+ "Xzavier",
+ "Yadira",
+ "Yasmeen",
+ "Yasmin",
+ "Yasmine",
+ "Yazmin",
+ "Yesenia",
+ "Yessenia",
+ "Yolanda",
+ "Yoshiko",
+ "Yvette",
+ "Yvonne",
+ "Zachariah",
+ "Zachary",
+ "Zachery",
+ "Zack",
+ "Zackary",
+ "Zackery",
+ "Zakary",
+ "Zander",
+ "Zane",
+ "Zaria",
+ "Zechariah",
+ "Zelda",
+ "Zella",
+ "Zelma",
+ "Zena",
+ "Zetta",
+ "Zion",
+ "Zita",
+ "Zoe",
+ "Zoey",
+ "Zoie",
+ "Zoila",
+ "Zola",
+ "Zora",
+ "Zula",
+ },
+ "name.title.job": []string{
+ "Administrator",
+ "Agent",
+ "Analyst",
+ "Architect",
+ "Assistant",
+ "Associate",
+ "Consultant",
+ "Coordinator",
+ "Designer",
+ "Developer",
+ "Director",
+ "Engineer",
+ "Executive",
+ "Facilitator",
+ "Liason",
+ "Manager",
+ "Officer",
+ "Orchestrator",
+ "Planner",
+ "Producer",
+ "Representative",
+ "Specialist",
+ "Strategist",
+ "Supervisor",
+ "Technician",
+ },
+ "name.prefix": []string{
+ "Mr.",
+ "Mrs.",
+ "Ms.",
+ "Miss",
+ "Dr.",
+ },
+ "company.bs.1": []string{
+ "24/365",
+ "24/7",
+ "B2B",
+ "B2C",
+ "back-end",
+ "best-of-breed",
+ "bleeding-edge",
+ "bricks-and-clicks",
+ "clicks-and-mortar",
+ "collaborative",
+ "compelling",
+ "cross-media",
+ "cross-platform",
+ "customized",
+ "cutting-edge",
+ "distributed",
+ "dot-com",
+ "dynamic",
+ "e-business",
+ "efficient",
+ "end-to-end",
+ "enterprise",
+ "extensible",
+ "frictionless",
+ "front-end",
+ "global",
+ "granular",
+ "holistic",
+ "impactful",
+ "innovative",
+ "integrated",
+ "interactive",
+ "intuitive",
+ "killer",
+ "leading-edge",
+ "magnetic",
+ "mission-critical",
+ "next-generation",
+ "one-to-one",
+ "open-source",
+ "out-of-the-box",
+ "plug-and-play",
+ "proactive",
+ "real-time",
+ "revolutionary",
+ "rich",
+ "robust",
+ "scalable",
+ "seamless",
+ "sexy",
+ "sticky",
+ "strategic",
+ "synergistic",
+ "transparent",
+ "turn-key",
+ "ubiquitous",
+ "user-centric",
+ "value-added",
+ "vertical",
+ "viral",
+ "virtual",
+ "visionary",
+ "web-enabled",
+ "wireless",
+ "world-class",
+ },
+ "company.bs.0": []string{
+ "aggregate",
+ "architect",
+ "benchmark",
+ "brand",
+ "cultivate",
+ "deliver",
+ "deploy",
+ "disintermediate",
+ "drive",
+ "e-enable",
+ "embrace",
+ "empower",
+ "enable",
+ "engage",
+ "engineer",
+ "enhance",
+ "envisioneer",
+ "evolve",
+ "expedite",
+ "exploit",
+ "extend",
+ "facilitate",
+ "generate",
+ "grow",
+ "harness",
+ "implement",
+ "incentivize",
+ "incubate",
+ "innovate",
+ "integrate",
+ "iterate",
+ "leverage",
+ "matrix",
+ "maximize",
+ "mesh",
+ "monetize",
+ "morph",
+ "optimize",
+ "orchestrate",
+ "productize",
+ "recontextualize",
+ "redefine",
+ "reintermediate",
+ "reinvent",
+ "repurpose",
+ "revolutionize",
+ "scale",
+ "seize",
+ "strategize",
+ "streamline",
+ "syndicate",
+ "synergize",
+ "synthesize",
+ "target",
+ "transform",
+ "transition",
+ "unleash",
+ "utilize",
+ "visualize",
+ "whiteboard",
+ },
+ "company.bs.2": []string{
+ "ROI",
+ "action-items",
+ "applications",
+ "architectures",
+ "bandwidth",
+ "channels",
+ "communities",
+ "content",
+ "convergence",
+ "deliverables",
+ "e-business",
+ "e-commerce",
+ "e-markets",
+ "e-services",
+ "e-tailers",
+ "experiences",
+ "eyeballs",
+ "functionalities",
+ "infomediaries",
+ "infrastructures",
+ "initiatives",
+ "interfaces",
+ "markets",
+ "methodologies",
+ "metrics",
+ "mindshare",
+ "models",
+ "networks",
+ "niches",
+ "paradigms",
+ "partnerships",
+ "platforms",
+ "portals",
+ "relationships",
+ "schemas",
+ "solutions",
+ "supply-chains",
+ "synergies",
+ "systems",
+ "technologies",
+ "users",
+ "vortals",
+ "web services",
+ "web-readiness",
+ },
+ "lorem.supplemental": []string{
+ "abbas",
+ "abduco",
+ "abeo",
+ "abscido",
+ "absconditus",
+ "absens",
+ "absorbeo",
+ "absque",
+ "abstergo",
+ "absum",
+ "abundans",
+ "abutor",
+ "accedo",
+ "accendo",
+ "acceptus",
+ "accipio",
+ "accommodo",
+ "accusator",
+ "acer",
+ "acerbitas",
+ "acervus",
+ "acidus",
+ "acies",
+ "acquiro",
+ "acsi",
+ "adamo",
+ "adaugeo",
+ "addo",
+ "adduco",
+ "ademptio",
+ "adeo",
+ "adeptio",
+ "adfectus",
+ "adfero",
+ "adficio",
+ "adflicto",
+ "adhaero",
+ "adhuc",
+ "adicio",
+ "adimpleo",
+ "adinventitias",
+ "adipiscor",
+ "adiuvo",
+ "administratio",
+ "admiratio",
+ "admitto",
+ "admoneo",
+ "admoveo",
+ "adnuo",
+ "adopto",
+ "adsidue",
+ "adstringo",
+ "adsuesco",
+ "adsum",
+ "adulatio",
+ "adulescens",
+ "adultus",
+ "aduro",
+ "advenio",
+ "adversus",
+ "advoco",
+ "aedificium",
+ "aeger",
+ "aegre",
+ "aegrotatio",
+ "aegrus",
+ "aeneus",
+ "aequitas",
+ "aequus",
+ "aer",
+ "aestas",
+ "aestivus",
+ "aestus",
+ "aetas",
+ "aeternus",
+ "ager",
+ "aggero",
+ "aggredior",
+ "agnitio",
+ "agnosco",
+ "ago",
+ "ait",
+ "aiunt",
+ "alienus",
+ "alii",
+ "alioqui",
+ "aliqua",
+ "alius",
+ "allatus",
+ "alo",
+ "alter",
+ "altus",
+ "alveus",
+ "amaritudo",
+ "ambitus",
+ "ambulo",
+ "amicitia",
+ "amiculum",
+ "amissio",
+ "amita",
+ "amitto",
+ "amo",
+ "amor",
+ "amoveo",
+ "amplexus",
+ "amplitudo",
+ "amplus",
+ "ancilla",
+ "angelus",
+ "angulus",
+ "angustus",
+ "animadverto",
+ "animi",
+ "animus",
+ "annus",
+ "anser",
+ "ante",
+ "antea",
+ "antepono",
+ "antiquus",
+ "aperio",
+ "aperte",
+ "apostolus",
+ "apparatus",
+ "appello",
+ "appono",
+ "appositus",
+ "approbo",
+ "apto",
+ "aptus",
+ "apud",
+ "aqua",
+ "ara",
+ "aranea",
+ "arbitro",
+ "arbor",
+ "arbustum",
+ "arca",
+ "arceo",
+ "arcesso",
+ "arcus",
+ "argentum",
+ "argumentum",
+ "arguo",
+ "arma",
+ "armarium",
+ "armo",
+ "aro",
+ "ars",
+ "articulus",
+ "artificiose",
+ "arto",
+ "arx",
+ "ascisco",
+ "ascit",
+ "asper",
+ "aspicio",
+ "asporto",
+ "assentator",
+ "astrum",
+ "atavus",
+ "ater",
+ "atqui",
+ "atrocitas",
+ "atrox",
+ "attero",
+ "attollo",
+ "attonbitus",
+ "auctor",
+ "auctus",
+ "audacia",
+ "audax",
+ "audentia",
+ "audeo",
+ "audio",
+ "auditor",
+ "aufero",
+ "aureus",
+ "auris",
+ "aurum",
+ "aut",
+ "autem",
+ "autus",
+ "auxilium",
+ "avaritia",
+ "avarus",
+ "aveho",
+ "averto",
+ "avoco",
+ "baiulus",
+ "balbus",
+ "barba",
+ "bardus",
+ "basium",
+ "beatus",
+ "bellicus",
+ "bellum",
+ "bene",
+ "beneficium",
+ "benevolentia",
+ "benigne",
+ "bestia",
+ "bibo",
+ "bis",
+ "blandior",
+ "bonus",
+ "bos",
+ "brevis",
+ "cado",
+ "caecus",
+ "caelestis",
+ "caelum",
+ "calamitas",
+ "calcar",
+ "calco",
+ "calculus",
+ "callide",
+ "campana",
+ "candidus",
+ "canis",
+ "canonicus",
+ "canto",
+ "capillus",
+ "capio",
+ "capitulus",
+ "capto",
+ "caput",
+ "carbo",
+ "carcer",
+ "careo",
+ "caries",
+ "cariosus",
+ "caritas",
+ "carmen",
+ "carpo",
+ "carus",
+ "casso",
+ "caste",
+ "casus",
+ "catena",
+ "caterva",
+ "cattus",
+ "cauda",
+ "causa",
+ "caute",
+ "caveo",
+ "cavus",
+ "cedo",
+ "celebrer",
+ "celer",
+ "celo",
+ "cena",
+ "cenaculum",
+ "ceno",
+ "censura",
+ "centum",
+ "cerno",
+ "cernuus",
+ "certe",
+ "certo",
+ "certus",
+ "cervus",
+ "cetera",
+ "charisma",
+ "chirographum",
+ "cibo",
+ "cibus",
+ "cicuta",
+ "cilicium",
+ "cimentarius",
+ "ciminatio",
+ "cinis",
+ "circumvenio",
+ "cito",
+ "civis",
+ "civitas",
+ "clam",
+ "clamo",
+ "claro",
+ "clarus",
+ "claudeo",
+ "claustrum",
+ "clementia",
+ "clibanus",
+ "coadunatio",
+ "coaegresco",
+ "coepi",
+ "coerceo",
+ "cogito",
+ "cognatus",
+ "cognomen",
+ "cogo",
+ "cohaero",
+ "cohibeo",
+ "cohors",
+ "colligo",
+ "colloco",
+ "collum",
+ "colo",
+ "color",
+ "coma",
+ "combibo",
+ "comburo",
+ "comedo",
+ "comes",
+ "cometes",
+ "comis",
+ "comitatus",
+ "commemoro",
+ "comminor",
+ "commodo",
+ "communis",
+ "comparo",
+ "compello",
+ "complectus",
+ "compono",
+ "comprehendo",
+ "comptus",
+ "conatus",
+ "concedo",
+ "concido",
+ "conculco",
+ "condico",
+ "conduco",
+ "confero",
+ "confido",
+ "conforto",
+ "confugo",
+ "congregatio",
+ "conicio",
+ "coniecto",
+ "conitor",
+ "coniuratio",
+ "conor",
+ "conqueror",
+ "conscendo",
+ "conservo",
+ "considero",
+ "conspergo",
+ "constans",
+ "consuasor",
+ "contabesco",
+ "contego",
+ "contigo",
+ "contra",
+ "conturbo",
+ "conventus",
+ "convoco",
+ "copia",
+ "copiose",
+ "cornu",
+ "corona",
+ "corpus",
+ "correptius",
+ "corrigo",
+ "corroboro",
+ "corrumpo",
+ "coruscus",
+ "cotidie",
+ "crapula",
+ "cras",
+ "crastinus",
+ "creator",
+ "creber",
+ "crebro",
+ "credo",
+ "creo",
+ "creptio",
+ "crepusculum",
+ "cresco",
+ "creta",
+ "cribro",
+ "crinis",
+ "cruciamentum",
+ "crudelis",
+ "cruentus",
+ "crur",
+ "crustulum",
+ "crux",
+ "cubicularis",
+ "cubitum",
+ "cubo",
+ "cui",
+ "cuius",
+ "culpa",
+ "culpo",
+ "cultellus",
+ "cultura",
+ "cum",
+ "cunabula",
+ "cunae",
+ "cunctatio",
+ "cupiditas",
+ "cupio",
+ "cuppedia",
+ "cupressus",
+ "cur",
+ "cura",
+ "curatio",
+ "curia",
+ "curiositas",
+ "curis",
+ "curo",
+ "curriculum",
+ "currus",
+ "cursim",
+ "curso",
+ "cursus",
+ "curto",
+ "curtus",
+ "curvo",
+ "curvus",
+ "custodia",
+ "damnatio",
+ "damno",
+ "dapifer",
+ "debeo",
+ "debilito",
+ "decens",
+ "decerno",
+ "decet",
+ "decimus",
+ "decipio",
+ "decor",
+ "decretum",
+ "decumbo",
+ "dedecor",
+ "dedico",
+ "deduco",
+ "defaeco",
+ "defendo",
+ "defero",
+ "defessus",
+ "defetiscor",
+ "deficio",
+ "defigo",
+ "defleo",
+ "defluo",
+ "defungo",
+ "degenero",
+ "degero",
+ "degusto",
+ "deinde",
+ "delectatio",
+ "delego",
+ "deleo",
+ "delibero",
+ "delicate",
+ "delinquo",
+ "deludo",
+ "demens",
+ "demergo",
+ "demitto",
+ "demo",
+ "demonstro",
+ "demoror",
+ "demulceo",
+ "demum",
+ "denego",
+ "denique",
+ "dens",
+ "denuncio",
+ "denuo",
+ "deorsum",
+ "depereo",
+ "depono",
+ "depopulo",
+ "deporto",
+ "depraedor",
+ "deprecator",
+ "deprimo",
+ "depromo",
+ "depulso",
+ "deputo",
+ "derelinquo",
+ "derideo",
+ "deripio",
+ "desidero",
+ "desino",
+ "desipio",
+ "desolo",
+ "desparatus",
+ "despecto",
+ "despirmatio",
+ "infit",
+ "inflammatio",
+ "paens",
+ "patior",
+ "patria",
+ "patrocinor",
+ "patruus",
+ "pauci",
+ "paulatim",
+ "pauper",
+ "pax",
+ "peccatus",
+ "pecco",
+ "pecto",
+ "pectus",
+ "pecunia",
+ "pecus",
+ "peior",
+ "pel",
+ "ocer",
+ "socius",
+ "sodalitas",
+ "sol",
+ "soleo",
+ "solio",
+ "solitudo",
+ "solium",
+ "sollers",
+ "sollicito",
+ "solum",
+ "solus",
+ "solutio",
+ "solvo",
+ "somniculosus",
+ "somnus",
+ "sonitus",
+ "sono",
+ "sophismata",
+ "sopor",
+ "sordeo",
+ "sortitus",
+ "spargo",
+ "speciosus",
+ "spectaculum",
+ "speculum",
+ "sperno",
+ "spero",
+ "spes",
+ "spiculum",
+ "spiritus",
+ "spoliatio",
+ "sponte",
+ "stabilis",
+ "statim",
+ "statua",
+ "stella",
+ "stillicidium",
+ "stipes",
+ "stips",
+ "sto",
+ "strenuus",
+ "strues",
+ "studio",
+ "stultus",
+ "suadeo",
+ "suasoria",
+ "sub",
+ "subito",
+ "subiungo",
+ "sublime",
+ "subnecto",
+ "subseco",
+ "substantia",
+ "subvenio",
+ "succedo",
+ "succurro",
+ "sufficio",
+ "suffoco",
+ "suffragium",
+ "suggero",
+ "sui",
+ "sulum",
+ "sum",
+ "summa",
+ "summisse",
+ "summopere",
+ "sumo",
+ "sumptus",
+ "supellex",
+ "super",
+ "suppellex",
+ "supplanto",
+ "suppono",
+ "supra",
+ "surculus",
+ "surgo",
+ "sursum",
+ "suscipio",
+ "suspendo",
+ "sustineo",
+ "suus",
+ "synagoga",
+ "tabella",
+ "tabernus",
+ "tabesco",
+ "tabgo",
+ "tabula",
+ "taceo",
+ "tactus",
+ "taedium",
+ "talio",
+ "talis",
+ "talus",
+ "tam",
+ "tamdiu",
+ "tamen",
+ "tametsi",
+ "tamisium",
+ "tamquam",
+ "tandem",
+ "tantillus",
+ "tantum",
+ "tardus",
+ "tego",
+ "temeritas",
+ "temperantia",
+ "templum",
+ "temptatio",
+ "tempus",
+ "tenax",
+ "tendo",
+ "teneo",
+ "tener",
+ "tenuis",
+ "tenus",
+ "tepesco",
+ "tepidus",
+ "ter",
+ "terebro",
+ "teres",
+ "terga",
+ "tergeo",
+ "tergiversatio",
+ "tergo",
+ "tergum",
+ "termes",
+ "terminatio",
+ "tero",
+ "terra",
+ "terreo",
+ "territo",
+ "terror",
+ "tersus",
+ "tertius",
+ "testimonium",
+ "texo",
+ "textilis",
+ "textor",
+ "textus",
+ "thalassinus",
+ "theatrum",
+ "theca",
+ "thema",
+ "theologus",
+ "thermae",
+ "thesaurus",
+ "thesis",
+ "thorax",
+ "thymbra",
+ "thymum",
+ "tibi",
+ "timidus",
+ "timor",
+ "titulus",
+ "tolero",
+ "tollo",
+ "tondeo",
+ "tonsor",
+ "torqueo",
+ "torrens",
+ "tot",
+ "totidem",
+ "toties",
+ "totus",
+ "tracto",
+ "trado",
+ "traho",
+ "trans",
+ "tredecim",
+ "tremo",
+ "trepide",
+ "tres",
+ "tribuo",
+ "tricesimus",
+ "triduana",
+ "triginta",
+ "tripudio",
+ "tristis",
+ "triumphus",
+ "trucido",
+ "truculenter",
+ "tubineus",
+ "tui",
+ "tum",
+ "tumultus",
+ "tunc",
+ "turba",
+ "turbo",
+ "turpe",
+ "turpis",
+ "tutamen",
+ "tutis",
+ "tyrannus",
+ "uberrime",
+ "ubi",
+ "ulciscor",
+ "ullus",
+ "ulterius",
+ "ultio",
+ "ultra",
+ "umbra",
+ "umerus",
+ "umquam",
+ "una",
+ "unde",
+ "undique",
+ "universe",
+ "unus",
+ "urbanus",
+ "urbs",
+ "uredo",
+ "usitas",
+ "usque",
+ "ustilo",
+ "ustulo",
+ "usus",
+ "uter",
+ "uterque",
+ "utilis",
+ "utique",
+ "utor",
+ "utpote",
+ "utrimque",
+ "utroque",
+ "utrum",
+ "uxor",
+ "vaco",
+ "vacuus",
+ "vado",
+ "vae",
+ "valde",
+ "valens",
+ "valeo",
+ "valetudo",
+ "validus",
+ "vallum",
+ "vapulus",
+ "varietas",
+ "varius",
+ "vehemens",
+ "vel",
+ "velociter",
+ "velum",
+ "velut",
+ "venia",
+ "venio",
+ "ventito",
+ "ventosus",
+ "ventus",
+ "venustas",
+ "ver",
+ "verbera",
+ "verbum",
+ "vere",
+ "verecundia",
+ "vereor",
+ "vergo",
+ "veritas",
+ "vero",
+ "versus",
+ "verto",
+ "verumtamen",
+ "verus",
+ "vesco",
+ "vesica",
+ "vesper",
+ "vespillo",
+ "vester",
+ "vestigium",
+ "vestrum",
+ "vetus",
+ "via",
+ "vicinus",
+ "vicissitudo",
+ "victoria",
+ "victus",
+ "videlicet",
+ "video",
+ "viduata",
+ "viduo",
+ "vigilo",
+ "vigor",
+ "vilicus",
+ "vilis",
+ "vilitas",
+ "villa",
+ "vinco",
+ "vinculum",
+ "vindico",
+ "vinitor",
+ "vinum",
+ "vir",
+ "virga",
+ "virgo",
+ "viridis",
+ "viriliter",
+ "virtus",
+ "vis",
+ "viscus",
+ "vita",
+ "vitiosus",
+ "vitium",
+ "vito",
+ "vivo",
+ "vix",
+ "vobis",
+ "vociferor",
+ "voco",
+ "volaticus",
+ "volo",
+ "volubilis",
+ "voluntarius",
+ "volup",
+ "volutabrum",
+ "volva",
+ "vomer",
+ "vomica",
+ "vomito",
+ "vorago",
+ "vorax",
+ "voro",
+ "vos",
+ "votum",
+ "voveo",
+ "vox",
+ "vulariter",
+ "vulgaris",
+ "vulgivagus",
+ "vulgo",
+ "vulgus",
+ "vulnero",
+ "vulnus",
+ "vulpes",
+ "vulticulus",
+ "vultuosus",
+ "xiphias",
+ },
+ "internet.free_email": []string{
+ "gmail.com",
+ "yahoo.com",
+ "hotmail.com",
+ },
+ "lorem.words": []string{
+ "alias",
+ "consequatur",
+ "aut",
+ "perferendis",
+ "sit",
+ "voluptatem",
+ "accusantium",
+ "doloremque",
+ "aperiam",
+ "eaque",
+ "ipsa",
+ "quae",
+ "ab",
+ "illo",
+ "inventore",
+ "veritatis",
+ "et",
+ "quasi",
+ "architecto",
+ "beatae",
+ "vitae",
+ "dicta",
+ "sunt",
+ "explicabo",
+ "aspernatur",
+ "aut",
+ "odit",
+ "aut",
+ "fugit",
+ "sed",
+ "quia",
+ "consequuntur",
+ "magni",
+ "dolores",
+ "eos",
+ "qui",
+ "ratione",
+ "voluptatem",
+ "sequi",
+ "nesciunt",
+ "neque",
+ "dolorem",
+ "ipsum",
+ "quia",
+ "dolor",
+ "sit",
+ "amet",
+ "consectetur",
+ "adipisci",
+ "velit",
+ "sed",
+ "quia",
+ "non",
+ "numquam",
+ "eius",
+ "modi",
+ "tempora",
+ "incidunt",
+ "ut",
+ "labore",
+ "et",
+ "dolore",
+ "magnam",
+ "aliquam",
+ "quaerat",
+ "voluptatem",
+ "ut",
+ "enim",
+ "ad",
+ "minima",
+ "veniam",
+ "quis",
+ "nostrum",
+ "exercitationem",
+ "ullam",
+ "corporis",
+ "nemo",
+ "enim",
+ "ipsam",
+ "voluptatem",
+ "quia",
+ "voluptas",
+ "sit",
+ "suscipit",
+ "laboriosam",
+ "nisi",
+ "ut",
+ "aliquid",
+ "ex",
+ "ea",
+ "commodi",
+ "consequatur",
+ "quis",
+ "autem",
+ "vel",
+ "eum",
+ "iure",
+ "reprehenderit",
+ "qui",
+ "in",
+ "ea",
+ "voluptate",
+ "velit",
+ "esse",
+ "quam",
+ "nihil",
+ "molestiae",
+ "et",
+ "iusto",
+ "odio",
+ "dignissimos",
+ "ducimus",
+ "qui",
+ "blanditiis",
+ "praesentium",
+ "laudantium",
+ "totam",
+ "rem",
+ "voluptatum",
+ "deleniti",
+ "atque",
+ "corrupti",
+ "quos",
+ "dolores",
+ "et",
+ "quas",
+ "molestias",
+ "excepturi",
+ "sint",
+ "occaecati",
+ "cupiditate",
+ "non",
+ "provident",
+ "sed",
+ "ut",
+ "perspiciatis",
+ "unde",
+ "omnis",
+ "iste",
+ "natus",
+ "error",
+ "similique",
+ "sunt",
+ "in",
+ "culpa",
+ "qui",
+ "officia",
+ "deserunt",
+ "mollitia",
+ "animi",
+ "id",
+ "est",
+ "laborum",
+ "et",
+ "dolorum",
+ "fuga",
+ "et",
+ "harum",
+ "quidem",
+ "rerum",
+ "facilis",
+ "est",
+ "et",
+ "expedita",
+ "distinctio",
+ "nam",
+ "libero",
+ "tempore",
+ "cum",
+ "soluta",
+ "nobis",
+ "est",
+ "eligendi",
+ "optio",
+ "cumque",
+ "nihil",
+ "impedit",
+ "quo",
+ "porro",
+ "quisquam",
+ "est",
+ "qui",
+ "minus",
+ "id",
+ "quod",
+ "maxime",
+ "placeat",
+ "facere",
+ "possimus",
+ "omnis",
+ "voluptas",
+ "assumenda",
+ "est",
+ "omnis",
+ "dolor",
+ "repellendus",
+ "temporibus",
+ "autem",
+ "quibusdam",
+ "et",
+ "aut",
+ "consequatur",
+ "vel",
+ "illum",
+ "qui",
+ "dolorem",
+ "eum",
+ "fugiat",
+ "quo",
+ "voluptas",
+ "nulla",
+ "pariatur",
+ "at",
+ "vero",
+ "eos",
+ "et",
+ "accusamus",
+ "officiis",
+ "debitis",
+ "aut",
+ "rerum",
+ "necessitatibus",
+ "saepe",
+ "eveniet",
+ "ut",
+ "et",
+ "voluptates",
+ "repudiandae",
+ "sint",
+ "et",
+ "molestiae",
+ "non",
+ "recusandae",
+ "itaque",
+ "earum",
+ "rerum",
+ "hic",
+ "tenetur",
+ "a",
+ "sapiente",
+ "delectus",
+ "ut",
+ "aut",
+ "reiciendis",
+ "voluptatibus",
+ "maiores",
+ "doloribus",
+ "asperiores",
+ "repellat",
+ },
+ "name.name": []string{
+ "#{prefix} #{first_name} #{last_name}",
+ "#{first_name} #{last_name} #{suffix}",
+ "#{first_name} #{last_name}",
+ "#{first_name} #{last_name}",
+ "#{first_name} #{last_name}",
+ "#{first_name} #{last_name}",
+ },
+ "address.building_number": []string{
+ "#####",
+ "####",
+ "###",
+ },
+ "phone_number.formats": []string{
+ "###-###-####",
+ "(###)###-####",
+ "1-###-###-####",
+ "###.###.####",
+ "###-###-####",
+ "(###)###-####",
+ "1-###-###-####",
+ "###.###.####",
+ "###-###-#### x###",
+ "(###)###-#### x###",
+ "1-###-###-#### x###",
+ "###.###.#### x###",
+ "###-###-#### x####",
+ "(###)###-#### x####",
+ "1-###-###-#### x####",
+ "###.###.#### x####",
+ "###-###-#### x#####",
+ "(###)###-#### x#####",
+ "1-###-###-#### x#####",
+ "###.###.#### x#####",
+ },
+ "address.street_name": []string{
+ "#{Name.first_name} #{street_suffix}",
+ "#{Name.last_name} #{street_suffix}",
+ },
+ "company.buzzwords.2": []string{
+ "Graphic Interface",
+ "Graphical User Interface",
+ "ability",
+ "access",
+ "adapter",
+ "algorithm",
+ "alliance",
+ "analyzer",
+ "application",
+ "approach",
+ "architecture",
+ "archive",
+ "array",
+ "artificial intelligence",
+ "attitude",
+ "benchmark",
+ "budgetary management",
+ "capability",
+ "capacity",
+ "challenge",
+ "circuit",
+ "collaboration",
+ "complexity",
+ "concept",
+ "conglomeration",
+ "contingency",
+ "core",
+ "customer loyalty",
+ "data-warehouse",
+ "database",
+ "definition",
+ "emulation",
+ "encoding",
+ "encryption",
+ "extranet",
+ "firmware",
+ "flexibility",
+ "focus group",
+ "forecast",
+ "frame",
+ "framework",
+ "function",
+ "functionalities",
+ "groupware",
+ "hardware",
+ "help-desk",
+ "hierarchy",
+ "hub",
+ "implementation",
+ "info-mediaries",
+ "infrastructure",
+ "initiative",
+ "installation",
+ "instruction set",
+ "interface",
+ "internet solution",
+ "intranet",
+ "knowledge base",
+ "knowledge user",
+ "leverage",
+ "local area network",
+ "matrices",
+ "matrix",
+ "methodology",
+ "middleware",
+ "migration",
+ "model",
+ "moderator",
+ "monitoring",
+ "moratorium",
+ "neural-net",
+ "open architecture",
+ "open system",
+ "orchestration",
+ "paradigm",
+ "parallelism",
+ "policy",
+ "portal",
+ "pricing structure",
+ "process improvement",
+ "product",
+ "productivity",
+ "project",
+ "projection",
+ "protocol",
+ "secured line",
+ "service-desk",
+ "software",
+ "solution",
+ "standardization",
+ "strategy",
+ "structure",
+ "success",
+ "superstructure",
+ "support",
+ "synergy",
+ "system engine",
+ "task-force",
+ "throughput",
+ "time-frame",
+ "toolset",
+ "utilisation",
+ "website",
+ "workforce",
+ },
+ "company.buzzwords.0": []string{
+ "Adaptive",
+ "Advanced",
+ "Ameliorated",
+ "Assimilated",
+ "Automated",
+ "Balanced",
+ "Business-focused",
+ "Centralized",
+ "Cloned",
+ "Compatible",
+ "Configurable",
+ "Cross-group",
+ "Cross-platform",
+ "Customer-focused",
+ "Customizable",
+ "De-engineered",
+ "Decentralized",
+ "Devolved",
+ "Digitized",
+ "Distributed",
+ "Diverse",
+ "Down-sized",
+ "Enhanced",
+ "Enterprise-wide",
+ "Ergonomic",
+ "Exclusive",
+ "Expanded",
+ "Extended",
+ "Face to face",
+ "Focused",
+ "Front-line",
+ "Fully-configurable",
+ "Function-based",
+ "Fundamental",
+ "Future-proofed",
+ "Grass-roots",
+ "Horizontal",
+ "Implemented",
+ "Innovative",
+ "Integrated",
+ "Intuitive",
+ "Inverse",
+ "Managed",
+ "Mandatory",
+ "Monitored",
+ "Multi-channelled",
+ "Multi-lateral",
+ "Multi-layered",
+ "Multi-tiered",
+ "Networked",
+ "Object-based",
+ "Open-architected",
+ "Open-source",
+ "Operative",
+ "Optimized",
+ "Optional",
+ "Organic",
+ "Organized",
+ "Persevering",
+ "Persistent",
+ "Phased",
+ "Polarised",
+ "Pre-emptive",
+ "Proactive",
+ "Profit-focused",
+ "Profound",
+ "Programmable",
+ "Progressive",
+ "Public-key",
+ "Quality-focused",
+ "Re-contextualized",
+ "Re-engineered",
+ "Reactive",
+ "Realigned",
+ "Reduced",
+ "Reverse-engineered",
+ "Right-sized",
+ "Robust",
+ "Seamless",
+ "Secured",
+ "Self-enabling",
+ "Sharable",
+ "Stand-alone",
+ "Streamlined",
+ "Switchable",
+ "Synchronised",
+ "Synergistic",
+ "Synergized",
+ "Team-oriented",
+ "Total",
+ "Triple-buffered",
+ "Universal",
+ "Up-sized",
+ "Upgradable",
+ "User-centric",
+ "User-friendly",
+ "Versatile",
+ "Virtual",
+ "Vision-oriented",
+ "Visionary",
+ },
+ "company.buzzwords.1": []string{
+ "24 hour",
+ "24/7",
+ "3rd generation",
+ "4th generation",
+ "5th generation",
+ "6th generation",
+ "actuating",
+ "analyzing",
+ "assymetric",
+ "asynchronous",
+ "attitude-oriented",
+ "background",
+ "bandwidth-monitored",
+ "bi-directional",
+ "bifurcated",
+ "bottom-line",
+ "clear-thinking",
+ "client-driven",
+ "client-server",
+ "coherent",
+ "cohesive",
+ "composite",
+ "content-based",
+ "context-sensitive",
+ "contextually-based",
+ "dedicated",
+ "demand-driven",
+ "didactic",
+ "directional",
+ "discrete",
+ "disintermediate",
+ "dynamic",
+ "eco-centric",
+ "empowering",
+ "encompassing",
+ "even-keeled",
+ "executive",
+ "explicit",
+ "exuding",
+ "fault-tolerant",
+ "foreground",
+ "fresh-thinking",
+ "full-range",
+ "global",
+ "grid-enabled",
+ "heuristic",
+ "high-level",
+ "holistic",
+ "homogeneous",
+ "human-resource",
+ "hybrid",
+ "impactful",
+ "incremental",
+ "intangible",
+ "interactive",
+ "intermediate",
+ "leading edge",
+ "local",
+ "logistical",
+ "maximized",
+ "methodical",
+ "mission-critical",
+ "mobile",
+ "modular",
+ "motivating",
+ "multi-state",
+ "multi-tasking",
+ "multimedia",
+ "national",
+ "needs-based",
+ "neutral",
+ "next generation",
+ "non-volatile",
+ "object-oriented",
+ "optimal",
+ "optimizing",
+ "radical",
+ "real-time",
+ "reciprocal",
+ "regional",
+ "responsive",
+ "scalable",
+ "secondary",
+ "solution-oriented",
+ "stable",
+ "static",
+ "system-worthy",
+ "systematic",
+ "systemic",
+ "tangible",
+ "tertiary",
+ "transitional",
+ "uniform",
+ "upward-trending",
+ "user-facing",
+ "value-added",
+ "web-enabled",
+ "well-modulated",
+ "zero administration",
+ "zero defect",
+ "zero tolerance",
+ },
+ "address.street_suffix": []string{
+ "Alley",
+ "Avenue",
+ "Branch",
+ "Bridge",
+ "Brook",
+ "Brooks",
+ "Burg",
+ "Burgs",
+ "Bypass",
+ "Camp",
+ "Canyon",
+ "Cape",
+ "Causeway",
+ "Center",
+ "Centers",
+ "Circle",
+ "Circles",
+ "Cliff",
+ "Cliffs",
+ "Club",
+ "Common",
+ "Corner",
+ "Corners",
+ "Course",
+ "Court",
+ "Courts",
+ "Cove",
+ "Coves",
+ "Creek",
+ "Crescent",
+ "Crest",
+ "Crossing",
+ "Crossroad",
+ "Curve",
+ "Dale",
+ "Dam",
+ "Divide",
+ "Drive",
+ "Drive",
+ "Drives",
+ "Estate",
+ "Estates",
+ "Expressway",
+ "Extension",
+ "Extensions",
+ "Fall",
+ "Falls",
+ "Ferry",
+ "Field",
+ "Fields",
+ "Flat",
+ "Flats",
+ "Ford",
+ "Fords",
+ "Forest",
+ "Forge",
+ "Forges",
+ "Fork",
+ "Forks",
+ "Fort",
+ "Freeway",
+ "Garden",
+ "Gardens",
+ "Gateway",
+ "Glen",
+ "Glens",
+ "Green",
+ "Greens",
+ "Grove",
+ "Groves",
+ "Harbor",
+ "Harbors",
+ "Haven",
+ "Heights",
+ "Highway",
+ "Hill",
+ "Hills",
+ "Hollow",
+ "Inlet",
+ "Inlet",
+ "Island",
+ "Island",
+ "Islands",
+ "Islands",
+ "Isle",
+ "Isle",
+ "Junction",
+ "Junctions",
+ "Key",
+ "Keys",
+ "Knoll",
+ "Knolls",
+ "Lake",
+ "Lakes",
+ "Land",
+ "Landing",
+ "Lane",
+ "Light",
+ "Lights",
+ "Loaf",
+ "Lock",
+ "Locks",
+ "Locks",
+ "Lodge",
+ "Lodge",
+ "Loop",
+ "Mall",
+ "Manor",
+ "Manors",
+ "Meadow",
+ "Meadows",
+ "Mews",
+ "Mill",
+ "Mills",
+ "Mission",
+ "Mission",
+ "Motorway",
+ "Mount",
+ "Mountain",
+ "Mountain",
+ "Mountains",
+ "Mountains",
+ "Neck",
+ "Orchard",
+ "Oval",
+ "Overpass",
+ "Park",
+ "Parks",
+ "Parkway",
+ "Parkways",
+ "Pass",
+ "Passage",
+ "Path",
+ "Pike",
+ "Pine",
+ "Pines",
+ "Place",
+ "Plain",
+ "Plains",
+ "Plains",
+ "Plaza",
+ "Plaza",
+ "Point",
+ "Points",
+ "Port",
+ "Port",
+ "Ports",
+ "Ports",
+ "Prairie",
+ "Prairie",
+ "Radial",
+ "Ramp",
+ "Ranch",
+ "Rapid",
+ "Rapids",
+ "Rest",
+ "Ridge",
+ "Ridges",
+ "River",
+ "Road",
+ "Road",
+ "Roads",
+ "Roads",
+ "Route",
+ "Row",
+ "Rue",
+ "Run",
+ "Shoal",
+ "Shoals",
+ "Shore",
+ "Shores",
+ "Skyway",
+ "Spring",
+ "Springs",
+ "Springs",
+ "Spur",
+ "Spurs",
+ "Square",
+ "Square",
+ "Squares",
+ "Squares",
+ "Station",
+ "Station",
+ "Stravenue",
+ "Stravenue",
+ "Stream",
+ "Stream",
+ "Street",
+ "Street",
+ "Streets",
+ "Summit",
+ "Summit",
+ "Terrace",
+ "Throughway",
+ "Trace",
+ "Track",
+ "Trafficway",
+ "Trail",
+ "Trail",
+ "Tunnel",
+ "Tunnel",
+ "Turnpike",
+ "Turnpike",
+ "Underpass",
+ "Union",
+ "Unions",
+ "Valley",
+ "Valleys",
+ "Via",
+ "Viaduct",
+ "View",
+ "Views",
+ "Village",
+ "Village",
+ "Villages",
+ "Ville",
+ "Vista",
+ "Vista",
+ "Walk",
+ "Walks",
+ "Wall",
+ "Way",
+ "Ways",
+ "Well",
+ "Wells",
+ },
+ "company.name": []string{
+ "#{Name.last_name} #{suffix}",
+ "#{Name.last_name}-#{Name.last_name}",
+ "#{Name.last_name}, #{Name.last_name} and #{Name.last_name}",
+ },
+ "address.city_prefix": []string{
+ "North",
+ "East",
+ "West",
+ "South",
+ "New",
+ "Lake",
+ "Port",
+ },
+ "address.state": []string{
+ "Alabama",
+ "Alaska",
+ "Arizona",
+ "Arkansas",
+ "California",
+ "Colorado",
+ "Connecticut",
+ "Delaware",
+ "Florida",
+ "Georgia",
+ "Hawaii",
+ "Idaho",
+ "Illinois",
+ "Indiana",
+ "Iowa",
+ "Kansas",
+ "Kentucky",
+ "Louisiana",
+ "Maine",
+ "Maryland",
+ "Massachusetts",
+ "Michigan",
+ "Minnesota",
+ "Mississippi",
+ "Missouri",
+ "Montana",
+ "Nebraska",
+ "Nevada",
+ "New Hampshire",
+ "New Jersey",
+ "New Mexico",
+ "New York",
+ "North Carolina",
+ "North Dakota",
+ "Ohio",
+ "Oklahoma",
+ "Oregon",
+ "Pennsylvania",
+ "Rhode Island",
+ "South Carolina",
+ "South Dakota",
+ "Tennessee",
+ "Texas",
+ "Utah",
+ "Vermont",
+ "Virginia",
+ "Washington",
+ "West Virginia",
+ "Wisconsin",
+ "Wyoming",
+ },
+ "internet.domain_suffix": []string{
+ "com",
+ "biz",
+ "info",
+ "name",
+ "net",
+ "org",
+ },
+ "name.last_name": []string{
+ "Abbott",
+ "Abernathy",
+ "Abshire",
+ "Adams",
+ "Altenwerth",
+ "Anderson",
+ "Ankunding",
+ "Armstrong",
+ "Auer",
+ "Aufderhar",
+ "Bahringer",
+ "Bailey",
+ "Balistreri",
+ "Barrows",
+ "Bartell",
+ "Bartoletti",
+ "Barton",
+ "Bashirian",
+ "Batz",
+ "Bauch",
+ "Baumbach",
+ "Bayer",
+ "Beahan",
+ "Beatty",
+ "Bechtelar",
+ "Becker",
+ "Bednar",
+ "Beer",
+ "Beier",
+ "Berge",
+ "Bergnaum",
+ "Bergstrom",
+ "Bernhard",
+ "Bernier",
+ "Bins",
+ "Blanda",
+ "Blick",
+ "Block",
+ "Bode",
+ "Boehm",
+ "Bogan",
+ "Bogisich",
+ "Borer",
+ "Bosco",
+ "Botsford",
+ "Boyer",
+ "Boyle",
+ "Bradtke",
+ "Brakus",
+ "Braun",
+ "Breitenberg",
+ "Brekke",
+ "Brown",
+ "Bruen",
+ "Buckridge",
+ "Carroll",
+ "Carter",
+ "Cartwright",
+ "Casper",
+ "Cassin",
+ "Champlin",
+ "Christiansen",
+ "Cole",
+ "Collier",
+ "Collins",
+ "Conn",
+ "Connelly",
+ "Conroy",
+ "Considine",
+ "Corkery",
+ "Cormier",
+ "Corwin",
+ "Cremin",
+ "Crist",
+ "Crona",
+ "Cronin",
+ "Crooks",
+ "Cruickshank",
+ "Cummerata",
+ "Cummings",
+ "Dach",
+ "D'Amore",
+ "Daniel",
+ "Dare",
+ "Daugherty",
+ "Davis",
+ "Deckow",
+ "Denesik",
+ "Dibbert",
+ "Dickens",
+ "Dicki",
+ "Dickinson",
+ "Dietrich",
+ "Donnelly",
+ "Dooley",
+ "Douglas",
+ "Doyle",
+ "DuBuque",
+ "Durgan",
+ "Ebert",
+ "Effertz",
+ "Eichmann",
+ "Emard",
+ "Emmerich",
+ "Erdman",
+ "Ernser",
+ "Fadel",
+ "Fahey",
+ "Farrell",
+ "Fay",
+ "Feeney",
+ "Feest",
+ "Feil",
+ "Ferry",
+ "Fisher",
+ "Flatley",
+ "Frami",
+ "Franecki",
+ "Friesen",
+ "Fritsch",
+ "Funk",
+ "Gaylord",
+ "Gerhold",
+ "Gerlach",
+ "Gibson",
+ "Gislason",
+ "Gleason",
+ "Gleichner",
+ "Glover",
+ "Goldner",
+ "Goodwin",
+ "Gorczany",
+ "Gottlieb",
+ "Goyette",
+ "Grady",
+ "Graham",
+ "Grant",
+ "Green",
+ "Greenfelder",
+ "Greenholt",
+ "Grimes",
+ "Gulgowski",
+ "Gusikowski",
+ "Gutkowski",
+ "Gutmann",
+ "Haag",
+ "Hackett",
+ "Hagenes",
+ "Hahn",
+ "Haley",
+ "Halvorson",
+ "Hamill",
+ "Hammes",
+ "Hand",
+ "Hane",
+ "Hansen",
+ "Harber",
+ "Harris",
+ "Hartmann",
+ "Harvey",
+ "Hauck",
+ "Hayes",
+ "Heaney",
+ "Heathcote",
+ "Hegmann",
+ "Heidenreich",
+ "Heller",
+ "Herman",
+ "Hermann",
+ "Hermiston",
+ "Herzog",
+ "Hessel",
+ "Hettinger",
+ "Hickle",
+ "Hilll",
+ "Hills",
+ "Hilpert",
+ "Hintz",
+ "Hirthe",
+ "Hodkiewicz",
+ "Hoeger",
+ "Homenick",
+ "Hoppe",
+ "Howe",
+ "Howell",
+ "Hudson",
+ "Huel",
+ "Huels",
+ "Hyatt",
+ "Jacobi",
+ "Jacobs",
+ "Jacobson",
+ "Jakubowski",
+ "Jaskolski",
+ "Jast",
+ "Jenkins",
+ "Jerde",
+ "Jewess",
+ "Johns",
+ "Johnson",
+ "Johnston",
+ "Jones",
+ "Kassulke",
+ "Kautzer",
+ "Keebler",
+ "Keeling",
+ "Kemmer",
+ "Kerluke",
+ "Kertzmann",
+ "Kessler",
+ "Kiehn",
+ "Kihn",
+ "Kilback",
+ "King",
+ "Kirlin",
+ "Klein",
+ "Kling",
+ "Klocko",
+ "Koch",
+ "Koelpin",
+ "Koepp",
+ "Kohler",
+ "Konopelski",
+ "Koss",
+ "Kovacek",
+ "Kozey",
+ "Krajcik",
+ "Kreiger",
+ "Kris",
+ "Kshlerin",
+ "Kub",
+ "Kuhic",
+ "Kuhlman",
+ "Kuhn",
+ "Kulas",
+ "Kunde",
+ "Kunze",
+ "Kuphal",
+ "Kutch",
+ "Kuvalis",
+ "Labadie",
+ "Lakin",
+ "Lang",
+ "Langosh",
+ "Langworth",
+ "Larkin",
+ "Larson",
+ "Leannon",
+ "Lebsack",
+ "Ledner",
+ "Leffler",
+ "Legros",
+ "Lehner",
+ "Lemke",
+ "Lesch",
+ "Leuschke",
+ "Lind",
+ "Lindgren",
+ "Littel",
+ "Little",
+ "Lockman",
+ "Lowe",
+ "Lubowitz",
+ "Lueilwitz",
+ "Luettgen",
+ "Lynch",
+ "Macejkovic",
+ "Maggio",
+ "Mann",
+ "Mante",
+ "Marks",
+ "Marquardt",
+ "Marvin",
+ "Mayer",
+ "Mayert",
+ "McClure",
+ "McCullough",
+ "McDermott",
+ "McGlynn",
+ "McKenzie",
+ "McLaughlin",
+ "Medhurst",
+ "Mertz",
+ "Metz",
+ "Miller",
+ "Mills",
+ "Mitchell",
+ "Moen",
+ "Mohr",
+ "Monahan",
+ "Moore",
+ "Morar",
+ "Morissette",
+ "Mosciski",
+ "Mraz",
+ "Mueller",
+ "Muller",
+ "Murazik",
+ "Murphy",
+ "Murray",
+ "Nader",
+ "Nicolas",
+ "Nienow",
+ "Nikolaus",
+ "Nitzsche",
+ "Nolan",
+ "Oberbrunner",
+ "O'Connell",
+ "O'Conner",
+ "O'Hara",
+ "O'Keefe",
+ "O'Kon",
+ "Okuneva",
+ "Olson",
+ "Ondricka",
+ "O'Reilly",
+ "Orn",
+ "Ortiz",
+ "Osinski",
+ "Pacocha",
+ "Padberg",
+ "Pagac",
+ "Parisian",
+ "Parker",
+ "Paucek",
+ "Pfannerstill",
+ "Pfeffer",
+ "Pollich",
+ "Pouros",
+ "Powlowski",
+ "Predovic",
+ "Price",
+ "Prohaska",
+ "Prosacco",
+ "Purdy",
+ "Quigley",
+ "Quitzon",
+ "Rath",
+ "Ratke",
+ "Rau",
+ "Raynor",
+ "Reichel",
+ "Reichert",
+ "Reilly",
+ "Reinger",
+ "Rempel",
+ "Renner",
+ "Reynolds",
+ "Rice",
+ "Rippin",
+ "Ritchie",
+ "Robel",
+ "Roberts",
+ "Rodriguez",
+ "Rogahn",
+ "Rohan",
+ "Rolfson",
+ "Romaguera",
+ "Roob",
+ "Rosenbaum",
+ "Rowe",
+ "Ruecker",
+ "Runolfsdottir",
+ "Runolfsson",
+ "Runte",
+ "Russel",
+ "Rutherford",
+ "Ryan",
+ "Sanford",
+ "Satterfield",
+ "Sauer",
+ "Sawayn",
+ "Schaden",
+ "Schaefer",
+ "Schamberger",
+ "Schiller",
+ "Schimmel",
+ "Schinner",
+ "Schmeler",
+ "Schmidt",
+ "Schmitt",
+ "Schneider",
+ "Schoen",
+ "Schowalter",
+ "Schroeder",
+ "Schulist",
+ "Schultz",
+ "Schumm",
+ "Schuppe",
+ "Schuster",
+ "Senger",
+ "Shanahan",
+ "Shields",
+ "Simonis",
+ "Sipes",
+ "Skiles",
+ "Smith",
+ "Smitham",
+ "Spencer",
+ "Spinka",
+ "Sporer",
+ "Stamm",
+ "Stanton",
+ "Stark",
+ "Stehr",
+ "Steuber",
+ "Stiedemann",
+ "Stokes",
+ "Stoltenberg",
+ "Stracke",
+ "Streich",
+ "Stroman",
+ "Strosin",
+ "Swaniawski",
+ "Swift",
+ "Terry",
+ "Thiel",
+ "Thompson",
+ "Tillman",
+ "Torp",
+ "Torphy",
+ "Towne",
+ "Toy",
+ "Trantow",
+ "Tremblay",
+ "Treutel",
+ "Tromp",
+ "Turcotte",
+ "Turner",
+ "Ullrich",
+ "Upton",
+ "Vandervort",
+ "Veum",
+ "Volkman",
+ "Von",
+ "VonRueden",
+ "Waelchi",
+ "Walker",
+ "Walsh",
+ "Walter",
+ "Ward",
+ "Waters",
+ "Watsica",
+ "Weber",
+ "Wehner",
+ "Weimann",
+ "Weissnat",
+ "Welch",
+ "West",
+ "White",
+ "Wiegand",
+ "Wilderman",
+ "Wilkinson",
+ "Will",
+ "Williamson",
+ "Willms",
+ "Windler",
+ "Wintheiser",
+ "Wisoky",
+ "Wisozk",
+ "Witting",
+ "Wiza",
+ "Wolf",
+ "Wolff",
+ "Wuckert",
+ "Wunsch",
+ "Wyman",
+ "Yost",
+ "Yundt",
+ "Zboncak",
+ "Zemlak",
+ "Ziemann",
+ "Zieme",
+ "Zulauf",
+ },
+ "name.suffix": []string{
+ "Jr.",
+ "Sr.",
+ "I",
+ "II",
+ "III",
+ "IV",
+ "V",
+ "MD",
+ "DDS",
+ "PhD",
+ "DVM",
+ },
+ "address.postcode": []string{
+ "#####",
+ "#####-####",
+ },
+ "address.state_abbr": []string{
+ "AL",
+ "AK",
+ "AS",
+ "AZ",
+ "AR",
+ "CA",
+ "CO",
+ "CT",
+ "DE",
+ "DC",
+ "FM",
+ "FL",
+ "GA",
+ "GU",
+ "HI",
+ "ID",
+ "IL",
+ "IN",
+ "IA",
+ "KS",
+ "KY",
+ "LA",
+ "ME",
+ "MH",
+ "MD",
+ "MA",
+ "MI",
+ "MN",
+ "MS",
+ "MO",
+ "MT",
+ "NE",
+ "NV",
+ "NH",
+ "NJ",
+ "NM",
+ "NY",
+ "NC",
+ "ND",
+ "MP",
+ "OH",
+ "OK",
+ "OR",
+ "PW",
+ "PA",
+ "PR",
+ "RI",
+ "SC",
+ "SD",
+ "TN",
+ "TX",
+ "UT",
+ "VT",
+ "VI",
+ "VA",
+ "WA",
+ "WV",
+ "WI",
+ "WY",
+ "AE",
+ "AA",
+ "AP",
+ },
+ "address.street_address": []string{
+ "#{building_number} #{street_name}",
+ },
+ "name.title.level": []string{
+ "Accountability",
+ "Accounts",
+ "Applications",
+ "Assurance",
+ "Brand",
+ "Branding",
+ "Communications",
+ "Configuration",
+ "Creative",
+ "Data",
+ "Directives",
+ "Division",
+ "Factors",
+ "Functionality",
+ "Group",
+ "Identity",
+ "Implementation",
+ "Infrastructure",
+ "Integration",
+ "Interactions",
+ "Intranet",
+ "Marketing",
+ "Markets",
+ "Metrics",
+ "Mobility",
+ "Operations",
+ "Optimization",
+ "Paradigm",
+ "Program",
+ "Quality",
+ "Research",
+ "Response",
+ "Security",
+ "Solutions",
+ "Tactics",
+ "Usability",
+ "Web",
+ },
+ },
+ "no-nb": map[string][]string{
+ "name.prefix": []string{
+ "Dr.",
+ "Prof.",
+ },
+ "name.name": []string{
+ "#{prefix} #{first_name} #{last_name}",
+ "#{first_name} #{last_name} #{suffix}",
+ "#{feminine_name} #{feminine_name} #{last_name}",
+ "#{masculine_name} #{masculine_name} #{last_name}",
+ "#{first_name} #{last_name} #{last_name}",
+ "#{first_name} #{last_name}",
+ },
+ "address.city_root": []string{
+ "Fet",
+ "Gjes",
+ "Høy",
+ "Inn",
+ "Fager",
+ "Lille",
+ "Lo",
+ "Mal",
+ "Nord",
+ "Nær",
+ "Sand",
+ "Sme",
+ "Stav",
+ "Stor",
+ "Tand",
+ "Ut",
+ "Vest",
+ },
+ "address.building_number": []string{
+ "#",
+ "##",
+ },
+ "phone_number.formats": []string{
+ "########",
+ "## ## ## ##",
+ "### ## ###",
+ "+47 ## ## ## ##",
+ },
+ "address.street_name": []string{
+ "#{street_root}#{street_suffix}",
+ "#{street_suffix} #{street_root}#{street_suffix}",
+ "#{Name.first_name}#{common_street_suffix}",
+ "#{Name.last_name}#{common_street_suffix}",
+ },
+ "address.street_suffix": []string{
+ "alléen",
+ "bakken",
+ "berget",
+ "bråten",
+ "eggen",
+ "engen",
+ "ekra",
+ "faret",
+ "flata",
+ "gata",
+ "gjerdet",
+ "grenda",
+ "gropa",
+ "hagen",
+ "haugen",
+ "havna",
+ "holtet",
+ "høgda",
+ "jordet",
+ "kollen",
+ "kroken",
+ "lia",
+ "lunden",
+ "lyngen",
+ "løkka",
+ "marka",
+ "moen",
+ "myra",
+ "plassen",
+ "ringen",
+ "roa",
+ "røa",
+ "skogen",
+ "skrenten",
+ "spranget",
+ "stien",
+ "stranda",
+ "stubben",
+ "stykket",
+ "svingen",
+ "tjernet",
+ "toppen",
+ "tunet",
+ "vollen",
+ "vika",
+ "åsen",
+ },
+ "company.name": []string{
+ "#{Name.last_name} #{suffix}",
+ "#{Name.last_name}-#{Name.last_name}",
+ "#{Name.last_name}, #{Name.last_name} og #{Name.last_name}",
+ },
+ "address.common_street_suffix": []string{
+ "sgate",
+ "svei",
+ "s Gate",
+ "s Vei",
+ "gata",
+ "veien",
+ },
+ "address.state": []string{
+ "",
+ },
+ "name.feminine_name": []string{
+ "Emma",
+ "Sara",
+ "Thea",
+ "Ida",
+ "Julie",
+ "Nora",
+ "Emilie",
+ "Ingrid",
+ "Hanna",
+ "Maria",
+ "Sofie",
+ "Anna",
+ "Malin",
+ "Amalie",
+ "Vilde",
+ "Frida",
+ "Andrea",
+ "Tuva",
+ "Victoria",
+ "Mia",
+ "Karoline",
+ "Mathilde",
+ "Martine",
+ "Linnea",
+ "Marte",
+ "Hedda",
+ "Marie",
+ "Helene",
+ "Silje",
+ "Leah",
+ "Maja",
+ "Elise",
+ "Oda",
+ "Kristine",
+ "Aurora",
+ "Kaja",
+ "Camilla",
+ "Mari",
+ "Maren",
+ "Mina",
+ "Selma",
+ "Jenny",
+ "Celine",
+ "Eline",
+ "Sunniva",
+ "Natalie",
+ "Tiril",
+ "Synne",
+ "Sandra",
+ "Madeleine",
+ },
+ "internet.domain_suffix": []string{
+ "no",
+ "com",
+ "net",
+ "org",
+ },
+ "name.last_name": []string{
+ "Johansen",
+ "Hansen",
+ "Andersen",
+ "Kristiansen",
+ "Larsen",
+ "Olsen",
+ "Solberg",
+ "Andresen",
+ "Pedersen",
+ "Nilsen",
+ "Berg",
+ "Halvorsen",
+ "Karlsen",
+ "Svendsen",
+ "Jensen",
+ "Haugen",
+ "Martinsen",
+ "Eriksen",
+ "Sørensen",
+ "Johnsen",
+ "Myhrer",
+ "Johannessen",
+ "Nielsen",
+ "Hagen",
+ "Pettersen",
+ "Bakke",
+ "Skuterud",
+ "Løken",
+ "Gundersen",
+ "Strand",
+ "Jørgensen",
+ "Kvarme",
+ "Røed",
+ "Sæther",
+ "Stensrud",
+ "Moe",
+ "Kristoffersen",
+ "Jakobsen",
+ "Holm",
+ "Aas",
+ "Lie",
+ "Moen",
+ "Andreassen",
+ "Vedvik",
+ "Nguyen",
+ "Jacobsen",
+ "Torgersen",
+ "Ruud",
+ "Krogh",
+ "Christiansen",
+ "Bjerke",
+ "Aalerud",
+ "Borge",
+ "Sørlie",
+ "Berge",
+ "Østli",
+ "Ødegård",
+ "Torp",
+ "Henriksen",
+ "Haukelidsæter",
+ "Fjeld",
+ "Danielsen",
+ "Aasen",
+ "Fredriksen",
+ "Dahl",
+ "Berntsen",
+ "Arnesen",
+ "Wold",
+ "Thoresen",
+ "Solheim",
+ "Skoglund",
+ "Bakken",
+ "Amundsen",
+ "Solli",
+ "Smogeli",
+ "Kristensen",
+ "Glosli",
+ "Fossum",
+ "Evensen",
+ "Eide",
+ "Carlsen",
+ "Østby",
+ "Vegge",
+ "Tangen",
+ "Smedsrud",
+ "Olstad",
+ "Lunde",
+ "Kleven",
+ "Huseby",
+ "Bjørnstad",
+ "Ryan",
+ "Rasmussen",
+ "Nygård",
+ "Nordskaug",
+ "Nordby",
+ "Mathisen",
+ "Hopland",
+ "Gran",
+ "Finstad",
+ "Edvardsen",
+ },
+ "name.suffix": []string{
+ "Jr.",
+ "Sr.",
+ "I",
+ "II",
+ "III",
+ "IV",
+ "V",
+ },
+ "name.masculine_name": []string{
+ "Markus",
+ "Mathias",
+ "Kristian",
+ "Jonas",
+ "Andreas",
+ "Alexander",
+ "Martin",
+ "Sander",
+ "Daniel",
+ "Magnus",
+ "Henrik",
+ "Tobias",
+ "Kristoffer",
+ "Emil",
+ "Adrian",
+ "Sebastian",
+ "Marius",
+ "Elias",
+ "Fredrik",
+ "Thomas",
+ "Sondre",
+ "Benjamin",
+ "Jakob",
+ "Oliver",
+ "Lucas",
+ "Oskar",
+ "Nikolai",
+ "Filip",
+ "Mats",
+ "William",
+ "Erik",
+ "Simen",
+ "Ole",
+ "Eirik",
+ "Isak",
+ "Kasper",
+ "Noah",
+ "Lars",
+ "Joakim",
+ "Johannes",
+ "Håkon",
+ "Sindre",
+ "Jørgen",
+ "Herman",
+ "Anders",
+ "Jonathan",
+ "Even",
+ "Theodor",
+ "Mikkel",
+ "Aksel",
+ },
+ "address.postcode": []string{
+ "####",
+ "####",
+ "####",
+ "0###",
+ },
+ "address.street_address": []string{
+ "#{street_name} #{building_number}",
+ },
+ "company.suffix": []string{
+ "Gruppen",
+ "AS",
+ "ASA",
+ "BA",
+ "RFH",
+ "og Sønner",
+ },
+ "address.secondary_address": []string{
+ "Leil. ###",
+ "Oppgang A",
+ "Oppgang B",
+ },
+ "address.city_suffix": []string{
+ "berg",
+ "borg",
+ "by",
+ "bø",
+ "dal",
+ "eid",
+ "fjell",
+ "fjord",
+ "foss",
+ "grunn",
+ "hamn",
+ "havn",
+ "helle",
+ "mark",
+ "nes",
+ "odden",
+ "sand",
+ "sjøen",
+ "stad",
+ "strand",
+ "strøm",
+ "sund",
+ "vik",
+ "vær",
+ "våg",
+ "ø",
+ "øy",
+ "ås",
+ },
+ "address.city": []string{
+ "#{city_root}#{city_suffix}",
+ },
+ "address.street_root": []string{
+ "Eike",
+ "Bjørke",
+ "Gran",
+ "Vass",
+ "Furu",
+ "Litj",
+ "Lille",
+ "Høy",
+ "Fosse",
+ "Elve",
+ "Ku",
+ "Konvall",
+ "Soldugg",
+ "Hestemyr",
+ "Granitt",
+ "Hegge",
+ "Rogne",
+ "Fiol",
+ "Sol",
+ "Ting",
+ "Malm",
+ "Klokker",
+ "Preste",
+ "Dam",
+ "Geiterygg",
+ "Bekke",
+ "Berg",
+ "Kirke",
+ "Kors",
+ "Bru",
+ "Blåveis",
+ "Torg",
+ "Sjø",
+ },
+ "name.first_name": []string{
+ "Emma",
+ "Sara",
+ "Thea",
+ "Ida",
+ "Julie",
+ "Nora",
+ "Emilie",
+ "Ingrid",
+ "Hanna",
+ "Maria",
+ "Sofie",
+ "Anna",
+ "Malin",
+ "Amalie",
+ "Vilde",
+ "Frida",
+ "Andrea",
+ "Tuva",
+ "Victoria",
+ "Mia",
+ "Karoline",
+ "Mathilde",
+ "Martine",
+ "Linnea",
+ "Marte",
+ "Hedda",
+ "Marie",
+ "Helene",
+ "Silje",
+ "Leah",
+ "Maja",
+ "Elise",
+ "Oda",
+ "Kristine",
+ "Aurora",
+ "Kaja",
+ "Camilla",
+ "Mari",
+ "Maren",
+ "Mina",
+ "Selma",
+ "Jenny",
+ "Celine",
+ "Eline",
+ "Sunniva",
+ "Natalie",
+ "Tiril",
+ "Synne",
+ "Sandra",
+ "Madeleine",
+ "Markus",
+ "Mathias",
+ "Kristian",
+ "Jonas",
+ "Andreas",
+ "Alexander",
+ "Martin",
+ "Sander",
+ "Daniel",
+ "Magnus",
+ "Henrik",
+ "Tobias",
+ "Kristoffer",
+ "Emil",
+ "Adrian",
+ "Sebastian",
+ "Marius",
+ "Elias",
+ "Fredrik",
+ "Thomas",
+ "Sondre",
+ "Benjamin",
+ "Jakob",
+ "Oliver",
+ "Lucas",
+ "Oskar",
+ "Nikolai",
+ "Filip",
+ "Mats",
+ "William",
+ "Erik",
+ "Simen",
+ "Ole",
+ "Eirik",
+ "Isak",
+ "Kasper",
+ "Noah",
+ "Lars",
+ "Joakim",
+ "Johannes",
+ "Håkon",
+ "Sindre",
+ "Jørgen",
+ "Herman",
+ "Anders",
+ "Jonathan",
+ "Even",
+ "Theodor",
+ "Mikkel",
+ "Aksel",
+ },
+ "address.street_prefix": []string{
+ "Øvre",
+ "Nedre",
+ "Søndre",
+ "Gamle",
+ "Østre",
+ "Vestre",
+ },
+ },
+ "de-ch": map[string][]string{
+ "address.country_code": []string{
+ "CH",
+ "CH",
+ "CH",
+ "DE",
+ "AT",
+ "US",
+ "LI",
+ "US",
+ "HK",
+ "VN",
+ },
+ "phone_number.formats": []string{
+ "0800 ### ###",
+ "0800 ## ## ##",
+ "0## ### ## ##",
+ "0## ### ## ##",
+ "+41 ## ### ## ##",
+ "0900 ### ###",
+ "076 ### ## ##",
+ "+4178 ### ## ##",
+ "0041 79 ### ## ##",
+ },
+ "company.name": []string{
+ "#{Name.last_name} #{suffix}",
+ "#{Name.last_name}-#{Name.last_name}",
+ "#{Name.last_name}, #{Name.last_name} und #{Name.last_name}",
+ },
+ "internet.domain_suffix": []string{
+ "com",
+ "net",
+ "biz",
+ "ch",
+ "de",
+ "li",
+ "at",
+ "ch",
+ "ch",
+ },
+ "address.postcode": []string{
+ "1###",
+ "2###",
+ "3###",
+ "4###",
+ "5###",
+ "6###",
+ "7###",
+ "8###",
+ "9###",
+ },
+ "company.suffix": []string{
+ "AG",
+ "GmbH",
+ "und Söhne",
+ "und Partner",
+ "& Co.",
+ "Gruppe",
+ "LLC",
+ "Inc.",
+ },
+ },
+ "en-au": map[string][]string{
+ "address.state": []string{
+ "New South Wales",
+ "Queensland",
+ "Northern Territory",
+ "South Australia",
+ "Western Australia",
+ "Tasmania",
+ "Australian Capital Territory",
+ "Victoria",
+ },
+ "internet.domain_suffix": []string{
+ "com.au",
+ "com",
+ "net.au",
+ "net",
+ "org.au",
+ "org",
+ },
+ "name.last_name": []string{
+ "Smith",
+ "Jones",
+ "Williams",
+ "Brown",
+ "Wilson",
+ "Taylor",
+ "Johnson",
+ "White",
+ "Martin",
+ "Anderson",
+ "Thompson",
+ "Nguyen",
+ "Thomas",
+ "Walker",
+ "Harris",
+ "Lee",
+ "Ryan",
+ "Robinson",
+ "Kelly",
+ "King",
+ "Davis",
+ "Wright",
+ "Evans",
+ "Roberts",
+ "Green",
+ "Hall",
+ "Wood",
+ "Jackson",
+ "Clarke",
+ "Patel",
+ "Khan",
+ "Lewis",
+ "James",
+ "Phillips",
+ "Mason",
+ "Mitchell",
+ "Rose",
+ "Davies",
+ "Rodríguez",
+ "Cox",
+ "Alexander",
+ "Garden",
+ "Campbell",
+ "Johnston",
+ "Moore",
+ "Smyth",
+ "O’neill",
+ "Doherty",
+ "Stewart",
+ "Quinn",
+ "Murphy",
+ "Graham",
+ "Mclaughlin",
+ "Hamilton",
+ "Murray",
+ "Hughes",
+ "Robertson",
+ "Thomson",
+ "Scott",
+ "Macdonald",
+ "Reid",
+ "Clark",
+ "Ross",
+ "Young",
+ "Watson",
+ "Paterson",
+ "Morrison",
+ "Morgan",
+ "Griffiths",
+ "Edwards",
+ "Rees",
+ "Jenkins",
+ "Owen",
+ "Price",
+ "Moss",
+ "Richards",
+ "Abbott",
+ "Adams",
+ "Armstrong",
+ "Bahringer",
+ "Bailey",
+ "Barrows",
+ "Bartell",
+ "Bartoletti",
+ "Barton",
+ "Bauch",
+ "Baumbach",
+ "Bayer",
+ "Beahan",
+ "Beatty",
+ "Becker",
+ "Beier",
+ "Berge",
+ "Bergstrom",
+ "Bode",
+ "Bogan",
+ "Borer",
+ "Bosco",
+ "Botsford",
+ "Boyer",
+ "Boyle",
+ "Braun",
+ "Bruen",
+ "Carroll",
+ "Carter",
+ "Cartwright",
+ "Casper",
+ "Cassin",
+ "Champlin",
+ "Christiansen",
+ "Cole",
+ "Collier",
+ "Collins",
+ "Connelly",
+ "Conroy",
+ "Corkery",
+ "Cormier",
+ "Corwin",
+ "Cronin",
+ "Crooks",
+ "Cruickshank",
+ "Cummings",
+ "D'amore",
+ "Daniel",
+ "Dare",
+ "Daugherty",
+ "Dickens",
+ "Dickinson",
+ "Dietrich",
+ "Donnelly",
+ "Dooley",
+ "Douglas",
+ "Doyle",
+ "Durgan",
+ "Ebert",
+ "Emard",
+ "Emmerich",
+ "Erdman",
+ "Ernser",
+ "Fadel",
+ "Fahey",
+ "Farrell",
+ "Fay",
+ "Feeney",
+ "Feil",
+ "Ferry",
+ "Fisher",
+ "Flatley",
+ "Gibson",
+ "Gleason",
+ "Glover",
+ "Goldner",
+ "Goodwin",
+ "Grady",
+ "Grant",
+ "Greenfelder",
+ "Greenholt",
+ "Grimes",
+ "Gutmann",
+ "Hackett",
+ "Hahn",
+ "Haley",
+ "Hammes",
+ "Hand",
+ "Hane",
+ "Hansen",
+ "Harber",
+ "Hartmann",
+ "Harvey",
+ "Hayes",
+ "Heaney",
+ "Heathcote",
+ "Heller",
+ "Hermann",
+ "Hermiston",
+ "Hessel",
+ "Hettinger",
+ "Hickle",
+ "Hill",
+ "Hills",
+ "Hoppe",
+ "Howe",
+ "Howell",
+ "Hudson",
+ "Huel",
+ "Hyatt",
+ "Jacobi",
+ "Jacobs",
+ "Jacobson",
+ "Jerde",
+ "Johns",
+ "Keeling",
+ "Kemmer",
+ "Kessler",
+ "Kiehn",
+ "Kirlin",
+ "Klein",
+ "Koch",
+ "Koelpin",
+ "Kohler",
+ "Koss",
+ "Kovacek",
+ "Kreiger",
+ "Kris",
+ "Kuhlman",
+ "Kuhn",
+ "Kulas",
+ "Kunde",
+ "Kutch",
+ "Lakin",
+ "Lang",
+ "Langworth",
+ "Larkin",
+ "Larson",
+ "Leannon",
+ "Leffler",
+ "Little",
+ "Lockman",
+ "Lowe",
+ "Lynch",
+ "Mann",
+ "Marks",
+ "Marvin",
+ "Mayer",
+ "Mccullough",
+ "Mcdermott",
+ "Mckenzie",
+ "Miller",
+ "Mills",
+ "Monahan",
+ "Morissette",
+ "Mueller",
+ "Muller",
+ "Nader",
+ "Nicolas",
+ "Nolan",
+ "O'connell",
+ "O'conner",
+ "O'hara",
+ "O'keefe",
+ "Olson",
+ "O'reilly",
+ "Parisian",
+ "Parker",
+ "Quigley",
+ "Reilly",
+ "Reynolds",
+ "Rice",
+ "Ritchie",
+ "Rohan",
+ "Rolfson",
+ "Rowe",
+ "Russel",
+ "Rutherford",
+ "Sanford",
+ "Sauer",
+ "Schmidt",
+ "Schmitt",
+ "Schneider",
+ "Schroeder",
+ "Schultz",
+ "Shields",
+ "Smitham",
+ "Spencer",
+ "Stanton",
+ "Stark",
+ "Stokes",
+ "Swift",
+ "Tillman",
+ "Towne",
+ "Tremblay",
+ "Tromp",
+ "Turcotte",
+ "Turner",
+ "Walsh",
+ "Walter",
+ "Ward",
+ "Waters",
+ "Weber",
+ "Welch",
+ "West",
+ "Wilderman",
+ "Wilkinson",
+ "Williamson",
+ "Windler",
+ "Wolf",
+ },
+ "address.postcode": []string{
+ "0###",
+ "2###",
+ "3###",
+ "4###",
+ "5###",
+ "6###",
+ "7###",
+ },
+ "address.state_abbr": []string{
+ "NSW",
+ "QLD",
+ "NT",
+ "SA",
+ "WA",
+ "TAS",
+ "ACT",
+ "VIC",
+ },
+ "company.suffix": []string{
+ "Pty Ltd",
+ "and Sons",
+ "Corp",
+ "Group",
+ "Brothers",
+ "Partners",
+ },
+ "name.first_name": []string{
+ "William",
+ "Jack",
+ "Oliver",
+ "Joshua",
+ "Thomas",
+ "Lachlan",
+ "Cooper",
+ "Noah",
+ "Ethan",
+ "Lucas",
+ "James",
+ "Samuel",
+ "Jacob",
+ "Liam",
+ "Alexander",
+ "Benjamin",
+ "Max",
+ "Isaac",
+ "Daniel",
+ "Riley",
+ "Ryan",
+ "Charlie",
+ "Tyler",
+ "Jake",
+ "Matthew",
+ "Xavier",
+ "Harry",
+ "Jayden",
+ "Nicholas",
+ "Harrison",
+ "Levi",
+ "Luke",
+ "Adam",
+ "Henry",
+ "Aiden",
+ "Dylan",
+ "Oscar",
+ "Michael",
+ "Jackson",
+ "Logan",
+ "Joseph",
+ "Blake",
+ "Nathan",
+ "Connor",
+ "Elijah",
+ "Nate",
+ "Archie",
+ "Bailey",
+ "Marcus",
+ "Cameron",
+ "Jordan",
+ "Zachary",
+ "Caleb",
+ "Hunter",
+ "Ashton",
+ "Toby",
+ "Aidan",
+ "Hayden",
+ "Mason",
+ "Hamish",
+ "Edward",
+ "Angus",
+ "Eli",
+ "Sebastian",
+ "Christian",
+ "Patrick",
+ "Andrew",
+ "Anthony",
+ "Luca",
+ "Kai",
+ "Beau",
+ "Alex",
+ "George",
+ "Callum",
+ "Finn",
+ "Zac",
+ "Mitchell",
+ "Jett",
+ "Jesse",
+ "Gabriel",
+ "Leo",
+ "Declan",
+ "Charles",
+ "Jasper",
+ "Jonathan",
+ "Aaron",
+ "Hugo",
+ "David",
+ "Christopher",
+ "Chase",
+ "Owen",
+ "Justin",
+ "Ali",
+ "Darcy",
+ "Lincoln",
+ "Cody",
+ "Phoenix",
+ "Sam",
+ "John",
+ "Joel",
+ "Isabella",
+ "Ruby",
+ "Chloe",
+ "Olivia",
+ "Charlotte",
+ "Mia",
+ "Lily",
+ "Emily",
+ "Ella",
+ "Sienna",
+ "Sophie",
+ "Amelia",
+ "Grace",
+ "Ava",
+ "Zoe",
+ "Emma",
+ "Sophia",
+ "Matilda",
+ "Hannah",
+ "Jessica",
+ "Lucy",
+ "Georgia",
+ "Sarah",
+ "Abigail",
+ "Zara",
+ "Eva",
+ "Scarlett",
+ "Jasmine",
+ "Chelsea",
+ "Lilly",
+ "Ivy",
+ "Isla",
+ "Evie",
+ "Isabelle",
+ "Maddison",
+ "Layla",
+ "Summer",
+ "Annabelle",
+ "Alexis",
+ "Elizabeth",
+ "Bella",
+ "Holly",
+ "Lara",
+ "Madison",
+ "Alyssa",
+ "Maya",
+ "Tahlia",
+ "Claire",
+ "Hayley",
+ "Imogen",
+ "Jade",
+ "Ellie",
+ "Sofia",
+ "Addison",
+ "Molly",
+ "Phoebe",
+ "Alice",
+ "Savannah",
+ "Gabriella",
+ "Kayla",
+ "Mikayla",
+ "Abbey",
+ "Eliza",
+ "Willow",
+ "Alexandra",
+ "Poppy",
+ "Samantha",
+ "Stella",
+ "Amy",
+ "Amelie",
+ "Anna",
+ "Piper",
+ "Gemma",
+ "Isabel",
+ "Victoria",
+ "Stephanie",
+ "Caitlin",
+ "Heidi",
+ "Paige",
+ "Rose",
+ "Amber",
+ "Audrey",
+ "Claudia",
+ "Taylor",
+ "Madeline",
+ "Angelina",
+ "Natalie",
+ "Charli",
+ "Lauren",
+ "Ashley",
+ "Violet",
+ "Mackenzie",
+ "Abby",
+ "Skye",
+ "Lillian",
+ "Alana",
+ "Lola",
+ "Leah",
+ "Eve",
+ "Kiara",
+ },
+ "address.building_number": []string{
+ "####",
+ "###",
+ "##",
+ },
+ "phone_number.formats": []string{
+ "0# #### ####",
+ "+61 # #### ####",
+ "04## #### ####",
+ "+61 4## #### ####",
+ },
+ "address.street_suffix": []string{
+ "Avenue",
+ "Boulevard",
+ "Circle",
+ "Circuit",
+ "Court",
+ "Crescent",
+ "Crest",
+ "Drive",
+ "Estate Dr",
+ "Grove",
+ "Hill",
+ "Island",
+ "Junction",
+ "Knoll",
+ "Lane",
+ "Loop",
+ "Mall",
+ "Manor",
+ "Meadow",
+ "Mews",
+ "Parade",
+ "Parkway",
+ "Pass",
+ "Place",
+ "Plaza",
+ "Ridge",
+ "Road",
+ "Run",
+ "Square",
+ "Station St",
+ "Street",
+ "Summit",
+ "Terrace",
+ "Track",
+ "Trail",
+ "View Rd",
+ "Way",
+ },
+ },
+ "en-gb": map[string][]string{
+ "address.county": []string{
+ "Avon",
+ "Bedfordshire",
+ "Berkshire",
+ "Borders",
+ "Buckinghamshire",
+ "Cambridgeshire",
+ "Central",
+ "Cheshire",
+ "Cleveland",
+ "Clwyd",
+ "Cornwall",
+ "County Antrim",
+ "County Armagh",
+ "County Down",
+ "County Fermanagh",
+ "County Londonderry",
+ "County Tyrone",
+ "Cumbria",
+ "Derbyshire",
+ "Devon",
+ "Dorset",
+ "Dumfries and Galloway",
+ "Durham",
+ "Dyfed",
+ "East Sussex",
+ "Essex",
+ "Fife",
+ "Gloucestershire",
+ "Grampian",
+ "Greater Manchester",
+ "Gwent",
+ "Gwynedd County",
+ "Hampshire",
+ "Herefordshire",
+ "Hertfordshire",
+ "Highlands and Islands",
+ "Humberside",
+ "Isle of Wight",
+ "Kent",
+ "Lancashire",
+ "Leicestershire",
+ "Lincolnshire",
+ "Lothian",
+ "Merseyside",
+ "Mid Glamorgan",
+ "Norfolk",
+ "North Yorkshire",
+ "Northamptonshire",
+ "Northumberland",
+ "Nottinghamshire",
+ "Oxfordshire",
+ "Powys",
+ "Rutland",
+ "Shropshire",
+ "Somerset",
+ "South Glamorgan",
+ "South Yorkshire",
+ "Staffordshire",
+ "Strathclyde",
+ "Suffolk",
+ "Surrey",
+ "Tayside",
+ "Tyne and Wear",
+ "Warwickshire",
+ "West Glamorgan",
+ "West Midlands",
+ "West Sussex",
+ "West Yorkshire",
+ "Wiltshire",
+ "Worcestershire",
+ },
+ "address.uk_country": []string{
+ "England",
+ "Scotland",
+ "Wales",
+ "Northern Ireland",
+ },
+ "internet.domain_suffix": []string{
+ "co.uk",
+ "com",
+ "biz",
+ "info",
+ "name",
+ },
+ "address.postcode": []string{
+ "??# #??",
+ "??## #??",
+ },
+ },
+ "en-bork": map[string][]string{
+ "lorem.words": []string{
+ "Boot",
+ "I",
+ "Nu",
+ "Nur",
+ "Tu",
+ "Um",
+ "a",
+ "becoose-a",
+ "boot",
+ "bork",
+ "burn",
+ "chuuses",
+ "cumplete-a",
+ "cun",
+ "cunseqooences",
+ "curcoomstunces",
+ "dee",
+ "deeslikes",
+ "denuoonceeng",
+ "desures",
+ "du",
+ "eccuoont",
+ "ectooel",
+ "edfuntege-a",
+ "efueeds",
+ "egeeen",
+ "ell",
+ "ere-a",
+ "feend",
+ "foolt",
+ "frum",
+ "geefe-a",
+ "gesh",
+ "greet",
+ "heem",
+ "heppeeness",
+ "hes",
+ "hoo",
+ "hoomun",
+ "idea",
+ "ifer",
+ "in",
+ "incuoonter",
+ "injuy",
+ "itselff",
+ "ixcept",
+ "ixemple-a",
+ "ixerceese-a",
+ "ixpleeen",
+ "ixplurer",
+ "ixpuoond",
+ "ixtremely",
+ "knoo",
+ "lebureeuoos",
+ "lufes",
+ "meestekee",
+ "mester-booeelder",
+ "moost",
+ "mun",
+ "nu",
+ "nut",
+ "oobteeen",
+ "oocceseeunelly",
+ "ooccoor",
+ "ooff",
+ "oone-a",
+ "oor",
+ "peeen",
+ "peeenffool",
+ "physeecel",
+ "pleesoore-a",
+ "poorsooe-a",
+ "poorsooes",
+ "preeesing",
+ "prucoore-a",
+ "prudooces",
+ "reeght",
+ "reshunelly",
+ "resooltunt",
+ "sume-a",
+ "teecheengs",
+ "teke-a",
+ "thees",
+ "thet",
+ "thuse-a",
+ "treefiel",
+ "troot",
+ "tu",
+ "tueel",
+ "und",
+ "undertekes",
+ "unnuyeeng",
+ "uny",
+ "unyune-a",
+ "us",
+ "veell",
+ "veet",
+ "ves",
+ "vheech",
+ "vhu",
+ "yuoo",
+ "zee",
+ "zeere-a",
+ },
+ },
+ "nl": map[string][]string{
+ "address.postcode": []string{
+ "#### ??",
+ },
+ "company.suffix": []string{
+ "BV",
+ "V.O.F.",
+ "Group",
+ "en Zonen",
+ },
+ "address.secondary_address": []string{
+ "1 hoog",
+ "2 hoog",
+ "3 hoog",
+ "I",
+ "II",
+ "III",
+ "a",
+ "b",
+ "c",
+ },
+ "address.city_suffix": []string{
+ "dam",
+ "berg",
+ "aan de Rijn",
+ "aan de IJssel",
+ },
+ "address.country": []string{
+ "Afghanistan",
+ "Akrotiri",
+ "Albanië",
+ "Algerije",
+ "Amerikaanse Maagdeneilanden",
+ "Amerikaans-Samoa",
+ "Andorra",
+ "Angola",
+ "Anguilla",
+ "Antarctica",
+ "Antigua en Barbuda",
+ "Arctic Ocean",
+ "Argentinië",
+ "Armenië",
+ "Aruba",
+ "Ashmore and Cartier Islands",
+ "Atlantic Ocean",
+ "Australië",
+ "Azerbeidzjan",
+ "Bahama's",
+ "Bahrein",
+ "Bangladesh",
+ "Barbados",
+ "Belarus",
+ "België",
+ "Belize",
+ "Benin",
+ "Bermuda",
+ "Bhutan",
+ "Bolivië",
+ "Bosnië-Herzegovina",
+ "Botswana",
+ "Bouvet Island",
+ "Brazilië",
+ "British Indian Ocean Territory",
+ "Britse Maagdeneilanden",
+ "Brunei",
+ "Bulgarije",
+ "Burkina Faso",
+ "Burundi",
+ "Cambodja",
+ "Canada",
+ "Caymaneilanden",
+ "Centraal-Afrikaanse Republiek",
+ "Chili",
+ "China",
+ "Christmas Island",
+ "Clipperton Island",
+ "Cocos (Keeling) Islands",
+ "Colombia",
+ "Comoren (Unie)",
+ "Congo (Democratische Republiek)",
+ "Congo (Volksrepubliek)",
+ "Cook",
+ "Coral Sea Islands",
+ "Costa Rica",
+ "Cuba",
+ "Cyprus",
+ "Denemarken",
+ "Dhekelia",
+ "Djibouti",
+ "Dominica",
+ "Dominicaanse Republiek",
+ "Duitsland",
+ "Ecuador",
+ "Egypte",
+ "El Salvador",
+ "Equatoriaal-Guinea",
+ "Eritrea",
+ "Estland",
+ "Ethiopië",
+ "European Union",
+ "Falkland",
+ "Faroe Islands",
+ "Fiji",
+ "Filipijnen",
+ "Finland",
+ "Frankrijk",
+ "Frans-Polynesië",
+ "French Southern and Antarctic Lands",
+ "Gabon",
+ "Gambia",
+ "Gaza Strip",
+ "Georgië",
+ "Ghana",
+ "Gibraltar",
+ "Grenada",
+ "Griekenland",
+ "Groenland",
+ "Guam",
+ "Guatemala",
+ "Guernsey",
+ "Guinea",
+ "Guinee-Bissau",
+ "Guyana",
+ "Haïti",
+ "Heard Island and McDonald Islands",
+ "Heilige Stoel",
+ "Honduras",
+ "Hongarije",
+ "Hongkong",
+ "Ierland",
+ "IJsland",
+ "India",
+ "Indian Ocean",
+ "Indonesië",
+ "Irak",
+ "Iran",
+ "Isle of Man",
+ "Israël",
+ "Italië",
+ "Ivoorkust",
+ "Jamaica",
+ "Jan Mayen",
+ "Japan",
+ "Jemen",
+ "Jersey",
+ "Jordanië",
+ "Kaapverdië",
+ "Kameroen",
+ "Kazachstan",
+ "Kenia",
+ "Kirgizstan",
+ "Kiribati",
+ "Koeweit",
+ "Kroatië",
+ "Laos",
+ "Lesotho",
+ "Letland",
+ "Libanon",
+ "Liberia",
+ "Libië",
+ "Liechtenstein",
+ "Litouwen",
+ "Luxemburg",
+ "Macao",
+ "Macedonië",
+ "Madagaskar",
+ "Malawi",
+ "Maldiven",
+ "Maleisië",
+ "Mali",
+ "Malta",
+ "Marokko",
+ "Marshall Islands",
+ "Mauritanië",
+ "Mauritius",
+ "Mayotte",
+ "Mexico",
+ "Micronesia, Federated States of",
+ "Moldavië",
+ "Monaco",
+ "Mongolië",
+ "Montenegro",
+ "Montserrat",
+ "Mozambique",
+ "Myanmar",
+ "Namibië",
+ "Nauru",
+ "Navassa Island",
+ "Nederland",
+ "Nederlandse Antillen",
+ "Nepal",
+ "Ngwane",
+ "Nicaragua",
+ "Nieuw-Caledonië",
+ "Nieuw-Zeeland",
+ "Niger",
+ "Nigeria",
+ "Niue",
+ "Noordelijke Marianen",
+ "Noord-Korea",
+ "Noorwegen",
+ "Norfolk Island",
+ "Oekraïne",
+ "Oezbekistan",
+ "Oman",
+ "Oostenrijk",
+ "Pacific Ocean",
+ "Pakistan",
+ "Palau",
+ "Panama",
+ "Papoea-Nieuw-Guinea",
+ "Paracel Islands",
+ "Paraguay",
+ "Peru",
+ "Pitcairn",
+ "Polen",
+ "Portugal",
+ "Puerto Rico",
+ "Qatar",
+ "Roemenië",
+ "Rusland",
+ "Rwanda",
+ "Saint Helena",
+ "Saint Lucia",
+ "Saint Vincent en de Grenadines",
+ "Saint-Pierre en Miquelon",
+ "Salomon",
+ "Samoa",
+ "San Marino",
+ "São Tomé en Principe",
+ "Saudi-Arabië",
+ "Senegal",
+ "Servië",
+ "Seychellen",
+ "Sierra Leone",
+ "Singapore",
+ "Sint-Kitts en Nevis",
+ "Slovenië",
+ "Slowakije",
+ "Soedan",
+ "Somalië",
+ "South Georgia and the South Sandwich Islands",
+ "Southern Ocean",
+ "Spanje",
+ "Spratly Islands",
+ "Sri Lanka",
+ "Suriname",
+ "Svalbard",
+ "Syrië",
+ "Tadzjikistan",
+ "Taiwan",
+ "Tanzania",
+ "Thailand",
+ "Timor Leste",
+ "Togo",
+ "Tokelau",
+ "Tonga",
+ "Trinidad en Tobago",
+ "Tsjaad",
+ "Tsjechië",
+ "Tunesië",
+ "Turkije",
+ "Turkmenistan",
+ "Turks-en Caicoseilanden",
+ "Tuvalu",
+ "Uganda",
+ "Uruguay",
+ "Vanuatu",
+ "Venezuela",
+ "Verenigd Koninkrijk",
+ "Verenigde Arabische Emiraten",
+ "Verenigde Staten van Amerika",
+ "Vietnam",
+ "Wake Island",
+ "Wallis en Futuna",
+ "Wereld",
+ "West Bank",
+ "Westelijke Sahara",
+ "Zambia",
+ "Zimbabwe",
+ "Zuid-Afrika",
+ "Zuid-Korea",
+ "Zweden",
+ "Zwitserland",
+ },
+ "name.first_name": []string{
+ "Amber",
+ "Anna",
+ "Anne",
+ "Anouk",
+ "Bas",
+ "Bram",
+ "Britt",
+ "Daan",
+ "Emma",
+ "Eva",
+ "Femke",
+ "Finn",
+ "Fleur",
+ "Iris",
+ "Isa",
+ "Jan",
+ "Jasper",
+ "Jayden",
+ "Jesse",
+ "Johannes",
+ "Julia",
+ "Julian",
+ "Kevin",
+ "Lars",
+ "Lieke",
+ "Lisa",
+ "Lotte",
+ "Lucas",
+ "Luuk",
+ "Maud",
+ "Max",
+ "Mike",
+ "Milan",
+ "Nick",
+ "Niels",
+ "Noa",
+ "Rick",
+ "Roos",
+ "Ruben",
+ "Sander",
+ "Sanne",
+ "Sem",
+ "Sophie",
+ "Stijn",
+ "Sven",
+ "Thijs",
+ "Thijs",
+ "Thomas",
+ "Tim",
+ "Tom",
+ },
+ "name.prefix": []string{
+ "Dhr.",
+ "Mevr. Dr.",
+ "Bsc",
+ "Msc",
+ "Prof.",
+ },
+ "lorem.supplemental": []string{
+ "abbas",
+ "abduco",
+ "abeo",
+ "abscido",
+ "absconditus",
+ "absens",
+ "absorbeo",
+ "absque",
+ "abstergo",
+ "absum",
+ "abundans",
+ "abutor",
+ "accedo",
+ "accendo",
+ "acceptus",
+ "accipio",
+ "accommodo",
+ "accusator",
+ "acer",
+ "acerbitas",
+ "acervus",
+ "acidus",
+ "acies",
+ "acquiro",
+ "acsi",
+ "adamo",
+ "adaugeo",
+ "addo",
+ "adduco",
+ "ademptio",
+ "adeo",
+ "adeptio",
+ "adfectus",
+ "adfero",
+ "adficio",
+ "adflicto",
+ "adhaero",
+ "adhuc",
+ "adicio",
+ "adimpleo",
+ "adinventitias",
+ "adipiscor",
+ "adiuvo",
+ "administratio",
+ "admiratio",
+ "admitto",
+ "admoneo",
+ "admoveo",
+ "adnuo",
+ "adopto",
+ "adsidue",
+ "adstringo",
+ "adsuesco",
+ "adsum",
+ "adulatio",
+ "adulescens",
+ "adultus",
+ "aduro",
+ "advenio",
+ "adversus",
+ "advoco",
+ "aedificium",
+ "aeger",
+ "aegre",
+ "aegrotatio",
+ "aegrus",
+ "aeneus",
+ "aequitas",
+ "aequus",
+ "aer",
+ "aestas",
+ "aestivus",
+ "aestus",
+ "aetas",
+ "aeternus",
+ "ager",
+ "aggero",
+ "aggredior",
+ "agnitio",
+ "agnosco",
+ "ago",
+ "ait",
+ "aiunt",
+ "alienus",
+ "alii",
+ "alioqui",
+ "aliqua",
+ "alius",
+ "allatus",
+ "alo",
+ "alter",
+ "altus",
+ "alveus",
+ "amaritudo",
+ "ambitus",
+ "ambulo",
+ "amicitia",
+ "amiculum",
+ "amissio",
+ "amita",
+ "amitto",
+ "amo",
+ "amor",
+ "amoveo",
+ "amplexus",
+ "amplitudo",
+ "amplus",
+ "ancilla",
+ "angelus",
+ "angulus",
+ "angustus",
+ "animadverto",
+ "animi",
+ "animus",
+ "annus",
+ "anser",
+ "ante",
+ "antea",
+ "antepono",
+ "antiquus",
+ "aperio",
+ "aperte",
+ "apostolus",
+ "apparatus",
+ "appello",
+ "appono",
+ "appositus",
+ "approbo",
+ "apto",
+ "aptus",
+ "apud",
+ "aqua",
+ "ara",
+ "aranea",
+ "arbitro",
+ "arbor",
+ "arbustum",
+ "arca",
+ "arceo",
+ "arcesso",
+ "arcus",
+ "argentum",
+ "argumentum",
+ "arguo",
+ "arma",
+ "armarium",
+ "armo",
+ "aro",
+ "ars",
+ "articulus",
+ "artificiose",
+ "arto",
+ "arx",
+ "ascisco",
+ "ascit",
+ "asper",
+ "aspicio",
+ "asporto",
+ "assentator",
+ "astrum",
+ "atavus",
+ "ater",
+ "atqui",
+ "atrocitas",
+ "atrox",
+ "attero",
+ "attollo",
+ "attonbitus",
+ "auctor",
+ "auctus",
+ "audacia",
+ "audax",
+ "audentia",
+ "audeo",
+ "audio",
+ "auditor",
+ "aufero",
+ "aureus",
+ "auris",
+ "aurum",
+ "aut",
+ "autem",
+ "autus",
+ "auxilium",
+ "avaritia",
+ "avarus",
+ "aveho",
+ "averto",
+ "avoco",
+ "baiulus",
+ "balbus",
+ "barba",
+ "bardus",
+ "basium",
+ "beatus",
+ "bellicus",
+ "bellum",
+ "bene",
+ "beneficium",
+ "benevolentia",
+ "benigne",
+ "bestia",
+ "bibo",
+ "bis",
+ "blandior",
+ "bonus",
+ "bos",
+ "brevis",
+ "cado",
+ "caecus",
+ "caelestis",
+ "caelum",
+ "calamitas",
+ "calcar",
+ "calco",
+ "calculus",
+ "callide",
+ "campana",
+ "candidus",
+ "canis",
+ "canonicus",
+ "canto",
+ "capillus",
+ "capio",
+ "capitulus",
+ "capto",
+ "caput",
+ "carbo",
+ "carcer",
+ "careo",
+ "caries",
+ "cariosus",
+ "caritas",
+ "carmen",
+ "carpo",
+ "carus",
+ "casso",
+ "caste",
+ "casus",
+ "catena",
+ "caterva",
+ "cattus",
+ "cauda",
+ "causa",
+ "caute",
+ "caveo",
+ "cavus",
+ "cedo",
+ "celebrer",
+ "celer",
+ "celo",
+ "cena",
+ "cenaculum",
+ "ceno",
+ "censura",
+ "centum",
+ "cerno",
+ "cernuus",
+ "certe",
+ "certo",
+ "certus",
+ "cervus",
+ "cetera",
+ "charisma",
+ "chirographum",
+ "cibo",
+ "cibus",
+ "cicuta",
+ "cilicium",
+ "cimentarius",
+ "ciminatio",
+ "cinis",
+ "circumvenio",
+ "cito",
+ "civis",
+ "civitas",
+ "clam",
+ "clamo",
+ "claro",
+ "clarus",
+ "claudeo",
+ "claustrum",
+ "clementia",
+ "clibanus",
+ "coadunatio",
+ "coaegresco",
+ "coepi",
+ "coerceo",
+ "cogito",
+ "cognatus",
+ "cognomen",
+ "cogo",
+ "cohaero",
+ "cohibeo",
+ "cohors",
+ "colligo",
+ "colloco",
+ "collum",
+ "colo",
+ "color",
+ "coma",
+ "combibo",
+ "comburo",
+ "comedo",
+ "comes",
+ "cometes",
+ "comis",
+ "comitatus",
+ "commemoro",
+ "comminor",
+ "commodo",
+ "communis",
+ "comparo",
+ "compello",
+ "complectus",
+ "compono",
+ "comprehendo",
+ "comptus",
+ "conatus",
+ "concedo",
+ "concido",
+ "conculco",
+ "condico",
+ "conduco",
+ "confero",
+ "confido",
+ "conforto",
+ "confugo",
+ "congregatio",
+ "conicio",
+ "coniecto",
+ "conitor",
+ "coniuratio",
+ "conor",
+ "conqueror",
+ "conscendo",
+ "conservo",
+ "considero",
+ "conspergo",
+ "constans",
+ "consuasor",
+ "contabesco",
+ "contego",
+ "contigo",
+ "contra",
+ "conturbo",
+ "conventus",
+ "convoco",
+ "copia",
+ "copiose",
+ "cornu",
+ "corona",
+ "corpus",
+ "correptius",
+ "corrigo",
+ "corroboro",
+ "corrumpo",
+ "coruscus",
+ "cotidie",
+ "crapula",
+ "cras",
+ "crastinus",
+ "creator",
+ "creber",
+ "crebro",
+ "credo",
+ "creo",
+ "creptio",
+ "crepusculum",
+ "cresco",
+ "creta",
+ "cribro",
+ "crinis",
+ "cruciamentum",
+ "crudelis",
+ "cruentus",
+ "crur",
+ "crustulum",
+ "crux",
+ "cubicularis",
+ "cubitum",
+ "cubo",
+ "cui",
+ "cuius",
+ "culpa",
+ "culpo",
+ "cultellus",
+ "cultura",
+ "cum",
+ "cunabula",
+ "cunae",
+ "cunctatio",
+ "cupiditas",
+ "cupio",
+ "cuppedia",
+ "cupressus",
+ "cur",
+ "cura",
+ "curatio",
+ "curia",
+ "curiositas",
+ "curis",
+ "curo",
+ "curriculum",
+ "currus",
+ "cursim",
+ "curso",
+ "cursus",
+ "curto",
+ "curtus",
+ "curvo",
+ "curvus",
+ "custodia",
+ "damnatio",
+ "damno",
+ "dapifer",
+ "debeo",
+ "debilito",
+ "decens",
+ "decerno",
+ "decet",
+ "decimus",
+ "decipio",
+ "decor",
+ "decretum",
+ "decumbo",
+ "dedecor",
+ "dedico",
+ "deduco",
+ "defaeco",
+ "defendo",
+ "defero",
+ "defessus",
+ "defetiscor",
+ "deficio",
+ "defigo",
+ "defleo",
+ "defluo",
+ "defungo",
+ "degenero",
+ "degero",
+ "degusto",
+ "deinde",
+ "delectatio",
+ "delego",
+ "deleo",
+ "delibero",
+ "delicate",
+ "delinquo",
+ "deludo",
+ "demens",
+ "demergo",
+ "demitto",
+ "demo",
+ "demonstro",
+ "demoror",
+ "demulceo",
+ "demum",
+ "denego",
+ "denique",
+ "dens",
+ "denuncio",
+ "denuo",
+ "deorsum",
+ "depereo",
+ "depono",
+ "depopulo",
+ "deporto",
+ "depraedor",
+ "deprecator",
+ "deprimo",
+ "depromo",
+ "depulso",
+ "deputo",
+ "derelinquo",
+ "derideo",
+ "deripio",
+ "desidero",
+ "desino",
+ "desipio",
+ "desolo",
+ "desparatus",
+ "despecto",
+ "despirmatio",
+ "infit",
+ "inflammatio",
+ "paens",
+ "patior",
+ "patria",
+ "patrocinor",
+ "patruus",
+ "pauci",
+ "paulatim",
+ "pauper",
+ "pax",
+ "peccatus",
+ "pecco",
+ "pecto",
+ "pectus",
+ "pecunia",
+ "pecus",
+ "peior",
+ "pel",
+ "ocer",
+ "socius",
+ "sodalitas",
+ "sol",
+ "soleo",
+ "solio",
+ "solitudo",
+ "solium",
+ "sollers",
+ "sollicito",
+ "solum",
+ "solus",
+ "solutio",
+ "solvo",
+ "somniculosus",
+ "somnus",
+ "sonitus",
+ "sono",
+ "sophismata",
+ "sopor",
+ "sordeo",
+ "sortitus",
+ "spargo",
+ "speciosus",
+ "spectaculum",
+ "speculum",
+ "sperno",
+ "spero",
+ "spes",
+ "spiculum",
+ "spiritus",
+ "spoliatio",
+ "sponte",
+ "stabilis",
+ "statim",
+ "statua",
+ "stella",
+ "stillicidium",
+ "stipes",
+ "stips",
+ "sto",
+ "strenuus",
+ "strues",
+ "studio",
+ "stultus",
+ "suadeo",
+ "suasoria",
+ "sub",
+ "subito",
+ "subiungo",
+ "sublime",
+ "subnecto",
+ "subseco",
+ "substantia",
+ "subvenio",
+ "succedo",
+ "succurro",
+ "sufficio",
+ "suffoco",
+ "suffragium",
+ "suggero",
+ "sui",
+ "sulum",
+ "sum",
+ "summa",
+ "summisse",
+ "summopere",
+ "sumo",
+ "sumptus",
+ "supellex",
+ "super",
+ "suppellex",
+ "supplanto",
+ "suppono",
+ "supra",
+ "surculus",
+ "surgo",
+ "sursum",
+ "suscipio",
+ "suspendo",
+ "sustineo",
+ "suus",
+ "synagoga",
+ "tabella",
+ "tabernus",
+ "tabesco",
+ "tabgo",
+ "tabula",
+ "taceo",
+ "tactus",
+ "taedium",
+ "talio",
+ "talis",
+ "talus",
+ "tam",
+ "tamdiu",
+ "tamen",
+ "tametsi",
+ "tamisium",
+ "tamquam",
+ "tandem",
+ "tantillus",
+ "tantum",
+ "tardus",
+ "tego",
+ "temeritas",
+ "temperantia",
+ "templum",
+ "temptatio",
+ "tempus",
+ "tenax",
+ "tendo",
+ "teneo",
+ "tener",
+ "tenuis",
+ "tenus",
+ "tepesco",
+ "tepidus",
+ "ter",
+ "terebro",
+ "teres",
+ "terga",
+ "tergeo",
+ "tergiversatio",
+ "tergo",
+ "tergum",
+ "termes",
+ "terminatio",
+ "tero",
+ "terra",
+ "terreo",
+ "territo",
+ "terror",
+ "tersus",
+ "tertius",
+ "testimonium",
+ "texo",
+ "textilis",
+ "textor",
+ "textus",
+ "thalassinus",
+ "theatrum",
+ "theca",
+ "thema",
+ "theologus",
+ "thermae",
+ "thesaurus",
+ "thesis",
+ "thorax",
+ "thymbra",
+ "thymum",
+ "tibi",
+ "timidus",
+ "timor",
+ "titulus",
+ "tolero",
+ "tollo",
+ "tondeo",
+ "tonsor",
+ "torqueo",
+ "torrens",
+ "tot",
+ "totidem",
+ "toties",
+ "totus",
+ "tracto",
+ "trado",
+ "traho",
+ "trans",
+ "tredecim",
+ "tremo",
+ "trepide",
+ "tres",
+ "tribuo",
+ "tricesimus",
+ "triduana",
+ "triginta",
+ "tripudio",
+ "tristis",
+ "triumphus",
+ "trucido",
+ "truculenter",
+ "tubineus",
+ "tui",
+ "tum",
+ "tumultus",
+ "tunc",
+ "turba",
+ "turbo",
+ "turpe",
+ "turpis",
+ "tutamen",
+ "tutis",
+ "tyrannus",
+ "uberrime",
+ "ubi",
+ "ulciscor",
+ "ullus",
+ "ulterius",
+ "ultio",
+ "ultra",
+ "umbra",
+ "umerus",
+ "umquam",
+ "una",
+ "unde",
+ "undique",
+ "universe",
+ "unus",
+ "urbanus",
+ "urbs",
+ "uredo",
+ "usitas",
+ "usque",
+ "ustilo",
+ "ustulo",
+ "usus",
+ "uter",
+ "uterque",
+ "utilis",
+ "utique",
+ "utor",
+ "utpote",
+ "utrimque",
+ "utroque",
+ "utrum",
+ "uxor",
+ "vaco",
+ "vacuus",
+ "vado",
+ "vae",
+ "valde",
+ "valens",
+ "valeo",
+ "valetudo",
+ "validus",
+ "vallum",
+ "vapulus",
+ "varietas",
+ "varius",
+ "vehemens",
+ "vel",
+ "velociter",
+ "velum",
+ "velut",
+ "venia",
+ "venio",
+ "ventito",
+ "ventosus",
+ "ventus",
+ "venustas",
+ "ver",
+ "verbera",
+ "verbum",
+ "vere",
+ "verecundia",
+ "vereor",
+ "vergo",
+ "veritas",
+ "vero",
+ "versus",
+ "verto",
+ "verumtamen",
+ "verus",
+ "vesco",
+ "vesica",
+ "vesper",
+ "vespillo",
+ "vester",
+ "vestigium",
+ "vestrum",
+ "vetus",
+ "via",
+ "vicinus",
+ "vicissitudo",
+ "victoria",
+ "victus",
+ "videlicet",
+ "video",
+ "viduata",
+ "viduo",
+ "vigilo",
+ "vigor",
+ "vilicus",
+ "vilis",
+ "vilitas",
+ "villa",
+ "vinco",
+ "vinculum",
+ "vindico",
+ "vinitor",
+ "vinum",
+ "vir",
+ "virga",
+ "virgo",
+ "viridis",
+ "viriliter",
+ "virtus",
+ "vis",
+ "viscus",
+ "vita",
+ "vitiosus",
+ "vitium",
+ "vito",
+ "vivo",
+ "vix",
+ "vobis",
+ "vociferor",
+ "voco",
+ "volaticus",
+ "volo",
+ "volubilis",
+ "voluntarius",
+ "volup",
+ "volutabrum",
+ "volva",
+ "vomer",
+ "vomica",
+ "vomito",
+ "vorago",
+ "vorax",
+ "voro",
+ "vos",
+ "votum",
+ "voveo",
+ "vox",
+ "vulariter",
+ "vulgaris",
+ "vulgivagus",
+ "vulgo",
+ "vulgus",
+ "vulnero",
+ "vulnus",
+ "vulpes",
+ "vulticulus",
+ "vultuosus",
+ "xiphias",
+ },
+ "internet.free_email": []string{
+ "gmail.com",
+ "yahoo.com",
+ "hotmail.com",
+ },
+ "lorem.words": []string{
+ "alias",
+ "consequatur",
+ "aut",
+ "perferendis",
+ "sit",
+ "voluptatem",
+ "accusantium",
+ "doloremque",
+ "aperiam",
+ "eaque",
+ "ipsa",
+ "quae",
+ "ab",
+ "illo",
+ "inventore",
+ "veritatis",
+ "et",
+ "quasi",
+ "architecto",
+ "beatae",
+ "vitae",
+ "dicta",
+ "sunt",
+ "explicabo",
+ "aspernatur",
+ "aut",
+ "odit",
+ "aut",
+ "fugit",
+ "sed",
+ "quia",
+ "consequuntur",
+ "magni",
+ "dolores",
+ "eos",
+ "qui",
+ "ratione",
+ "voluptatem",
+ "sequi",
+ "nesciunt",
+ "neque",
+ "dolorem",
+ "ipsum",
+ "quia",
+ "dolor",
+ "sit",
+ "amet",
+ "consectetur",
+ "adipisci",
+ "velit",
+ "sed",
+ "quia",
+ "non",
+ "numquam",
+ "eius",
+ "modi",
+ "tempora",
+ "incidunt",
+ "ut",
+ "labore",
+ "et",
+ "dolore",
+ "magnam",
+ "aliquam",
+ "quaerat",
+ "voluptatem",
+ "ut",
+ "enim",
+ "ad",
+ "minima",
+ "veniam",
+ "quis",
+ "nostrum",
+ "exercitationem",
+ "ullam",
+ "corporis",
+ "nemo",
+ "enim",
+ "ipsam",
+ "voluptatem",
+ "quia",
+ "voluptas",
+ "sit",
+ "suscipit",
+ "laboriosam",
+ "nisi",
+ "ut",
+ "aliquid",
+ "ex",
+ "ea",
+ "commodi",
+ "consequatur",
+ "quis",
+ "autem",
+ "vel",
+ "eum",
+ "iure",
+ "reprehenderit",
+ "qui",
+ "in",
+ "ea",
+ "voluptate",
+ "velit",
+ "esse",
+ "quam",
+ "nihil",
+ "molestiae",
+ "et",
+ "iusto",
+ "odio",
+ "dignissimos",
+ "ducimus",
+ "qui",
+ "blanditiis",
+ "praesentium",
+ "laudantium",
+ "totam",
+ "rem",
+ "voluptatum",
+ "deleniti",
+ "atque",
+ "corrupti",
+ "quos",
+ "dolores",
+ "et",
+ "quas",
+ "molestias",
+ "excepturi",
+ "sint",
+ "occaecati",
+ "cupiditate",
+ "non",
+ "provident",
+ "sed",
+ "ut",
+ "perspiciatis",
+ "unde",
+ "omnis",
+ "iste",
+ "natus",
+ "error",
+ "similique",
+ "sunt",
+ "in",
+ "culpa",
+ "qui",
+ "officia",
+ "deserunt",
+ "mollitia",
+ "animi",
+ "id",
+ "est",
+ "laborum",
+ "et",
+ "dolorum",
+ "fuga",
+ "et",
+ "harum",
+ "quidem",
+ "rerum",
+ "facilis",
+ "est",
+ "et",
+ "expedita",
+ "distinctio",
+ "nam",
+ "libero",
+ "tempore",
+ "cum",
+ "soluta",
+ "nobis",
+ "est",
+ "eligendi",
+ "optio",
+ "cumque",
+ "nihil",
+ "impedit",
+ "quo",
+ "porro",
+ "quisquam",
+ "est",
+ "qui",
+ "minus",
+ "id",
+ "quod",
+ "maxime",
+ "placeat",
+ "facere",
+ "possimus",
+ "omnis",
+ "voluptas",
+ "assumenda",
+ "est",
+ "omnis",
+ "dolor",
+ "repellendus",
+ "temporibus",
+ "autem",
+ "quibusdam",
+ "et",
+ "aut",
+ "consequatur",
+ "vel",
+ "illum",
+ "qui",
+ "dolorem",
+ "eum",
+ "fugiat",
+ "quo",
+ "voluptas",
+ "nulla",
+ "pariatur",
+ "at",
+ "vero",
+ "eos",
+ "et",
+ "accusamus",
+ "officiis",
+ "debitis",
+ "aut",
+ "rerum",
+ "necessitatibus",
+ "saepe",
+ "eveniet",
+ "ut",
+ "et",
+ "voluptates",
+ "repudiandae",
+ "sint",
+ "et",
+ "molestiae",
+ "non",
+ "recusandae",
+ "itaque",
+ "earum",
+ "rerum",
+ "hic",
+ "tenetur",
+ "a",
+ "sapiente",
+ "delectus",
+ "ut",
+ "aut",
+ "reiciendis",
+ "voluptatibus",
+ "maiores",
+ "doloribus",
+ "asperiores",
+ "repellat",
+ },
+ "name.name": []string{
+ "#{prefix} #{first_name} #{last_name}",
+ "#{first_name} #{last_name} #{suffix}",
+ "#{first_name} #{last_name}",
+ "#{first_name} #{last_name}",
+ "#{first_name} #{tussenvoegsel} #{last_name}",
+ "#{first_name} #{tussenvoegsel} #{last_name}",
+ },
+ "address.building_number": []string{
+ "Koninging Juliana",
+ "Prinses Maxima",
+ "Prinses Beatrix",
+ "Meester van Vollenhoven",
+ "Prinses Irene",
+ },
+ "name.tussenvoegsel": []string{
+ "van",
+ "van de",
+ "van den",
+ "van 't",
+ "van het",
+ "de",
+ "den",
+ },
+ "phone_number.formats": []string{
+ "(####) ######",
+ "##########",
+ "06########",
+ "06 #### ####",
+ },
+ "address.street_suffix": []string{
+ "straat",
+ "laan",
+ "weg",
+ "plantsoen",
+ "park",
+ },
+ "address.city_prefix": []string{
+ "Noord",
+ "Oost",
+ "West",
+ "Zuid",
+ "Nieuw",
+ "Oud",
+ },
+ "address.state": []string{
+ "Noord-Holland",
+ "Zuid-Holland",
+ "Utrecht",
+ "Zeeland",
+ "Overijssel",
+ "Gelderland",
+ "Drenthe",
+ "Friesland",
+ "Groningen",
+ "Noord-Braband",
+ "Limburg",
+ },
+ "internet.domain_suffix": []string{
+ "nl",
+ "com",
+ "net",
+ "org",
+ },
+ "name.last_name": []string{
+ "Bakker",
+ "Beek",
+ "Berg",
+ "Boer",
+ "Bos",
+ "Bosch",
+ "Brink",
+ "Broek",
+ "Brouwer",
+ "Bruin",
+ "Dam",
+ "Dekker",
+ "Dijk",
+ "Dijkstra",
+ "Graaf",
+ "Groot",
+ "Haan",
+ "Hendriks",
+ "Heuvel",
+ "Hoek",
+ "Jacobs",
+ "Jansen",
+ "Janssen",
+ "Jong",
+ "Klein",
+ "Kok",
+ "Koning",
+ "Koster",
+ "Leeuwen",
+ "Linden",
+ "Maas",
+ "Meer",
+ "Meijer",
+ "Mulder",
+ "Peters",
+ "Ruiter",
+ "Schouten",
+ "Smit",
+ "Smits",
+ "Stichting",
+ "Veen",
+ "Ven",
+ "Vermeulen",
+ "Visser",
+ "Vliet",
+ "Vos",
+ "Vries",
+ "Wal",
+ "Willems",
+ "Wit",
+ },
+ "name.suffix": []string{
+ "Jr.",
+ "Sr.",
+ "I",
+ "II",
+ "III",
+ "IV",
+ "V",
+ },
+ },
+ "zh-CN": map[string][]string{
+ "name.first_name": []string{
+ "绍齐",
+ "博文",
+ "梓晨",
+ "胤祥",
+ "瑞霖",
+ "明哲",
+ "天翊",
+ "凯瑞",
+ "健雄",
+ "耀杰",
+ "潇然",
+ "子涵",
+ "越彬",
+ "钰轩",
+ "智辉",
+ "致远",
+ "俊驰",
+ "雨泽",
+ "烨磊",
+ "晟睿",
+ "文昊",
+ "修洁",
+ "黎昕",
+ "远航",
+ "旭尧",
+ "鸿涛",
+ "伟祺",
+ "荣轩",
+ "越泽",
+ "浩宇",
+ "瑾瑜",
+ "皓轩",
+ "擎苍",
+ "擎宇",
+ "志泽",
+ "子轩",
+ "睿渊",
+ "弘文",
+ "哲瀚",
+ "雨泽",
+ "楷瑞",
+ "建辉",
+ "晋鹏",
+ "天磊",
+ "绍辉",
+ "泽洋",
+ "鑫磊",
+ "鹏煊",
+ "昊强",
+ "伟宸",
+ "博超",
+ "君浩",
+ "子骞",
+ "鹏涛",
+ "炎彬",
+ "鹤轩",
+ "越彬",
+ "风华",
+ "靖琪",
+ "明辉",
+ "伟诚",
+ "明轩",
+ "健柏",
+ "修杰",
+ "志泽",
+ "弘文",
+ "峻熙",
+ "嘉懿",
+ "煜城",
+ "懿轩",
+ "烨伟",
+ "苑博",
+ "伟泽",
+ "熠彤",
+ "鸿煊",
+ "博涛",
+ "烨霖",
+ "烨华",
+ "煜祺",
+ "智宸",
+ "正豪",
+ "昊然",
+ "明杰",
+ "立诚",
+ "立轩",
+ "立辉",
+ "峻熙",
+ "弘文",
+ "熠彤",
+ "鸿煊",
+ "烨霖",
+ "哲瀚",
+ "鑫鹏",
+ "昊天",
+ "思聪",
+ "展鹏",
+ "笑愚",
+ "志强",
+ "炫明",
+ "雪松",
+ "思源",
+ "智渊",
+ "思淼",
+ "晓啸",
+ "天宇",
+ "浩然",
+ "文轩",
+ "鹭洋",
+ "振家",
+ "乐驹",
+ "晓博",
+ "文博",
+ "昊焱",
+ "立果",
+ "金鑫",
+ "锦程",
+ "嘉熙",
+ "鹏飞",
+ "子默",
+ "思远",
+ "浩轩",
+ "语堂",
+ "聪健",
+ "明",
+ "文",
+ "果",
+ "思",
+ "鹏",
+ "驰",
+ "涛",
+ "琪",
+ "浩",
+ "航",
+ "彬",
+ },
+ "address.building_number": []string{
+ "#####",
+ "####",
+ "###",
+ "##",
+ "#",
+ },
+ "address.city_suffix": []string{
+ "沙市",
+ "京市",
+ "宁市",
+ "安市",
+ "乡县",
+ "海市",
+ "码市",
+ "汉市",
+ "阳市",
+ "都市",
+ "州市",
+ "门市",
+ "阳市",
+ "口市",
+ "原市",
+ "南市",
+ "徽市",
+ "林市",
+ "头市",
+ },
+ "address.state_abbr": []string{
+ "京",
+ "沪",
+ "津",
+ "渝",
+ "黑",
+ "吉",
+ "辽",
+ "蒙",
+ "冀",
+ "新",
+ "甘",
+ "青",
+ "陕",
+ "宁",
+ "豫",
+ "鲁",
+ "晋",
+ "皖",
+ "鄂",
+ "湘",
+ "苏",
+ "川",
+ "黔",
+ "滇",
+ "桂",
+ "藏",
+ "浙",
+ "赣",
+ "粤",
+ "闽",
+ "琼",
+ "港",
+ "澳",
+ },
+ "name.last_name": []string{
+ "王",
+ "李",
+ "张",
+ "刘",
+ "陈",
+ "杨",
+ "黄",
+ "吴",
+ "赵",
+ "周",
+ "徐",
+ "孙",
+ "马",
+ "朱",
+ "胡",
+ "林",
+ "郭",
+ "何",
+ "高",
+ "罗",
+ "郑",
+ "梁",
+ "谢",
+ "宋",
+ "唐",
+ "许",
+ "邓",
+ "冯",
+ "韩",
+ "曹",
+ "曾",
+ "彭",
+ "萧",
+ "蔡",
+ "潘",
+ "田",
+ "董",
+ "袁",
+ "于",
+ "余",
+ "叶",
+ "蒋",
+ "杜",
+ "苏",
+ "魏",
+ "程",
+ "吕",
+ "丁",
+ "沈",
+ "任",
+ "姚",
+ "卢",
+ "傅",
+ "钟",
+ "姜",
+ "崔",
+ "谭",
+ "廖",
+ "范",
+ "汪",
+ "陆",
+ "金",
+ "石",
+ "戴",
+ "贾",
+ "韦",
+ "夏",
+ "邱",
+ "方",
+ "侯",
+ "邹",
+ "熊",
+ "孟",
+ "秦",
+ "白",
+ "江",
+ "阎",
+ "薛",
+ "尹",
+ "段",
+ "雷",
+ "黎",
+ "史",
+ "龙",
+ "陶",
+ "贺",
+ "顾",
+ "毛",
+ "郝",
+ "龚",
+ "邵",
+ "万",
+ "钱",
+ "严",
+ "赖",
+ "覃",
+ "洪",
+ "武",
+ "莫",
+ "孔",
+ },
+ "name.name": []string{
+ "#{last_name}#{first_name}",
+ },
+ "address.city_prefix": []string{
+ "长",
+ "上",
+ "南",
+ "西",
+ "北",
+ "诸",
+ "宁",
+ "珠",
+ "武",
+ "衡",
+ "成",
+ "福",
+ "厦",
+ "贵",
+ "吉",
+ "海",
+ "太",
+ "济",
+ "安",
+ "吉",
+ "包",
+ },
+ "address.default_country": []string{
+ "中国",
+ },
+ "address.street_suffix": []string{
+ "巷",
+ "街",
+ "路",
+ "桥",
+ "侬",
+ "旁",
+ "中心",
+ "栋",
+ },
+ "phone_number.formats": []string{
+ "###-########",
+ "####-########",
+ "###########",
+ },
+ "address.street_name": []string{
+ "#{Name.last_name}家#{street_suffix}",
+ },
+ "address.street_address": []string{
+ "#{street_name}#{building_number}号",
+ },
+ "address.city": []string{
+ "#{city_prefix}#{city_suffix}",
+ },
+ "address.postcode": []string{
+ "######",
+ },
+ "address.state": []string{
+ "北京市",
+ "上海市",
+ "天津市",
+ "重庆市",
+ "黑龙江省",
+ "吉林省",
+ "辽宁省",
+ "内蒙古",
+ "河北省",
+ "新疆",
+ "甘肃省",
+ "青海省",
+ "陕西省",
+ "宁夏",
+ "河南省",
+ "山东省",
+ "山西省",
+ "安徽省",
+ "湖北省",
+ "湖南省",
+ "江苏省",
+ "四川省",
+ "贵州省",
+ "云南省",
+ "广西省",
+ "西藏",
+ "浙江省",
+ "江西省",
+ "广东省",
+ "福建省",
+ "海南省",
+ "香港",
+ "澳门",
+ },
+ "company.name": []string{
+ "#{Name.first_name}#{Name.first_name}#{suffix}",
+ },
+ "company.suffix": []string{
+ "有限公司",
+ "集团",
+ "机构",
+ },
+ "lorem.words": []string{
+ "瞥",
+ "瞅",
+ "望",
+ "瞄",
+ "瞪",
+ "盯",
+ "观察",
+ "凝视",
+ "注视",
+ "看望",
+ "探望",
+ "瞻仰",
+ "扫视",
+ "环视",
+ "仰望",
+ "俯视",
+ "鸟瞰",
+ "俯瞰",
+ "远望",
+ "眺望",
+ "了望",
+ "讲",
+ "曰",
+ "讨论",
+ "议论",
+ "谈论",
+ "交流",
+ "交谈",
+ "嚷",
+ "吼",
+ "嚎",
+ "啼",
+ "鸣",
+ "嘶",
+ "嘶叫",
+ "嚎叫",
+ "叫嚷",
+ "首",
+ "元",
+ "甲",
+ "子",
+ "首先",
+ "首屈一指",
+ "名列前茅",
+ "吱呀",
+ "喀嚓",
+ "扑哧",
+ "哗啦",
+ "沙沙",
+ "咕咚",
+ "叮当",
+ "咕噜",
+ "嗖嗖",
+ "唧唧喳喳",
+ "叽叽喳喳",
+ "轰轰隆隆",
+ "叮叮当当",
+ "叮叮咚咚",
+ "哗哗啦啦",
+ "鸟语花香",
+ "春暖花开",
+ "阳春三月",
+ "万物复苏",
+ "春风轻拂",
+ "烈日当空",
+ "暑气逼人",
+ "大汗淋漓",
+ "挥汗如雨",
+ "乌云翻滚",
+ "秋高气爽",
+ "五谷丰登",
+ "万花凋谢",
+ "天高云淡",
+ "落叶沙沙",
+ "三九严寒",
+ "天寒地冻",
+ "雪花飞舞",
+ "寒冬腊月",
+ "千里冰封",
+ "头重脚轻",
+ "指手画脚",
+ "愁眉苦脸",
+ "心明眼亮",
+ "目瞪口呆",
+ "张口结舌",
+ "交头接耳",
+ "面黄肌瘦",
+ "眼明手快",
+ "眼高手低",
+ "昂首挺胸",
+ "心灵手巧",
+ "摩拳擦掌",
+ "摩肩接踵",
+ "鼠目寸光",
+ "谈虎色变",
+ "兔死狐悲",
+ "龙马精神",
+ "杯弓蛇影",
+ "马到成功",
+ "与虎谋皮",
+ "亡羊补牢",
+ "雄狮猛虎",
+ "鹤立鸡群",
+ "狗急跳墙",
+ "叶公好龙",
+ "声名狼籍",
+ "狐假虎威",
+ "画蛇添足",
+ "九牛一毛",
+ "鸡犬不宁",
+ "一箭双雕",
+ "惊弓之鸟",
+ "胆小如鼠",
+ "打草惊蛇",
+ "鸡飞蛋打",
+ "指鹿为马",
+ "顺手牵羊",
+ "对牛弹琴",
+ "鸟语花香",
+ "虎背熊腰",
+ "杀鸡儆猴",
+ "莺歌燕舞",
+ "鸦雀无声",
+ "鱼目混珠",
+ "鱼龙混杂",
+ "龙争虎斗",
+ "出生牛犊",
+ "望女成凤",
+ "望子成龙",
+ "狗尾续貂",
+ "爱屋及乌",
+ "螳臂当车",
+ "蛛丝马迹",
+ "投鼠忌器",
+ "门口罗雀",
+ "管中窥豹",
+ "马到成功",
+ "龙马精神",
+ "马失前蹄",
+ "指鹿为马",
+ "一马当先",
+ "闻鸡起舞",
+ "鹤立鸡群",
+ "杀鸡取卵",
+ "鸡犬不宁",
+ "鸡飞蛋打",
+ "小试牛刀",
+ "九牛一毛",
+ "牛头马面",
+ "牛鬼蛇神",
+ "牛马不如",
+ "一诺千金",
+ "一鸣惊人",
+ "一马当先",
+ "一触即发",
+ "一气呵成",
+ "一丝不苟",
+ "一言九鼎",
+ "一日三秋",
+ "一落千丈",
+ "一字千金",
+ "一本万利",
+ "一手遮天",
+ "一文不值",
+ "一贫如洗",
+ "一身是胆",
+ "一毛不拔",
+ "二三其德",
+ "两面三刀",
+ "两肋插刀",
+ "两败俱伤",
+ "两情相悦",
+ "两袖清风",
+ "两全其美",
+ "三生有幸",
+ "三思而行",
+ "三令五申",
+ "三头六臂",
+ "三更半夜",
+ "三顾茅庐",
+ "四面楚歌",
+ "四面八方",
+ "四海为家",
+ "四通八达",
+ "四平八稳",
+ "四分五裂",
+ "五大三粗",
+ "五光十色",
+ "五花八门",
+ "五体投地",
+ "五谷丰登",
+ "五彩缤纷",
+ "五湖四海",
+ "六神无主",
+ "六根清净",
+ "六道轮回",
+ "六亲不认",
+ "七零八落",
+ "七嘴八舌",
+ "七高八低",
+ "七窍生烟",
+ "七上八下",
+ "七折八扣",
+ "七拼八凑",
+ "八面玲珑",
+ "八面威风",
+ "八仙过海,各显神通",
+ "九霄云外",
+ "九牛一毛",
+ "九死一生",
+ "九鼎一丝",
+ "十指连心",
+ "十面埋伏",
+ "十字街头",
+ "十全十美",
+ "十年寒窗",
+ "十万火急",
+ "十拿九稳",
+ "桃红柳绿",
+ "万紫千红",
+ "青红皂白",
+ "黑白分明",
+ "绿意盎然",
+ "绿树成阴",
+ "素车白马",
+ "万古长青",
+ "漆黑一团",
+ "灯红酒绿",
+ "面红耳赤",
+ "青山绿水",
+ "白纸黑字",
+ "青黄不接",
+ "金灿灿",
+ "黄澄澄",
+ "绿莹莹",
+ "红彤彤",
+ "红艳艳",
+ "红通通",
+ "白茫茫",
+ "黑乎乎",
+ "黑压压",
+ "鹅黄",
+ "乳白",
+ "湖蓝",
+ "枣红",
+ "雪白",
+ "火红",
+ "梨黄",
+ "孔雀蓝",
+ "柠檬黄",
+ "象牙白",
+ "苹果绿",
+ "五彩缤纷",
+ "五光十色",
+ "万紫千红",
+ "绚丽多彩",
+ "色彩斑斓",
+ "千姿百态",
+ "千姿万状",
+ "姿态万千",
+ "形态多样",
+ "形态不一",
+ "不胜枚举",
+ "数不胜数",
+ "不可胜数",
+ "不计其数",
+ "成千上万",
+ "成群结队",
+ "人山人海",
+ "排山倒海",
+ "琳琅满目",
+ "车水马龙",
+ "铺天盖地",
+ "满山遍野",
+ "变化多端",
+ "变幻莫测",
+ "千变万化",
+ "瞬息万变",
+ "一泻千里",
+ "一目十行",
+ "快如闪电",
+ "移步换影",
+ "健步如飞",
+ "光阴似箭",
+ "日月如梭",
+ "星转斗移",
+ "流星赶月",
+ "慢慢",
+ "缓缓",
+ "冉冉",
+ "徐徐",
+ "缓慢",
+ "一眨眼",
+ "一瞬间",
+ "刹那间",
+ "顷刻间",
+ "霎时间",
+ "时而",
+ "去世",
+ "已故",
+ "牺牲",
+ "逝世",
+ "与世长辞",
+ "为国捐躯",
+ "驾崩",
+ "苦思冥想",
+ "静思默想",
+ "绞尽脑汁",
+ "拾金不昧",
+ "舍己为人",
+ "视死如归",
+ "坚贞不屈",
+ "不屈不挠",
+ "身材魁梧",
+ "亭亭玉立",
+ "老态龙钟",
+ "西装革履",
+ "婀娜多姿",
+ "洗耳恭听",
+ "昂首阔步",
+ "拳打脚踢",
+ "交头接耳",
+ "左顾右盼",
+ "扬眉吐气",
+ "怒目而视",
+ "火眼金睛",
+ "面红耳赤",
+ "热泪盈眶",
+ "泪流满面",
+ "泪如雨下",
+ "泪眼汪汪",
+ "泪如泉涌",
+ "嚎啕大哭",
+ "喜笑颜开",
+ "眉开眼笑",
+ "哈哈大笑",
+ "嫣然一笑",
+ "微微一笑",
+ "忐忑不安",
+ "惊慌失措",
+ "闷闷不乐",
+ "激动人心",
+ "笑容可掬",
+ "微微一笑",
+ "开怀大笑",
+ "喜出望外",
+ "乐不可支",
+ "火冒三丈",
+ "怒发冲冠",
+ "勃然大怒",
+ "怒气冲冲",
+ "咬牙切齿",
+ "可憎可恶",
+ "十分可恶",
+ "深恶痛绝",
+ "疾恶如仇",
+ "恨之入骨",
+ "伤心落泪",
+ "欲哭无泪",
+ "失声痛哭",
+ "泣不成声",
+ "潸然泪下",
+ "无精打采",
+ "顾虑重重",
+ "忧愁不安",
+ "愁眉苦脸",
+ "闷闷不乐",
+ "激动不已",
+ "激动人心",
+ "百感交集",
+ "激动万分",
+ "感慨万分",
+ "舒舒服服",
+ "高枕无忧",
+ "无忧无虑",
+ "悠然自得",
+ "心旷神怡",
+ "迫不及待",
+ "急急忙忙",
+ "急不可待",
+ "操之过急",
+ "焦急万分",
+ "追悔莫及",
+ "悔恨交加",
+ "于心不安",
+ "深感内疚",
+ "羞愧难言",
+ "心灰意冷",
+ "大失所望",
+ "灰心丧气",
+ "毫无希望",
+ "黯然神伤",
+ "惊弓之鸟",
+ "提心吊胆",
+ "惊惶失措",
+ "惊恐万状",
+ "惶惶不安",
+ "深入浅出",
+ "借尸还魂",
+ "买空卖空",
+ "内忧外患",
+ "前呼后拥",
+ "异口同声",
+ "声东击西",
+ "三长两短",
+ "凶多吉少",
+ "不进则退",
+ "大同小异",
+ "大公无私",
+ "承上启下",
+ "天长日久",
+ "天崩地裂",
+ "天老地荒",
+ "理直气壮",
+ "云开日出",
+ "长短不同",
+ "黑白相间",
+ "表里如一",
+ "喜怒哀乐",
+ "安危冷暖",
+ "生死存亡",
+ "茫雾似轻",
+ "枫叶似火",
+ "骄阳似火",
+ "秋月似钩",
+ "日月如梭",
+ "雪花如席",
+ "雪飘如絮",
+ "细雨如烟",
+ "星月如钩",
+ "碧空如洗",
+ "暴雨如注",
+ "吉祥如意",
+ "视死如归",
+ "挥金如土",
+ "疾走如飞",
+ "一见如故",
+ "和好如初",
+ "心急如焚",
+ "早出晚归",
+ "眉清目秀",
+ "月圆花好",
+ "李白桃红",
+ "心直口快",
+ "水落石出",
+ "水滴石穿",
+ "月白风清",
+ "字正腔圆",
+ "口蜜腹剑",
+ "雨打风吹",
+ "虎啸龙吟",
+ "龙争虎斗",
+ "走马观花",
+ "废寝忘食",
+ "张灯结彩",
+ "招兵买马",
+ "争分夺秒",
+ "坐井观天",
+ "思前顾后",
+ "投桃报李",
+ "行云流水",
+ "乘热打铁",
+ "生离死别",
+ "舍近求远",
+ "返老还童",
+ "载歌载舞",
+ "难舍难分",
+ "能屈能伸",
+ "蹑手蹑脚",
+ "有始有终",
+ "若即若离",
+ "古色古香",
+ "无影无踪",
+ "无牵无挂",
+ "无边无际",
+ "无情无义",
+ "无忧无虑",
+ "无缘无故",
+ "无穷无尽",
+ "不干不净",
+ "不清不楚",
+ "不明不白",
+ "不闻不问",
+ "不伦不类",
+ "不吵不闹",
+ "不理不睬",
+ "自言自语",
+ "自说自话",
+ "自吹自擂",
+ "自私自利",
+ "自高自大",
+ "自暴自弃",
+ "自给自足",
+ "时隐时现",
+ "时高时低",
+ "时明时暗",
+ "时上时下",
+ "半信半疑",
+ "半明半昧",
+ "半梦半醒",
+ "半推半就",
+ "神采奕奕",
+ "星光熠熠",
+ "小心翼翼",
+ "炊烟袅袅",
+ "白雪皑皑",
+ "烈日灼灼",
+ "赤日炎炎",
+ "绿浪滚滚",
+ "波浪滚滚",
+ "云浪滚滚",
+ "麦浪滚滚",
+ "热浪滚滚",
+ "江水滚滚",
+ "车轮滚滚",
+ "果实累累",
+ "秋实累累",
+ "硕果累累",
+ "果实累累",
+ "尸骨累累",
+ "弹孔累累",
+ "白骨累累",
+ "生气勃勃",
+ "生机勃勃",
+ "生气勃勃",
+ "朝气勃勃",
+ "兴致勃勃",
+ "雄心勃勃",
+ "千军万马",
+ "千言万语",
+ "千变万化",
+ "千山万水",
+ "千秋万代",
+ "千丝万缕",
+ "千奇百怪",
+ "千锤百炼",
+ "千方百计",
+ "千疮百孔",
+ "千姿百态",
+ "前因后果",
+ "前呼后拥",
+ "前思后想",
+ "前赴后继",
+ "前仰后合",
+ "前倨后恭",
+ "天经地义",
+ "天罗地网",
+ "天昏地暗",
+ "天诛地灭",
+ "天南地北",
+ "天荒地老",
+ "有眼无珠",
+ "有气无力",
+ "有始无终",
+ "有备无患",
+ "有恃无恐",
+ "有勇无谋",
+ "有名无实",
+ "东倒西歪",
+ "东张西望",
+ "东奔西走",
+ "东拉西扯",
+ "东拼西凑",
+ "东邻西舍",
+ "东鳞西爪",
+ "迫在眉睫",
+ "千钧一发",
+ "燃眉之急",
+ "十万火急",
+ "震耳欲聋",
+ "惊天动地",
+ "震天动地",
+ "响彻云霄",
+ "众志成城",
+ "齐心协力",
+ "同心同德",
+ "万众一心",
+ "废寝忘食",
+ "刻苦钻研",
+ "争分夺秒",
+ "精益求精",
+ "专心致志",
+ "全神贯注",
+ "聚精会神",
+ "一心一意",
+ "议论纷纷",
+ "各抒己见",
+ "七嘴八舌",
+ "争论不休",
+ "车水马龙",
+ "人山人海",
+ "人声鼎沸",
+ "摩肩接踵",
+ "生龙活虎",
+ "人流如潮",
+ "振奋人心",
+ "洁白无瑕",
+ "白璧无瑕",
+ "冰清玉洁",
+ "洁白如玉",
+ "言而有信",
+ "一言九鼎",
+ "一诺千金",
+ "信守诺言",
+ "毅然决然",
+ "当机立断",
+ "雷厉风行",
+ "前所未有",
+ "空前绝后",
+ "绝无仅有",
+ "史无前例",
+ "犹豫不决",
+ "出尔反尔",
+ "优柔寡断",
+ "狐疑不决",
+ "浩浩荡荡",
+ "气势磅礴",
+ "气势恢弘",
+ "气势非凡",
+ "枝繁叶茂",
+ "绿树成阴",
+ "绿阴如盖",
+ "闻名于世",
+ "举世闻名",
+ "闻名天下",
+ "大名鼎鼎",
+ "手足无措",
+ "手忙脚乱",
+ "手舞足蹈",
+ "足下生辉",
+ "赞不绝口",
+ "赞叹不已",
+ "连连称赞",
+ "叹为观止",
+ "慷慨激昂",
+ "壮志凌云",
+ "铿锵有力",
+ "语气坚定",
+ "汹涌澎湃",
+ "波涛汹涌",
+ "白浪滔天",
+ "惊涛骇浪",
+ "风平浪静",
+ "水平如镜",
+ "波光粼粼",
+ "碧波荡漾",
+ "旭日东升",
+ "绵绵细雨",
+ "桃红柳绿",
+ "艳阳高照",
+ "山河壮丽",
+ "高山峻岭",
+ "危峰兀立",
+ "连绵不断",
+ "飞流直下",
+ "一泻千里",
+ "万丈瀑布",
+ "水帘悬挂",
+ "雄鸡报晓",
+ "红日东升",
+ "朝霞辉映",
+ "金光万道",
+ "中午时分",
+ "丽日当空",
+ "艳阳高照",
+ "当午日明",
+ "暮色苍茫",
+ "夕阳西下",
+ "天色模糊",
+ "晚风习习",
+ "华灯初上",
+ "月明星稀",
+ "灯火通明",
+ "漫漫长夜",
+ "万家灯火",
+ "夜幕降临",
+ "狂风暴雨",
+ "倾盆大雨",
+ "瓢泼大雨",
+ "暴风骤雨",
+ "秋雨绵绵",
+ "绵绵细雨",
+ "细雨如烟",
+ "淅淅沥沥",
+ "暴雨如注",
+ "风和日丽",
+ "天高云淡",
+ "万里无云",
+ "秋高气爽",
+ "纷纷扬扬",
+ "粉妆玉砌",
+ "银妆素裹",
+ "白雪皑皑",
+ "冰雪消融",
+ "冰天雪地",
+ "白雪皑皑",
+ "雪花飞舞",
+ "大雪封门",
+ "雪中送炭",
+ "和风拂面",
+ "风狂雨猛",
+ "秋风凉爽",
+ "北风呼啸",
+ "轻风徐徐",
+ "令人发指",
+ "丧失人性",
+ },
+ "internet.domain_suffix": []string{
+ "com",
+ "info",
+ "name",
+ "net",
+ "org",
+ "xxx",
+ "cn",
+ },
+ },
+}
diff --git a/vendor/github.com/manveru/faker/faker.go b/vendor/github.com/manveru/faker/faker.go
new file mode 100644
index 000000000..344f6d593
--- /dev/null
+++ b/vendor/github.com/manveru/faker/faker.go
@@ -0,0 +1,311 @@
+// faker generates fake data in various languages.
+package faker
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "math/rand"
+ "net"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+ "unicode"
+ "unicode/utf8"
+)
+
+var (
+ keyPattern = regexp.MustCompile("#{[^}]+}")
+ numerifyPattern = regexp.MustCompile("#+")
+ removeFancyChars = regexp.MustCompile("\\W")
+ needRegexParsing = map[string]bool{
+ "name.title.descriptor": false,
+ "name.first_name": false,
+ "name.title.job": false,
+ "name.title.level": false,
+ "name.prefix": false,
+ "name.suffix": false,
+ "name.last_name": false,
+ "name.name": true,
+ "internet.domain_suffix": false,
+ "internet.free_email": false,
+ "adress.country": false,
+ "address.state": false,
+ "address.city": true,
+ "address.city_prefix": false,
+ "adress.city_suffix": false,
+ "address.postcode": true,
+ "address.street_address": true,
+ "address.street_name": true,
+ "address.street_suffix": false,
+ "address.state_abbr": false,
+ "address.building_number": true,
+ "address.secondary_address": true,
+ "company.name": true,
+ "compagny.suffix": false,
+ "company.buzzwords.0": false,
+ "company.buzzwords.1": false,
+ "company.buzzwords.2": false,
+ "company.bs.0": false,
+ "company.bs.1": false,
+ "company.bs.2": false,
+ "phone_number.formats": true,
+ "lorem.words": false,
+ "lorem.supplemental": false,
+ }
+)
+
+type Faker struct {
+ Language string
+ Dict map[string][]string
+ Rand Rand
+ Buffer bytes.Buffer
+}
+
+type Rand interface {
+ Float32() float32
+ Float64() float64
+ Int63n(int64) int64
+ Intn(int) int
+}
+
+func New(lang string) (*Faker, error) {
+ subDict, ok := Dict[lang]
+ if !ok {
+ return nil, errors.New(fmt.Sprintf("No such language: %q", lang))
+ }
+
+ source := rand.NewSource(time.Now().UnixNano())
+ return &Faker{
+ Language: lang,
+ Dict: subDict,
+ Rand: rand.New(source),
+ }, nil
+}
+
+func (f *Faker) Words(count int, supplemental bool) []string {
+ out := make([]string, 0, count)
+ for n := 0; n < count; n++ {
+ if supplemental && f.Rand.Float32() > 0.5 {
+ out = append(out, f.parse("lorem.supplemental"))
+ } else {
+ out = append(out, f.parse("lorem.words"))
+ }
+ }
+ return out
+}
+
+func (f *Faker) Characters(count int) string {
+ if count < 1 {
+ return ""
+ }
+ out := make([]rune, 0, count)
+ for n := 0; n < count; n++ {
+ r := []rune(strconv.FormatInt(f.Rand.Int63n(36), 36))[0]
+ out = append(out, r)
+ }
+ return string(out)
+}
+
+func (f *Faker) Sentence(words int, supplemental bool) string {
+ if f.Language == "zh-CN" {
+ return strings.Join(f.Words(words+f.Rand.Intn(6), supplemental), "") + "。"
+ }
+ return capitalize(strings.Join(f.Words(words+f.Rand.Intn(6), supplemental), " ")) + "."
+}
+
+func (f *Faker) Sentences(count int, supplemental bool) []string {
+ out := make([]string, 0, count)
+ for n := 0; n < count; n++ {
+ out = append(out, f.Sentence(3, supplemental))
+ }
+ return out
+}
+
+func (f *Faker) Paragraph(sentences int, supplemental bool) string {
+ return strings.Join(f.Sentences(sentences, supplemental), " ")
+}
+
+func (f *Faker) Paragraphs(count int, supplemental bool) []string {
+ out := make([]string, 0, count)
+ for n := 0; n < count; n++ {
+ out = append(out, f.Paragraph(3, supplemental))
+ }
+ return out
+}
+
+func (f *Faker) City() string { return f.parse("address.city") }
+func (f *Faker) StreetName() string { return f.parse("address.street_name") }
+func (f *Faker) StreetAddress() string { return f.numerify(f.parse("address.street_address")) }
+func (f *Faker) SecondaryAddress() string { return f.numerify(f.parse("address.secondary_address")) }
+func (f *Faker) PostCode() string { return f.bothify(f.parse("address.postcode")) }
+func (f *Faker) StreetSuffix() string { return f.parse("address.street_suffix") }
+func (f *Faker) CitySuffix() string { return f.parse("address.city_suffix") }
+func (f *Faker) CityPrefix() string { return f.parse("address.city_prefix") }
+func (f *Faker) StateAbbr() string { return f.parse("address.state_abbr") }
+func (f *Faker) State() string { return f.parse("address.state") }
+func (f *Faker) Country() string { return f.parse("address.country") }
+func (f *Faker) Latitude() float64 { return (f.Rand.Float64() * 180) - 90 }
+func (f *Faker) Longitude() float64 { return (f.Rand.Float64() * 360) - 180 }
+
+func (f *Faker) CompanyName() string { return f.parse("company.name") }
+func (f *Faker) CompanySuffix() string { return f.parse("company.suffix") }
+func (f *Faker) CompanyCatchPhrase() string {
+ return f.combine("company.buzzwords.0", "company.buzzwords.1", "company.buzzwords.2")
+}
+func (f *Faker) CompanyBs() string { return f.combine("company.bs.0", "company.bs.1", "company.bs.2") }
+
+func (f *Faker) PhoneNumber() string {
+ return f.numerify(f.parse("phone_number.formats"))
+}
+
+func (f *Faker) CellPhoneNumber() string {
+ _, got := f.Dict["phone_number.cell_phone"]
+ if got {
+ return f.numerify(f.parse("phone_number.cell_phone"))
+ }
+ return f.numerify(f.parse("phone_number.formats"))
+}
+
+func (f *Faker) Email() string { return f.UserName() + "@" + f.DomainName() }
+func (f *Faker) FreeEmail() string { return f.UserName() + "@" + f.parse("internet.free_email") }
+func (f *Faker) SafeEmail() string {
+ return f.UserName() + "@example." + f.sample([]string{"org", "com", "net"})
+}
+func (f *Faker) UserName() string {
+ tmp := []string{f.FirstName(), f.FirstName() + f.sample([]string{".", "_"}) + f.LastName()}
+ return fixUmlauts(strings.ToLower(f.sample(tmp)))
+}
+func (f *Faker) DomainName() string {
+ return f.DomainWord() + "." + f.DomainSuffix()
+}
+
+func (f *Faker) DomainWord() string {
+ return removeFancyChars.ReplaceAllString(
+ strings.ToLower(
+ strings.SplitN(f.CompanyName(), " ", 2)[0]), "")
+}
+func (f *Faker) DomainSuffix() string { return f.parse("internet.domain_suffix") }
+func (f *Faker) IPv4Address() net.IP {
+ oct := func() int { return 2 + f.Rand.Intn(254) }
+ ip := fmt.Sprintf("%d.%d.%d.%d", oct(), oct(), oct(), oct())
+ return net.ParseIP(ip)
+}
+func (f *Faker) IPv6Address() net.IP {
+ m := 65536
+ ip := fmt.Sprintf("2001:cafe:%x:%x:%x:%x:%x:%x",
+ f.Rand.Intn(m), f.Rand.Intn(m), f.Rand.Intn(m),
+ f.Rand.Intn(m), f.Rand.Intn(m), f.Rand.Intn(m))
+ return net.ParseIP(ip)
+}
+func (f *Faker) URL() string {
+ return "http://" + f.DomainName() + "/" + f.UserName()
+}
+
+// Name returns a random personal name in various formats.
+func (f *Faker) Name() string { return f.parse("name.name") }
+func (f *Faker) FirstName() string { return f.parse("name.first_name") }
+func (f *Faker) LastName() string { return f.parse("name.last_name") }
+func (f *Faker) NamePrefix() string { return f.parse("name.prefix") }
+func (f *Faker) NameSuffix() string { return f.parse("name.suffix") }
+func (f *Faker) JobTitle() string {
+ return f.combine("name.title.descriptor", "name.title.level", "name.title.job")
+}
+
+func capitalize(s string) string {
+ if s == "" {
+ return s
+ }
+ r, n := utf8.DecodeRuneInString(s)
+ return string(unicode.ToTitle(r)) + s[n:]
+}
+
+func fixUmlauts(str string) string {
+ out := make([]rune, 0, len(str))
+ for _, r := range str {
+ switch r {
+ case 'ä':
+ out = append(out, 'a', 'e')
+ case 'ö':
+ out = append(out, 'o', 'e')
+ case 'ü':
+ out = append(out, 'u', 'e')
+ case 'ß':
+ out = append(out, 's', 's')
+ default:
+ out = append(out, r)
+ }
+ }
+ return string(out)
+}
+
+func (f *Faker) combine(keys ...string) string {
+ f.Buffer.Reset()
+ for _, key := range keys {
+ f.Buffer.WriteString(f.parse(key))
+ f.Buffer.WriteByte(' ')
+ }
+ return f.Buffer.String()[0 : f.Buffer.Len()-1]
+}
+
+func (f *Faker) parse(key string) string {
+
+ formats, found := f.Dict[key]
+ if !found {
+ panic("couldn't find key: " + key)
+ }
+ format := f.sample(formats)
+ if needRegex, ok := needRegexParsing[key]; ok {
+ if !needRegex {
+ return format
+ }
+ }
+
+ return recGsub(keyPattern, format, func(s string) string {
+ entryKey := strings.ToLower(s[2 : len(s)-1])
+
+ if strings.Index(entryKey, ".") == -1 {
+ baseKeyIndex := strings.Index(key, ".")
+ entryKey = key[0:baseKeyIndex] + "." + entryKey
+ }
+
+ entry, found := f.Dict[entryKey]
+ if !found {
+ panic("couldn't find entry key: " + entryKey)
+ }
+ return f.sample(entry)
+ })
+}
+
+func recGsub(r *regexp.Regexp, in string, f func(string) string) string {
+ for keepRunning := true; keepRunning; {
+ keepRunning = false
+ in = r.ReplaceAllStringFunc(in, func(s string) string {
+ keepRunning = true
+ return f(s)
+ })
+ }
+ return in
+}
+
+func (f *Faker) sample(set []string) string {
+ idx := f.Rand.Intn(len(set))
+ return set[idx]
+}
+
+func (f *Faker) bothify(in string) string {
+ return f.letterify(f.numerify(in))
+}
+
+func (f *Faker) letterify(in string) string {
+ return in
+}
+
+func (f *Faker) numerify(in string) string {
+ return recGsub(numerifyPattern, in, func(s string) string {
+ return strings.Map(func(r rune) rune {
+ return rune(48 + rand.Intn(9))
+ }, s)
+ })
+}
diff --git a/vendor/github.com/sergi/go-diff/AUTHORS b/vendor/github.com/sergi/go-diff/AUTHORS
new file mode 100644
index 000000000..2d7bb2bf5
--- /dev/null
+++ b/vendor/github.com/sergi/go-diff/AUTHORS
@@ -0,0 +1,25 @@
+# This is the official list of go-diff authors for copyright purposes.
+# This file is distinct from the CONTRIBUTORS files.
+# See the latter for an explanation.
+
+# Names should be added to this file as
+# Name or Organization
+# The email address is not required for organizations.
+
+# Please keep the list sorted.
+
+Danny Yoo
+James Kolb
+Jonathan Amsterdam
+Markus Zimmermann
+Matt Kovars
+Örjan Persson
+Osman Masood
+Robert Carlsen
+Rory Flynn
+Sergi Mansilla
+Shatrugna Sadhu
+Shawn Smith
+Stas Maksimov
+Tor Arvid Lund
+Zac Bergquist
diff --git a/vendor/github.com/sergi/go-diff/CONTRIBUTORS b/vendor/github.com/sergi/go-diff/CONTRIBUTORS
new file mode 100644
index 000000000..369e3d551
--- /dev/null
+++ b/vendor/github.com/sergi/go-diff/CONTRIBUTORS
@@ -0,0 +1,32 @@
+# This is the official list of people who can contribute
+# (and typically have contributed) code to the go-diff
+# repository.
+#
+# The AUTHORS file lists the copyright holders; this file
+# lists people. For example, ACME Inc. employees would be listed here
+# but not in AUTHORS, because ACME Inc. would hold the copyright.
+#
+# When adding J Random Contributor's name to this file,
+# either J's name or J's organization's name should be
+# added to the AUTHORS file.
+#
+# Names should be added to this file like so:
+# Name
+#
+# Please keep the list sorted.
+
+Danny Yoo
+James Kolb
+Jonathan Amsterdam
+Markus Zimmermann
+Matt Kovars
+Örjan Persson
+Osman Masood
+Robert Carlsen
+Rory Flynn
+Sergi Mansilla
+Shatrugna Sadhu
+Shawn Smith
+Stas Maksimov
+Tor Arvid Lund
+Zac Bergquist
diff --git a/vendor/github.com/sergi/go-diff/LICENSE b/vendor/github.com/sergi/go-diff/LICENSE
new file mode 100644
index 000000000..937942c2b
--- /dev/null
+++ b/vendor/github.com/sergi/go-diff/LICENSE
@@ -0,0 +1,20 @@
+Copyright (c) 2012-2016 The go-diff Authors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
diff --git a/vendor/github.com/sergi/go-diff/diffmatchpatch/diff.go b/vendor/github.com/sergi/go-diff/diffmatchpatch/diff.go
new file mode 100644
index 000000000..82ad7bc8f
--- /dev/null
+++ b/vendor/github.com/sergi/go-diff/diffmatchpatch/diff.go
@@ -0,0 +1,1344 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+package diffmatchpatch
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "html"
+ "math"
+ "net/url"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+ "unicode/utf8"
+)
+
+// Operation defines the operation of a diff item.
+type Operation int8
+
+const (
+ // DiffDelete item represents a delete diff.
+ DiffDelete Operation = -1
+ // DiffInsert item represents an insert diff.
+ DiffInsert Operation = 1
+ // DiffEqual item represents an equal diff.
+ DiffEqual Operation = 0
+)
+
+// Diff represents one diff operation
+type Diff struct {
+ Type Operation
+ Text string
+}
+
+func splice(slice []Diff, index int, amount int, elements ...Diff) []Diff {
+ return append(slice[:index], append(elements, slice[index+amount:]...)...)
+}
+
+// DiffMain finds the differences between two texts.
+// If an invalid UTF-8 sequence is encountered, it will be replaced by the Unicode replacement character.
+func (dmp *DiffMatchPatch) DiffMain(text1, text2 string, checklines bool) []Diff {
+ return dmp.DiffMainRunes([]rune(text1), []rune(text2), checklines)
+}
+
+// DiffMainRunes finds the differences between two rune sequences.
+// If an invalid UTF-8 sequence is encountered, it will be replaced by the Unicode replacement character.
+func (dmp *DiffMatchPatch) DiffMainRunes(text1, text2 []rune, checklines bool) []Diff {
+ var deadline time.Time
+ if dmp.DiffTimeout > 0 {
+ deadline = time.Now().Add(dmp.DiffTimeout)
+ }
+ return dmp.diffMainRunes(text1, text2, checklines, deadline)
+}
+
+func (dmp *DiffMatchPatch) diffMainRunes(text1, text2 []rune, checklines bool, deadline time.Time) []Diff {
+ if runesEqual(text1, text2) {
+ var diffs []Diff
+ if len(text1) > 0 {
+ diffs = append(diffs, Diff{DiffEqual, string(text1)})
+ }
+ return diffs
+ }
+ // Trim off common prefix (speedup).
+ commonlength := commonPrefixLength(text1, text2)
+ commonprefix := text1[:commonlength]
+ text1 = text1[commonlength:]
+ text2 = text2[commonlength:]
+
+ // Trim off common suffix (speedup).
+ commonlength = commonSuffixLength(text1, text2)
+ commonsuffix := text1[len(text1)-commonlength:]
+ text1 = text1[:len(text1)-commonlength]
+ text2 = text2[:len(text2)-commonlength]
+
+ // Compute the diff on the middle block.
+ diffs := dmp.diffCompute(text1, text2, checklines, deadline)
+
+ // Restore the prefix and suffix.
+ if len(commonprefix) != 0 {
+ diffs = append([]Diff{Diff{DiffEqual, string(commonprefix)}}, diffs...)
+ }
+ if len(commonsuffix) != 0 {
+ diffs = append(diffs, Diff{DiffEqual, string(commonsuffix)})
+ }
+
+ return dmp.DiffCleanupMerge(diffs)
+}
+
+// diffCompute finds the differences between two rune slices. Assumes that the texts do not have any common prefix or suffix.
+func (dmp *DiffMatchPatch) diffCompute(text1, text2 []rune, checklines bool, deadline time.Time) []Diff {
+ diffs := []Diff{}
+ if len(text1) == 0 {
+ // Just add some text (speedup).
+ return append(diffs, Diff{DiffInsert, string(text2)})
+ } else if len(text2) == 0 {
+ // Just delete some text (speedup).
+ return append(diffs, Diff{DiffDelete, string(text1)})
+ }
+
+ var longtext, shorttext []rune
+ if len(text1) > len(text2) {
+ longtext = text1
+ shorttext = text2
+ } else {
+ longtext = text2
+ shorttext = text1
+ }
+
+ if i := runesIndex(longtext, shorttext); i != -1 {
+ op := DiffInsert
+ // Swap insertions for deletions if diff is reversed.
+ if len(text1) > len(text2) {
+ op = DiffDelete
+ }
+ // Shorter text is inside the longer text (speedup).
+ return []Diff{
+ Diff{op, string(longtext[:i])},
+ Diff{DiffEqual, string(shorttext)},
+ Diff{op, string(longtext[i+len(shorttext):])},
+ }
+ } else if len(shorttext) == 1 {
+ // Single character string.
+ // After the previous speedup, the character can't be an equality.
+ return []Diff{
+ Diff{DiffDelete, string(text1)},
+ Diff{DiffInsert, string(text2)},
+ }
+ // Check to see if the problem can be split in two.
+ } else if hm := dmp.diffHalfMatch(text1, text2); hm != nil {
+ // A half-match was found, sort out the return data.
+ text1A := hm[0]
+ text1B := hm[1]
+ text2A := hm[2]
+ text2B := hm[3]
+ midCommon := hm[4]
+ // Send both pairs off for separate processing.
+ diffsA := dmp.diffMainRunes(text1A, text2A, checklines, deadline)
+ diffsB := dmp.diffMainRunes(text1B, text2B, checklines, deadline)
+ // Merge the results.
+ return append(diffsA, append([]Diff{Diff{DiffEqual, string(midCommon)}}, diffsB...)...)
+ } else if checklines && len(text1) > 100 && len(text2) > 100 {
+ return dmp.diffLineMode(text1, text2, deadline)
+ }
+ return dmp.diffBisect(text1, text2, deadline)
+}
+
+// diffLineMode does a quick line-level diff on both []runes, then rediff the parts for greater accuracy. This speedup can produce non-minimal diffs.
+func (dmp *DiffMatchPatch) diffLineMode(text1, text2 []rune, deadline time.Time) []Diff {
+ // Scan the text on a line-by-line basis first.
+ text1, text2, linearray := dmp.diffLinesToRunes(text1, text2)
+
+ diffs := dmp.diffMainRunes(text1, text2, false, deadline)
+
+ // Convert the diff back to original text.
+ diffs = dmp.DiffCharsToLines(diffs, linearray)
+ // Eliminate freak matches (e.g. blank lines)
+ diffs = dmp.DiffCleanupSemantic(diffs)
+
+ // Rediff any replacement blocks, this time character-by-character.
+ // Add a dummy entry at the end.
+ diffs = append(diffs, Diff{DiffEqual, ""})
+
+ pointer := 0
+ countDelete := 0
+ countInsert := 0
+
+ // NOTE: Rune slices are slower than using strings in this case.
+ textDelete := ""
+ textInsert := ""
+
+ for pointer < len(diffs) {
+ switch diffs[pointer].Type {
+ case DiffInsert:
+ countInsert++
+ textInsert += diffs[pointer].Text
+ case DiffDelete:
+ countDelete++
+ textDelete += diffs[pointer].Text
+ case DiffEqual:
+ // Upon reaching an equality, check for prior redundancies.
+ if countDelete >= 1 && countInsert >= 1 {
+ // Delete the offending records and add the merged ones.
+ diffs = splice(diffs, pointer-countDelete-countInsert,
+ countDelete+countInsert)
+
+ pointer = pointer - countDelete - countInsert
+ a := dmp.diffMainRunes([]rune(textDelete), []rune(textInsert), false, deadline)
+ for j := len(a) - 1; j >= 0; j-- {
+ diffs = splice(diffs, pointer, 0, a[j])
+ }
+ pointer = pointer + len(a)
+ }
+
+ countInsert = 0
+ countDelete = 0
+ textDelete = ""
+ textInsert = ""
+ }
+ pointer++
+ }
+
+ return diffs[:len(diffs)-1] // Remove the dummy entry at the end.
+}
+
+// DiffBisect finds the 'middle snake' of a diff, split the problem in two and return the recursively constructed diff.
+// If an invalid UTF-8 sequence is encountered, it will be replaced by the Unicode replacement character.
+// See Myers 1986 paper: An O(ND) Difference Algorithm and Its Variations.
+func (dmp *DiffMatchPatch) DiffBisect(text1, text2 string, deadline time.Time) []Diff {
+ // Unused in this code, but retained for interface compatibility.
+ return dmp.diffBisect([]rune(text1), []rune(text2), deadline)
+}
+
+// diffBisect finds the 'middle snake' of a diff, splits the problem in two and returns the recursively constructed diff.
+// See Myers's 1986 paper: An O(ND) Difference Algorithm and Its Variations.
+func (dmp *DiffMatchPatch) diffBisect(runes1, runes2 []rune, deadline time.Time) []Diff {
+ // Cache the text lengths to prevent multiple calls.
+ runes1Len, runes2Len := len(runes1), len(runes2)
+
+ maxD := (runes1Len + runes2Len + 1) / 2
+ vOffset := maxD
+ vLength := 2 * maxD
+
+ v1 := make([]int, vLength)
+ v2 := make([]int, vLength)
+ for i := range v1 {
+ v1[i] = -1
+ v2[i] = -1
+ }
+ v1[vOffset+1] = 0
+ v2[vOffset+1] = 0
+
+ delta := runes1Len - runes2Len
+ // If the total number of characters is odd, then the front path will collide with the reverse path.
+ front := (delta%2 != 0)
+ // Offsets for start and end of k loop. Prevents mapping of space beyond the grid.
+ k1start := 0
+ k1end := 0
+ k2start := 0
+ k2end := 0
+ for d := 0; d < maxD; d++ {
+ // Bail out if deadline is reached.
+ if !deadline.IsZero() && time.Now().After(deadline) {
+ break
+ }
+
+ // Walk the front path one step.
+ for k1 := -d + k1start; k1 <= d-k1end; k1 += 2 {
+ k1Offset := vOffset + k1
+ var x1 int
+
+ if k1 == -d || (k1 != d && v1[k1Offset-1] < v1[k1Offset+1]) {
+ x1 = v1[k1Offset+1]
+ } else {
+ x1 = v1[k1Offset-1] + 1
+ }
+
+ y1 := x1 - k1
+ for x1 < runes1Len && y1 < runes2Len {
+ if runes1[x1] != runes2[y1] {
+ break
+ }
+ x1++
+ y1++
+ }
+ v1[k1Offset] = x1
+ if x1 > runes1Len {
+ // Ran off the right of the graph.
+ k1end += 2
+ } else if y1 > runes2Len {
+ // Ran off the bottom of the graph.
+ k1start += 2
+ } else if front {
+ k2Offset := vOffset + delta - k1
+ if k2Offset >= 0 && k2Offset < vLength && v2[k2Offset] != -1 {
+ // Mirror x2 onto top-left coordinate system.
+ x2 := runes1Len - v2[k2Offset]
+ if x1 >= x2 {
+ // Overlap detected.
+ return dmp.diffBisectSplit(runes1, runes2, x1, y1, deadline)
+ }
+ }
+ }
+ }
+ // Walk the reverse path one step.
+ for k2 := -d + k2start; k2 <= d-k2end; k2 += 2 {
+ k2Offset := vOffset + k2
+ var x2 int
+ if k2 == -d || (k2 != d && v2[k2Offset-1] < v2[k2Offset+1]) {
+ x2 = v2[k2Offset+1]
+ } else {
+ x2 = v2[k2Offset-1] + 1
+ }
+ var y2 = x2 - k2
+ for x2 < runes1Len && y2 < runes2Len {
+ if runes1[runes1Len-x2-1] != runes2[runes2Len-y2-1] {
+ break
+ }
+ x2++
+ y2++
+ }
+ v2[k2Offset] = x2
+ if x2 > runes1Len {
+ // Ran off the left of the graph.
+ k2end += 2
+ } else if y2 > runes2Len {
+ // Ran off the top of the graph.
+ k2start += 2
+ } else if !front {
+ k1Offset := vOffset + delta - k2
+ if k1Offset >= 0 && k1Offset < vLength && v1[k1Offset] != -1 {
+ x1 := v1[k1Offset]
+ y1 := vOffset + x1 - k1Offset
+ // Mirror x2 onto top-left coordinate system.
+ x2 = runes1Len - x2
+ if x1 >= x2 {
+ // Overlap detected.
+ return dmp.diffBisectSplit(runes1, runes2, x1, y1, deadline)
+ }
+ }
+ }
+ }
+ }
+ // Diff took too long and hit the deadline or number of diffs equals number of characters, no commonality at all.
+ return []Diff{
+ Diff{DiffDelete, string(runes1)},
+ Diff{DiffInsert, string(runes2)},
+ }
+}
+
+func (dmp *DiffMatchPatch) diffBisectSplit(runes1, runes2 []rune, x, y int,
+ deadline time.Time) []Diff {
+ runes1a := runes1[:x]
+ runes2a := runes2[:y]
+ runes1b := runes1[x:]
+ runes2b := runes2[y:]
+
+ // Compute both diffs serially.
+ diffs := dmp.diffMainRunes(runes1a, runes2a, false, deadline)
+ diffsb := dmp.diffMainRunes(runes1b, runes2b, false, deadline)
+
+ return append(diffs, diffsb...)
+}
+
+// DiffLinesToChars splits two texts into a list of strings, and educes the texts to a string of hashes where each Unicode character represents one line.
+// It's slightly faster to call DiffLinesToRunes first, followed by DiffMainRunes.
+func (dmp *DiffMatchPatch) DiffLinesToChars(text1, text2 string) (string, string, []string) {
+ chars1, chars2, lineArray := dmp.DiffLinesToRunes(text1, text2)
+ return string(chars1), string(chars2), lineArray
+}
+
+// DiffLinesToRunes splits two texts into a list of runes. Each rune represents one line.
+func (dmp *DiffMatchPatch) DiffLinesToRunes(text1, text2 string) ([]rune, []rune, []string) {
+ // '\x00' is a valid character, but various debuggers don't like it. So we'll insert a junk entry to avoid generating a null character.
+ lineArray := []string{""} // e.g. lineArray[4] == 'Hello\n'
+ lineHash := map[string]int{} // e.g. lineHash['Hello\n'] == 4
+
+ chars1 := dmp.diffLinesToRunesMunge(text1, &lineArray, lineHash)
+ chars2 := dmp.diffLinesToRunesMunge(text2, &lineArray, lineHash)
+
+ return chars1, chars2, lineArray
+}
+
+func (dmp *DiffMatchPatch) diffLinesToRunes(text1, text2 []rune) ([]rune, []rune, []string) {
+ return dmp.DiffLinesToRunes(string(text1), string(text2))
+}
+
+// diffLinesToRunesMunge splits a text into an array of strings, and reduces the texts to a []rune where each Unicode character represents one line.
+// We use strings instead of []runes as input mainly because you can't use []rune as a map key.
+func (dmp *DiffMatchPatch) diffLinesToRunesMunge(text string, lineArray *[]string, lineHash map[string]int) []rune {
+ // Walk the text, pulling out a substring for each line. text.split('\n') would would temporarily double our memory footprint. Modifying text would create many large strings to garbage collect.
+ lineStart := 0
+ lineEnd := -1
+ runes := []rune{}
+
+ for lineEnd < len(text)-1 {
+ lineEnd = indexOf(text, "\n", lineStart)
+
+ if lineEnd == -1 {
+ lineEnd = len(text) - 1
+ }
+
+ line := text[lineStart : lineEnd+1]
+ lineStart = lineEnd + 1
+ lineValue, ok := lineHash[line]
+
+ if ok {
+ runes = append(runes, rune(lineValue))
+ } else {
+ *lineArray = append(*lineArray, line)
+ lineHash[line] = len(*lineArray) - 1
+ runes = append(runes, rune(len(*lineArray)-1))
+ }
+ }
+
+ return runes
+}
+
+// DiffCharsToLines rehydrates the text in a diff from a string of line hashes to real lines of text.
+func (dmp *DiffMatchPatch) DiffCharsToLines(diffs []Diff, lineArray []string) []Diff {
+ hydrated := make([]Diff, 0, len(diffs))
+ for _, aDiff := range diffs {
+ chars := aDiff.Text
+ text := make([]string, len(chars))
+
+ for i, r := range chars {
+ text[i] = lineArray[r]
+ }
+
+ aDiff.Text = strings.Join(text, "")
+ hydrated = append(hydrated, aDiff)
+ }
+ return hydrated
+}
+
+// DiffCommonPrefix determines the common prefix length of two strings.
+func (dmp *DiffMatchPatch) DiffCommonPrefix(text1, text2 string) int {
+ // Unused in this code, but retained for interface compatibility.
+ return commonPrefixLength([]rune(text1), []rune(text2))
+}
+
+// DiffCommonSuffix determines the common suffix length of two strings.
+func (dmp *DiffMatchPatch) DiffCommonSuffix(text1, text2 string) int {
+ // Unused in this code, but retained for interface compatibility.
+ return commonSuffixLength([]rune(text1), []rune(text2))
+}
+
+// commonPrefixLength returns the length of the common prefix of two rune slices.
+func commonPrefixLength(text1, text2 []rune) int {
+ short, long := text1, text2
+ if len(short) > len(long) {
+ short, long = long, short
+ }
+ for i, r := range short {
+ if r != long[i] {
+ return i
+ }
+ }
+ return len(short)
+}
+
+// commonSuffixLength returns the length of the common suffix of two rune slices.
+func commonSuffixLength(text1, text2 []rune) int {
+ n := min(len(text1), len(text2))
+ for i := 0; i < n; i++ {
+ if text1[len(text1)-i-1] != text2[len(text2)-i-1] {
+ return i
+ }
+ }
+ return n
+
+ // TODO research and benchmark this, why is it not activated? https://github.com/sergi/go-diff/issues/54
+ // Binary search.
+ // Performance analysis: http://neil.fraser.name/news/2007/10/09/
+ /*
+ pointermin := 0
+ pointermax := math.Min(len(text1), len(text2))
+ pointermid := pointermax
+ pointerend := 0
+ for pointermin < pointermid {
+ if text1[len(text1)-pointermid:len(text1)-pointerend] ==
+ text2[len(text2)-pointermid:len(text2)-pointerend] {
+ pointermin = pointermid
+ pointerend = pointermin
+ } else {
+ pointermax = pointermid
+ }
+ pointermid = math.Floor((pointermax-pointermin)/2 + pointermin)
+ }
+ return pointermid
+ */
+}
+
+// DiffCommonOverlap determines if the suffix of one string is the prefix of another.
+func (dmp *DiffMatchPatch) DiffCommonOverlap(text1 string, text2 string) int {
+ // Cache the text lengths to prevent multiple calls.
+ text1Length := len(text1)
+ text2Length := len(text2)
+ // Eliminate the null case.
+ if text1Length == 0 || text2Length == 0 {
+ return 0
+ }
+ // Truncate the longer string.
+ if text1Length > text2Length {
+ text1 = text1[text1Length-text2Length:]
+ } else if text1Length < text2Length {
+ text2 = text2[0:text1Length]
+ }
+ textLength := int(math.Min(float64(text1Length), float64(text2Length)))
+ // Quick check for the worst case.
+ if text1 == text2 {
+ return textLength
+ }
+
+ // Start by looking for a single character match and increase length until no match is found. Performance analysis: http://neil.fraser.name/news/2010/11/04/
+ best := 0
+ length := 1
+ for {
+ pattern := text1[textLength-length:]
+ found := strings.Index(text2, pattern)
+ if found == -1 {
+ break
+ }
+ length += found
+ if found == 0 || text1[textLength-length:] == text2[0:length] {
+ best = length
+ length++
+ }
+ }
+
+ return best
+}
+
+// DiffHalfMatch checks whether the two texts share a substring which is at least half the length of the longer text. This speedup can produce non-minimal diffs.
+func (dmp *DiffMatchPatch) DiffHalfMatch(text1, text2 string) []string {
+ // Unused in this code, but retained for interface compatibility.
+ runeSlices := dmp.diffHalfMatch([]rune(text1), []rune(text2))
+ if runeSlices == nil {
+ return nil
+ }
+
+ result := make([]string, len(runeSlices))
+ for i, r := range runeSlices {
+ result[i] = string(r)
+ }
+ return result
+}
+
+func (dmp *DiffMatchPatch) diffHalfMatch(text1, text2 []rune) [][]rune {
+ if dmp.DiffTimeout <= 0 {
+ // Don't risk returning a non-optimal diff if we have unlimited time.
+ return nil
+ }
+
+ var longtext, shorttext []rune
+ if len(text1) > len(text2) {
+ longtext = text1
+ shorttext = text2
+ } else {
+ longtext = text2
+ shorttext = text1
+ }
+
+ if len(longtext) < 4 || len(shorttext)*2 < len(longtext) {
+ return nil // Pointless.
+ }
+
+ // First check if the second quarter is the seed for a half-match.
+ hm1 := dmp.diffHalfMatchI(longtext, shorttext, int(float64(len(longtext)+3)/4))
+
+ // Check again based on the third quarter.
+ hm2 := dmp.diffHalfMatchI(longtext, shorttext, int(float64(len(longtext)+1)/2))
+
+ hm := [][]rune{}
+ if hm1 == nil && hm2 == nil {
+ return nil
+ } else if hm2 == nil {
+ hm = hm1
+ } else if hm1 == nil {
+ hm = hm2
+ } else {
+ // Both matched. Select the longest.
+ if len(hm1[4]) > len(hm2[4]) {
+ hm = hm1
+ } else {
+ hm = hm2
+ }
+ }
+
+ // A half-match was found, sort out the return data.
+ if len(text1) > len(text2) {
+ return hm
+ }
+
+ return [][]rune{hm[2], hm[3], hm[0], hm[1], hm[4]}
+}
+
// diffHalfMatchI checks if a substring of shorttext exists within longtext such that the substring is at least half the length of longtext.
// Returns a slice containing the prefix of longtext, the suffix of longtext, the prefix of shorttext, the suffix of shorttext and the common middle, or nil if there was no match.
func (dmp *DiffMatchPatch) diffHalfMatchI(l, s []rune, i int) [][]rune {
	var bestCommonA []rune
	var bestCommonB []rune
	var bestCommonLen int
	var bestLongtextA []rune
	var bestLongtextB []rune
	var bestShorttextA []rune
	var bestShorttextB []rune

	// Start with a 1/4 length substring at position i as a seed.
	seed := l[i : i+len(l)/4]

	// Slide the seed across the short text; every occurrence is a candidate
	// anchor around which a longer common region may extend in both directions.
	for j := runesIndexOf(s, seed, 0); j != -1; j = runesIndexOf(s, seed, j+1) {
		prefixLength := commonPrefixLength(l[i:], s[j:])
		suffixLength := commonSuffixLength(l[:i], s[:j])

		if bestCommonLen < suffixLength+prefixLength {
			// Better candidate: the common middle is the shared suffix before
			// the anchor plus the shared prefix after it (views into s).
			bestCommonA = s[j-suffixLength : j]
			bestCommonB = s[j : j+prefixLength]
			bestCommonLen = len(bestCommonA) + len(bestCommonB)
			bestLongtextA = l[:i-suffixLength]
			bestLongtextB = l[i+prefixLength:]
			bestShorttextA = s[:j-suffixLength]
			bestShorttextB = s[j+prefixLength:]
		}
	}

	// Only accept the split if the common middle is at least half of longtext.
	if bestCommonLen*2 < len(l) {
		return nil
	}

	return [][]rune{
		bestLongtextA,
		bestLongtextB,
		bestShorttextA,
		bestShorttextB,
		append(bestCommonA, bestCommonB...),
	}
}
+
// DiffCleanupSemantic reduces the number of edits by eliminating semantically trivial equalities.
func (dmp *DiffMatchPatch) DiffCleanupSemantic(diffs []Diff) []Diff {
	changes := false
	// Stack of indices where equalities are found.
	// Implemented as a singly linked list used as a LIFO stack.
	type equality struct {
		data int
		next *equality
	}
	var equalities *equality

	var lastequality string
	// Always equal to diffs[equalities[equalitiesLength - 1]][1]
	var pointer int // Index of current position.
	// Number of characters that changed prior to the equality.
	var lengthInsertions1, lengthDeletions1 int
	// Number of characters that changed after the equality.
	var lengthInsertions2, lengthDeletions2 int

	for pointer < len(diffs) {
		if diffs[pointer].Type == DiffEqual {
			// Equality found.

			// Push its index and roll the "after" counters into the "before" ones.
			equalities = &equality{
				data: pointer,
				next: equalities,
			}
			lengthInsertions1 = lengthInsertions2
			lengthDeletions1 = lengthDeletions2
			lengthInsertions2 = 0
			lengthDeletions2 = 0
			lastequality = diffs[pointer].Text
		} else {
			// An insertion or deletion.

			if diffs[pointer].Type == DiffInsert {
				lengthInsertions2 += len(diffs[pointer].Text)
			} else {
				lengthDeletions2 += len(diffs[pointer].Text)
			}
			// Eliminate an equality that is smaller or equal to the edits on both sides of it.
			difference1 := int(math.Max(float64(lengthInsertions1), float64(lengthDeletions1)))
			difference2 := int(math.Max(float64(lengthInsertions2), float64(lengthDeletions2)))
			if len(lastequality) > 0 &&
				(len(lastequality) <= difference1) &&
				(len(lastequality) <= difference2) {
				// Duplicate record.
				insPoint := equalities.data
				diffs = append(
					diffs[:insPoint],
					append([]Diff{Diff{DiffDelete, lastequality}}, diffs[insPoint:]...)...)

				// Change second copy to insert.
				diffs[insPoint+1].Type = DiffInsert
				// Throw away the equality we just deleted.
				equalities = equalities.next

				// Throw away the previous equality too; it needs re-evaluation.
				if equalities != nil {
					equalities = equalities.next
				}
				if equalities != nil {
					pointer = equalities.data
				} else {
					// No previous equality: the pointer++ below restarts the
					// scan from the beginning of diffs.
					pointer = -1
				}

				lengthInsertions1 = 0 // Reset the counters.
				lengthDeletions1 = 0
				lengthInsertions2 = 0
				lengthDeletions2 = 0
				lastequality = ""
				changes = true
			}
		}
		pointer++
	}

	// Normalize the diff.
	if changes {
		diffs = dmp.DiffCleanupMerge(diffs)
	}
	diffs = dmp.DiffCleanupSemanticLossless(diffs)
	// Find any overlaps between deletions and insertions.
	// e.g: abcxxxxxxdef
	// -> abcxxxdef
	// e.g: xxxabcdefxxx
	// -> defxxxabc
	// Only extract an overlap if it is as big as the edit ahead or behind it.
	pointer = 1
	for pointer < len(diffs) {
		if diffs[pointer-1].Type == DiffDelete &&
			diffs[pointer].Type == DiffInsert {
			deletion := diffs[pointer-1].Text
			insertion := diffs[pointer].Text
			overlapLength1 := dmp.DiffCommonOverlap(deletion, insertion)
			overlapLength2 := dmp.DiffCommonOverlap(insertion, deletion)
			if overlapLength1 >= overlapLength2 {
				if float64(overlapLength1) >= float64(len(deletion))/2 ||
					float64(overlapLength1) >= float64(len(insertion))/2 {

					// Overlap found. Insert an equality and trim the surrounding edits.
					diffs = append(
						diffs[:pointer],
						append([]Diff{Diff{DiffEqual, insertion[:overlapLength1]}}, diffs[pointer:]...)...)

					diffs[pointer-1].Text =
						deletion[0 : len(deletion)-overlapLength1]
					diffs[pointer+1].Text = insertion[overlapLength1:]
					// Skip over the equality that was just inserted.
					pointer++
				}
			} else {
				if float64(overlapLength2) >= float64(len(deletion))/2 ||
					float64(overlapLength2) >= float64(len(insertion))/2 {
					// Reverse overlap found. Insert an equality and swap and trim the surrounding edits.
					overlap := Diff{DiffEqual, deletion[:overlapLength2]}
					diffs = append(
						diffs[:pointer],
						append([]Diff{overlap}, diffs[pointer:]...)...)

					diffs[pointer-1].Type = DiffInsert
					diffs[pointer-1].Text = insertion[0 : len(insertion)-overlapLength2]
					diffs[pointer+1].Type = DiffDelete
					diffs[pointer+1].Text = deletion[overlapLength2:]
					// Skip over the equality that was just inserted.
					pointer++
				}
			}
			pointer++
		}
		pointer++
	}

	return diffs
}
+
// Define some regex patterns for matching boundaries.
var (
	nonAlphaNumericRegex = regexp.MustCompile(`[^a-zA-Z0-9]`) // any character that is not ASCII alphanumeric
	whitespaceRegex = regexp.MustCompile(`\s`)                // any whitespace character
	linebreakRegex = regexp.MustCompile(`[\r\n]`)             // carriage return or newline
	blanklineEndRegex = regexp.MustCompile(`\n\r?\n$`)        // text that ends with a blank line
	blanklineStartRegex = regexp.MustCompile(`^\r?\n\r?\n`)   // text that starts with a blank line
)
+
+// diffCleanupSemanticScore computes a score representing whether the internal boundary falls on logical boundaries.
+// Scores range from 6 (best) to 0 (worst). Closure, but does not reference any external variables.
+func diffCleanupSemanticScore(one, two string) int {
+ if len(one) == 0 || len(two) == 0 {
+ // Edges are the best.
+ return 6
+ }
+
+ // Each port of this function behaves slightly differently due to subtle differences in each language's definition of things like 'whitespace'. Since this function's purpose is largely cosmetic, the choice has been made to use each language's native features rather than force total conformity.
+ rune1, _ := utf8.DecodeLastRuneInString(one)
+ rune2, _ := utf8.DecodeRuneInString(two)
+ char1 := string(rune1)
+ char2 := string(rune2)
+
+ nonAlphaNumeric1 := nonAlphaNumericRegex.MatchString(char1)
+ nonAlphaNumeric2 := nonAlphaNumericRegex.MatchString(char2)
+ whitespace1 := nonAlphaNumeric1 && whitespaceRegex.MatchString(char1)
+ whitespace2 := nonAlphaNumeric2 && whitespaceRegex.MatchString(char2)
+ lineBreak1 := whitespace1 && linebreakRegex.MatchString(char1)
+ lineBreak2 := whitespace2 && linebreakRegex.MatchString(char2)
+ blankLine1 := lineBreak1 && blanklineEndRegex.MatchString(one)
+ blankLine2 := lineBreak2 && blanklineEndRegex.MatchString(two)
+
+ if blankLine1 || blankLine2 {
+ // Five points for blank lines.
+ return 5
+ } else if lineBreak1 || lineBreak2 {
+ // Four points for line breaks.
+ return 4
+ } else if nonAlphaNumeric1 && !whitespace1 && whitespace2 {
+ // Three points for end of sentences.
+ return 3
+ } else if whitespace1 || whitespace2 {
+ // Two points for whitespace.
+ return 2
+ } else if nonAlphaNumeric1 || nonAlphaNumeric2 {
+ // One point for non-alphanumeric.
+ return 1
+ }
+ return 0
+}
+
// DiffCleanupSemanticLossless looks for single edits surrounded on both sides by equalities which can be shifted sideways to align the edit to a word boundary.
// E.g: "The c<ins>at c</ins>ame." -> "The <ins>cat </ins>came."
func (dmp *DiffMatchPatch) DiffCleanupSemanticLossless(diffs []Diff) []Diff {
	pointer := 1

	// Intentionally ignore the first and last element (don't need checking).
	for pointer < len(diffs)-1 {
		if diffs[pointer-1].Type == DiffEqual &&
			diffs[pointer+1].Type == DiffEqual {

			// This is a single edit surrounded by equalities.
			equality1 := diffs[pointer-1].Text
			edit := diffs[pointer].Text
			equality2 := diffs[pointer+1].Text

			// First, shift the edit as far left as possible.
			commonOffset := dmp.DiffCommonSuffix(equality1, edit)
			if commonOffset > 0 {
				// Move the shared suffix of equality1/edit to the front of
				// edit and equality2.
				commonString := edit[len(edit)-commonOffset:]
				equality1 = equality1[0 : len(equality1)-commonOffset]
				edit = commonString + edit[:len(edit)-commonOffset]
				equality2 = commonString + equality2
			}

			// Second, step character by character right, looking for the best fit.
			bestEquality1 := equality1
			bestEdit := edit
			bestEquality2 := equality2
			bestScore := diffCleanupSemanticScore(equality1, edit) +
				diffCleanupSemanticScore(edit, equality2)

			for len(edit) != 0 && len(equality2) != 0 {
				_, sz := utf8.DecodeRuneInString(edit)
				// Stop once the leading rune of edit no longer matches the
				// leading bytes of equality2 (no further right-shift possible).
				if len(equality2) < sz || edit[:sz] != equality2[:sz] {
					break
				}
				// Rotate one rune: equality1 grows, edit slides right,
				// equality2 shrinks.
				equality1 += edit[:sz]
				edit = edit[sz:] + equality2[:sz]
				equality2 = equality2[sz:]
				score := diffCleanupSemanticScore(equality1, edit) +
					diffCleanupSemanticScore(edit, equality2)
				// The >= encourages trailing rather than leading whitespace on edits.
				if score >= bestScore {
					bestScore = score
					bestEquality1 = equality1
					bestEdit = edit
					bestEquality2 = equality2
				}
			}

			if diffs[pointer-1].Text != bestEquality1 {
				// We have an improvement, save it back to the diff.
				if len(bestEquality1) != 0 {
					diffs[pointer-1].Text = bestEquality1
				} else {
					// The left equality shrank to nothing; remove it.
					diffs = splice(diffs, pointer-1, 1)
					pointer--
				}

				diffs[pointer].Text = bestEdit
				if len(bestEquality2) != 0 {
					diffs[pointer+1].Text = bestEquality2
				} else {
					// The right equality shrank to nothing; remove it.
					diffs = append(diffs[:pointer+1], diffs[pointer+2:]...)
					pointer--
				}
			}
		}
		pointer++
	}

	return diffs
}
+
// DiffCleanupEfficiency reduces the number of edits by eliminating operationally trivial equalities.
func (dmp *DiffMatchPatch) DiffCleanupEfficiency(diffs []Diff) []Diff {
	changes := false
	// Stack of indices where equalities are found.
	// Implemented as a singly linked list used as a LIFO stack.
	type equality struct {
		data int
		next *equality
	}
	var equalities *equality
	// Always equal to equalities[equalitiesLength-1][1]
	lastequality := ""
	pointer := 0 // Index of current position.
	// Is there an insertion operation before the last equality.
	preIns := false
	// Is there a deletion operation before the last equality.
	preDel := false
	// Is there an insertion operation after the last equality.
	postIns := false
	// Is there a deletion operation after the last equality.
	postDel := false
	for pointer < len(diffs) {
		if diffs[pointer].Type == DiffEqual { // Equality found.
			if len(diffs[pointer].Text) < dmp.DiffEditCost &&
				(postIns || postDel) {
				// Candidate found.
				equalities = &equality{
					data: pointer,
					next: equalities,
				}
				preIns = postIns
				preDel = postDel
				lastequality = diffs[pointer].Text
			} else {
				// Not a candidate, and can never become one.
				equalities = nil
				lastequality = ""
			}
			postIns = false
			postDel = false
		} else { // An insertion or deletion.
			if diffs[pointer].Type == DiffDelete {
				postDel = true
			} else {
				postIns = true
			}

			// Five types to be split:
			// <ins>A</ins><del>B</del>XY<ins>C</ins><del>D</del>
			// <ins>A</ins>X<ins>C</ins><del>D</del>
			// <ins>A</ins><del>B</del>X<ins>C</ins>
			// <ins>A</del>X<ins>C</ins><del>D</del>
			// <ins>A</ins><del>B</del>X<del>C</del>
			// sumPres counts how many of the four pre/post flags are set.
			var sumPres int
			if preIns {
				sumPres++
			}
			if preDel {
				sumPres++
			}
			if postIns {
				sumPres++
			}
			if postDel {
				sumPres++
			}
			// Split if edits surround the equality on all four sides, or on
			// three sides when the equality is shorter than half the edit cost.
			if len(lastequality) > 0 &&
				((preIns && preDel && postIns && postDel) ||
					((len(lastequality) < dmp.DiffEditCost/2) && sumPres == 3)) {

				insPoint := equalities.data

				// Duplicate record.
				diffs = append(diffs[:insPoint],
					append([]Diff{Diff{DiffDelete, lastequality}}, diffs[insPoint:]...)...)

				// Change second copy to insert.
				diffs[insPoint+1].Type = DiffInsert
				// Throw away the equality we just deleted.
				equalities = equalities.next
				lastequality = ""

				if preIns && preDel {
					// No changes made which could affect previous entry, keep going.
					postIns = true
					postDel = true
					equalities = nil
				} else {
					// Drop the previous equality and rewind to re-examine it.
					if equalities != nil {
						equalities = equalities.next
					}
					if equalities != nil {
						pointer = equalities.data
					} else {
						// pointer++ below restarts the scan from index 0.
						pointer = -1
					}
					postIns = false
					postDel = false
				}
				changes = true
			}
		}
		pointer++
	}

	if changes {
		diffs = dmp.DiffCleanupMerge(diffs)
	}

	return diffs
}
+
+// DiffCleanupMerge reorders and merges like edit sections. Merge equalities.
+// Any edit section can move as long as it doesn't cross an equality.
+func (dmp *DiffMatchPatch) DiffCleanupMerge(diffs []Diff) []Diff {
+ // Add a dummy entry at the end.
+ diffs = append(diffs, Diff{DiffEqual, ""})
+ pointer := 0
+ countDelete := 0
+ countInsert := 0
+ commonlength := 0
+ textDelete := []rune(nil)
+ textInsert := []rune(nil)
+
+ for pointer < len(diffs) {
+ switch diffs[pointer].Type {
+ case DiffInsert:
+ countInsert++
+ textInsert = append(textInsert, []rune(diffs[pointer].Text)...)
+ pointer++
+ break
+ case DiffDelete:
+ countDelete++
+ textDelete = append(textDelete, []rune(diffs[pointer].Text)...)
+ pointer++
+ break
+ case DiffEqual:
+ // Upon reaching an equality, check for prior redundancies.
+ if countDelete+countInsert > 1 {
+ if countDelete != 0 && countInsert != 0 {
+ // Factor out any common prefixies.
+ commonlength = commonPrefixLength(textInsert, textDelete)
+ if commonlength != 0 {
+ x := pointer - countDelete - countInsert
+ if x > 0 && diffs[x-1].Type == DiffEqual {
+ diffs[x-1].Text += string(textInsert[:commonlength])
+ } else {
+ diffs = append([]Diff{Diff{DiffEqual, string(textInsert[:commonlength])}}, diffs...)
+ pointer++
+ }
+ textInsert = textInsert[commonlength:]
+ textDelete = textDelete[commonlength:]
+ }
+ // Factor out any common suffixies.
+ commonlength = commonSuffixLength(textInsert, textDelete)
+ if commonlength != 0 {
+ insertIndex := len(textInsert) - commonlength
+ deleteIndex := len(textDelete) - commonlength
+ diffs[pointer].Text = string(textInsert[insertIndex:]) + diffs[pointer].Text
+ textInsert = textInsert[:insertIndex]
+ textDelete = textDelete[:deleteIndex]
+ }
+ }
+ // Delete the offending records and add the merged ones.
+ if countDelete == 0 {
+ diffs = splice(diffs, pointer-countInsert,
+ countDelete+countInsert,
+ Diff{DiffInsert, string(textInsert)})
+ } else if countInsert == 0 {
+ diffs = splice(diffs, pointer-countDelete,
+ countDelete+countInsert,
+ Diff{DiffDelete, string(textDelete)})
+ } else {
+ diffs = splice(diffs, pointer-countDelete-countInsert,
+ countDelete+countInsert,
+ Diff{DiffDelete, string(textDelete)},
+ Diff{DiffInsert, string(textInsert)})
+ }
+
+ pointer = pointer - countDelete - countInsert + 1
+ if countDelete != 0 {
+ pointer++
+ }
+ if countInsert != 0 {
+ pointer++
+ }
+ } else if pointer != 0 && diffs[pointer-1].Type == DiffEqual {
+ // Merge this equality with the previous one.
+ diffs[pointer-1].Text += diffs[pointer].Text
+ diffs = append(diffs[:pointer], diffs[pointer+1:]...)
+ } else {
+ pointer++
+ }
+ countInsert = 0
+ countDelete = 0
+ textDelete = nil
+ textInsert = nil
+ break
+ }
+ }
+
+ if len(diffs[len(diffs)-1].Text) == 0 {
+ diffs = diffs[0 : len(diffs)-1] // Remove the dummy entry at the end.
+ }
+
+ // Second pass: look for single edits surrounded on both sides by equalities which can be shifted sideways to eliminate an equality. E.g: ABAC -> ABAC
+ changes := false
+ pointer = 1
+ // Intentionally ignore the first and last element (don't need checking).
+ for pointer < (len(diffs) - 1) {
+ if diffs[pointer-1].Type == DiffEqual &&
+ diffs[pointer+1].Type == DiffEqual {
+ // This is a single edit surrounded by equalities.
+ if strings.HasSuffix(diffs[pointer].Text, diffs[pointer-1].Text) {
+ // Shift the edit over the previous equality.
+ diffs[pointer].Text = diffs[pointer-1].Text +
+ diffs[pointer].Text[:len(diffs[pointer].Text)-len(diffs[pointer-1].Text)]
+ diffs[pointer+1].Text = diffs[pointer-1].Text + diffs[pointer+1].Text
+ diffs = splice(diffs, pointer-1, 1)
+ changes = true
+ } else if strings.HasPrefix(diffs[pointer].Text, diffs[pointer+1].Text) {
+ // Shift the edit over the next equality.
+ diffs[pointer-1].Text += diffs[pointer+1].Text
+ diffs[pointer].Text =
+ diffs[pointer].Text[len(diffs[pointer+1].Text):] + diffs[pointer+1].Text
+ diffs = splice(diffs, pointer+1, 1)
+ changes = true
+ }
+ }
+ pointer++
+ }
+
+ // If shifts were made, the diff needs reordering and another shift sweep.
+ if changes {
+ diffs = dmp.DiffCleanupMerge(diffs)
+ }
+
+ return diffs
+}
+
+// DiffXIndex returns the equivalent location in s2.
+func (dmp *DiffMatchPatch) DiffXIndex(diffs []Diff, loc int) int {
+ chars1 := 0
+ chars2 := 0
+ lastChars1 := 0
+ lastChars2 := 0
+ lastDiff := Diff{}
+ for i := 0; i < len(diffs); i++ {
+ aDiff := diffs[i]
+ if aDiff.Type != DiffInsert {
+ // Equality or deletion.
+ chars1 += len(aDiff.Text)
+ }
+ if aDiff.Type != DiffDelete {
+ // Equality or insertion.
+ chars2 += len(aDiff.Text)
+ }
+ if chars1 > loc {
+ // Overshot the location.
+ lastDiff = aDiff
+ break
+ }
+ lastChars1 = chars1
+ lastChars2 = chars2
+ }
+ if lastDiff.Type == DiffDelete {
+ // The location was deleted.
+ return lastChars2
+ }
+ // Add the remaining character length.
+ return lastChars2 + (loc - lastChars1)
+}
+
+// DiffPrettyHtml converts a []Diff into a pretty HTML report.
+// It is intended as an example from which to write one's own display functions.
+func (dmp *DiffMatchPatch) DiffPrettyHtml(diffs []Diff) string {
+ var buff bytes.Buffer
+ for _, diff := range diffs {
+ text := strings.Replace(html.EscapeString(diff.Text), "\n", "¶
", -1)
+ switch diff.Type {
+ case DiffInsert:
+ _, _ = buff.WriteString("")
+ _, _ = buff.WriteString(text)
+ _, _ = buff.WriteString("")
+ case DiffDelete:
+ _, _ = buff.WriteString("")
+ _, _ = buff.WriteString(text)
+ _, _ = buff.WriteString("")
+ case DiffEqual:
+ _, _ = buff.WriteString("")
+ _, _ = buff.WriteString(text)
+ _, _ = buff.WriteString("")
+ }
+ }
+ return buff.String()
+}
+
+// DiffPrettyText converts a []Diff into a colored text report.
+func (dmp *DiffMatchPatch) DiffPrettyText(diffs []Diff) string {
+ var buff bytes.Buffer
+ for _, diff := range diffs {
+ text := diff.Text
+
+ switch diff.Type {
+ case DiffInsert:
+ _, _ = buff.WriteString("\x1b[32m")
+ _, _ = buff.WriteString(text)
+ _, _ = buff.WriteString("\x1b[0m")
+ case DiffDelete:
+ _, _ = buff.WriteString("\x1b[31m")
+ _, _ = buff.WriteString(text)
+ _, _ = buff.WriteString("\x1b[0m")
+ case DiffEqual:
+ _, _ = buff.WriteString(text)
+ }
+ }
+
+ return buff.String()
+}
+
+// DiffText1 computes and returns the source text (all equalities and deletions).
+func (dmp *DiffMatchPatch) DiffText1(diffs []Diff) string {
+ //StringBuilder text = new StringBuilder()
+ var text bytes.Buffer
+
+ for _, aDiff := range diffs {
+ if aDiff.Type != DiffInsert {
+ _, _ = text.WriteString(aDiff.Text)
+ }
+ }
+ return text.String()
+}
+
+// DiffText2 computes and returns the destination text (all equalities and insertions).
+func (dmp *DiffMatchPatch) DiffText2(diffs []Diff) string {
+ var text bytes.Buffer
+
+ for _, aDiff := range diffs {
+ if aDiff.Type != DiffDelete {
+ _, _ = text.WriteString(aDiff.Text)
+ }
+ }
+ return text.String()
+}
+
+// DiffLevenshtein computes the Levenshtein distance that is the number of inserted, deleted or substituted characters.
+func (dmp *DiffMatchPatch) DiffLevenshtein(diffs []Diff) int {
+ levenshtein := 0
+ insertions := 0
+ deletions := 0
+
+ for _, aDiff := range diffs {
+ switch aDiff.Type {
+ case DiffInsert:
+ insertions += len(aDiff.Text)
+ case DiffDelete:
+ deletions += len(aDiff.Text)
+ case DiffEqual:
+ // A deletion and an insertion is one substitution.
+ levenshtein += max(insertions, deletions)
+ insertions = 0
+ deletions = 0
+ }
+ }
+
+ levenshtein += max(insertions, deletions)
+ return levenshtein
+}
+
+// DiffToDelta crushes the diff into an encoded string which describes the operations required to transform text1 into text2.
+// E.g. =3\t-2\t+ing -> Keep 3 chars, delete 2 chars, insert 'ing'. Operations are tab-separated. Inserted text is escaped using %xx notation.
+func (dmp *DiffMatchPatch) DiffToDelta(diffs []Diff) string {
+ var text bytes.Buffer
+ for _, aDiff := range diffs {
+ switch aDiff.Type {
+ case DiffInsert:
+ _, _ = text.WriteString("+")
+ _, _ = text.WriteString(strings.Replace(url.QueryEscape(aDiff.Text), "+", " ", -1))
+ _, _ = text.WriteString("\t")
+ break
+ case DiffDelete:
+ _, _ = text.WriteString("-")
+ _, _ = text.WriteString(strconv.Itoa(utf8.RuneCountInString(aDiff.Text)))
+ _, _ = text.WriteString("\t")
+ break
+ case DiffEqual:
+ _, _ = text.WriteString("=")
+ _, _ = text.WriteString(strconv.Itoa(utf8.RuneCountInString(aDiff.Text)))
+ _, _ = text.WriteString("\t")
+ break
+ }
+ }
+ delta := text.String()
+ if len(delta) != 0 {
+ // Strip off trailing tab character.
+ delta = delta[0 : utf8.RuneCountInString(delta)-1]
+ delta = unescaper.Replace(delta)
+ }
+ return delta
+}
+
// DiffFromDelta given the original text1, and an encoded string which describes the operations required to transform text1 into text2, compute the full diff.
func (dmp *DiffMatchPatch) DiffFromDelta(text1 string, delta string) (diffs []Diff, err error) {
	// i tracks how many runes of text1 have been consumed so far.
	i := 0
	runes := []rune(text1)

	for _, token := range strings.Split(delta, "\t") {
		if len(token) == 0 {
			// Blank tokens are ok (from a trailing \t).
			continue
		}

		// Each token begins with a one character parameter which specifies the operation of this token (delete, insert, equality).
		param := token[1:]

		switch op := token[0]; op {
		case '+':
			// Decode would change all "+" to " ", so protect literal '+' first.
			param = strings.Replace(param, "+", "%2b", -1)
			param, err = url.QueryUnescape(param)
			if err != nil {
				return nil, err
			}
			if !utf8.ValidString(param) {
				return nil, fmt.Errorf("invalid UTF-8 token: %q", param)
			}

			diffs = append(diffs, Diff{DiffInsert, param})
		case '=', '-':
			n, err := strconv.ParseInt(param, 10, 0)
			if err != nil {
				return nil, err
			} else if n < 0 {
				return nil, errors.New("Negative number in DiffFromDelta: " + param)
			}

			i += int(n)
			// Break out if we are out of bounds, go1.6 can't handle this very well
			// (i is left > len(runes), so the length check below reports the error).
			if i > len(runes) {
				break
			}
			// Remember that string slicing is by byte - we want by rune here.
			text := string(runes[i-int(n) : i])

			if op == '=' {
				diffs = append(diffs, Diff{DiffEqual, text})
			} else {
				diffs = append(diffs, Diff{DiffDelete, text})
			}
		default:
			// Anything else is an error.
			return nil, errors.New("Invalid diff operation in DiffFromDelta: " + string(token[0]))
		}
	}

	// Every rune of text1 must be accounted for by '=' and '-' tokens.
	if i != len(runes) {
		return nil, fmt.Errorf("Delta length (%v) is different from source text length (%v)", i, len(text1))
	}

	return diffs, nil
}
diff --git a/vendor/github.com/sergi/go-diff/diffmatchpatch/diffmatchpatch.go b/vendor/github.com/sergi/go-diff/diffmatchpatch/diffmatchpatch.go
new file mode 100644
index 000000000..d3acc32ce
--- /dev/null
+++ b/vendor/github.com/sergi/go-diff/diffmatchpatch/diffmatchpatch.go
@@ -0,0 +1,46 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+// Package diffmatchpatch offers robust algorithms to perform the operations required for synchronizing plain text.
+package diffmatchpatch
+
+import (
+ "time"
+)
+
// DiffMatchPatch holds the configuration for diff-match-patch operations.
type DiffMatchPatch struct {
	// How long to map a diff before giving up (0 for infinity).
	// Note this is a time.Duration, not a number of seconds.
	DiffTimeout time.Duration
	// Cost of an empty edit operation in terms of edit characters.
	DiffEditCost int
	// How far to search for a match (0 = exact location, 1000+ = broad match). A match this many characters away from the expected location will add 1.0 to the score (0.0 is a perfect match).
	MatchDistance int
	// When deleting a large block of text (over ~64 characters), how close do the contents have to be to match the expected contents. (0.0 = perfection, 1.0 = very loose). Note that MatchThreshold controls how closely the end points of a delete need to match.
	PatchDeleteThreshold float64
	// Chunk size for context length.
	PatchMargin int
	// The number of bits in an int.
	MatchMaxBits int
	// At what point is no match declared (0.0 = perfection, 1.0 = very loose).
	MatchThreshold float64
}
+
+// New creates a new DiffMatchPatch object with default parameters.
+func New() *DiffMatchPatch {
+ // Defaults.
+ return &DiffMatchPatch{
+ DiffTimeout: time.Second,
+ DiffEditCost: 4,
+ MatchThreshold: 0.5,
+ MatchDistance: 1000,
+ PatchDeleteThreshold: 0.5,
+ PatchMargin: 4,
+ MatchMaxBits: 32,
+ }
+}
diff --git a/vendor/github.com/sergi/go-diff/diffmatchpatch/match.go b/vendor/github.com/sergi/go-diff/diffmatchpatch/match.go
new file mode 100644
index 000000000..17374e109
--- /dev/null
+++ b/vendor/github.com/sergi/go-diff/diffmatchpatch/match.go
@@ -0,0 +1,160 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+package diffmatchpatch
+
+import (
+ "math"
+)
+
+// MatchMain locates the best instance of 'pattern' in 'text' near 'loc'.
+// Returns -1 if no match found.
+func (dmp *DiffMatchPatch) MatchMain(text, pattern string, loc int) int {
+ // Check for null inputs not needed since null can't be passed in C#.
+
+ loc = int(math.Max(0, math.Min(float64(loc), float64(len(text)))))
+ if text == pattern {
+ // Shortcut (potentially not guaranteed by the algorithm)
+ return 0
+ } else if len(text) == 0 {
+ // Nothing to match.
+ return -1
+ } else if loc+len(pattern) <= len(text) && text[loc:loc+len(pattern)] == pattern {
+ // Perfect match at the perfect spot! (Includes case of null pattern)
+ return loc
+ }
+ // Do a fuzzy compare.
+ return dmp.MatchBitap(text, pattern, loc)
+}
+
// MatchBitap locates the best instance of 'pattern' in 'text' near 'loc' using the Bitap algorithm.
// Returns -1 if no match was found.
func (dmp *DiffMatchPatch) MatchBitap(text, pattern string, loc int) int {
	// Initialise the alphabet.
	s := dmp.MatchAlphabet(pattern)

	// Highest score beyond which we give up.
	scoreThreshold := dmp.MatchThreshold
	// Is there a nearby exact match? (speedup)
	bestLoc := indexOf(text, pattern, loc)
	if bestLoc != -1 {
		scoreThreshold = math.Min(dmp.matchBitapScore(0, bestLoc, loc,
			pattern), scoreThreshold)
		// What about in the other direction? (speedup)
		bestLoc = lastIndexOf(text, pattern, loc+len(pattern))
		if bestLoc != -1 {
			scoreThreshold = math.Min(dmp.matchBitapScore(0, bestLoc, loc,
				pattern), scoreThreshold)
		}
	}

	// Initialise the bit arrays.
	matchmask := 1 << uint((len(pattern) - 1))
	bestLoc = -1

	var binMin, binMid int
	binMax := len(pattern) + len(text)
	lastRd := []int{}
	for d := 0; d < len(pattern); d++ {
		// Scan for the best match; each iteration allows for one more error. Run a binary search to determine how far from 'loc' we can stray at this error level.
		binMin = 0
		binMid = binMax
		for binMin < binMid {
			if dmp.matchBitapScore(d, loc+binMid, loc, pattern) <= scoreThreshold {
				binMin = binMid
			} else {
				binMax = binMid
			}
			binMid = (binMax-binMin)/2 + binMin
		}
		// Use the result from this iteration as the maximum for the next.
		binMax = binMid
		start := int(math.Max(1, float64(loc-binMid+1)))
		finish := int(math.Min(float64(loc+binMid), float64(len(text))) + float64(len(pattern)))

		// rd holds the bit-parallel state for this error level; index j is
		// 1-based into text, with a sentinel slot past the end.
		rd := make([]int, finish+2)
		rd[finish+1] = (1 << uint(d)) - 1

		for j := finish; j >= start; j-- {
			var charMatch int
			if len(text) <= j-1 {
				// Out of range.
				charMatch = 0
			} else if _, ok := s[text[j-1]]; !ok {
				charMatch = 0
			} else {
				charMatch = s[text[j-1]]
			}

			if d == 0 {
				// First pass: exact match.
				rd[j] = ((rd[j+1] << 1) | 1) & charMatch
			} else {
				// Subsequent passes: fuzzy match.
				rd[j] = ((rd[j+1]<<1)|1)&charMatch | (((lastRd[j+1] | lastRd[j]) << 1) | 1) | lastRd[j+1]
			}
			if (rd[j] & matchmask) != 0 {
				score := dmp.matchBitapScore(d, j-1, loc, pattern)
				// This match will almost certainly be better than any existing match. But check anyway.
				if score <= scoreThreshold {
					// Told you so.
					scoreThreshold = score
					bestLoc = j - 1
					if bestLoc > loc {
						// When passing loc, don't exceed our current distance from loc.
						start = int(math.Max(1, float64(2*loc-bestLoc)))
					} else {
						// Already passed loc, downhill from here on in.
						break
					}
				}
			}
		}
		if dmp.matchBitapScore(d+1, loc, loc, pattern) > scoreThreshold {
			// No hope for a (better) match at greater error levels.
			break
		}
		lastRd = rd
	}
	return bestLoc
}
+
+// matchBitapScore computes and returns the score for a match with e errors and x location.
+func (dmp *DiffMatchPatch) matchBitapScore(e, x, loc int, pattern string) float64 {
+ accuracy := float64(e) / float64(len(pattern))
+ proximity := math.Abs(float64(loc - x))
+ if dmp.MatchDistance == 0 {
+ // Dodge divide by zero error.
+ if proximity == 0 {
+ return accuracy
+ }
+
+ return 1.0
+ }
+ return accuracy + (proximity / float64(dmp.MatchDistance))
+}
+
+// MatchAlphabet initialises the alphabet for the Bitap algorithm.
+func (dmp *DiffMatchPatch) MatchAlphabet(pattern string) map[byte]int {
+ s := map[byte]int{}
+ charPattern := []byte(pattern)
+ for _, c := range charPattern {
+ _, ok := s[c]
+ if !ok {
+ s[c] = 0
+ }
+ }
+ i := 0
+
+ for _, c := range charPattern {
+ value := s[c] | int(uint(1)< y {
+ return x
+ }
+ return y
+}
diff --git a/vendor/github.com/sergi/go-diff/diffmatchpatch/patch.go b/vendor/github.com/sergi/go-diff/diffmatchpatch/patch.go
new file mode 100644
index 000000000..223c43c42
--- /dev/null
+++ b/vendor/github.com/sergi/go-diff/diffmatchpatch/patch.go
@@ -0,0 +1,556 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+package diffmatchpatch
+
+import (
+ "bytes"
+ "errors"
+ "math"
+ "net/url"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+// Patch represents one patch operation.
+type Patch struct {
+ diffs []Diff
+ Start1 int
+ Start2 int
+ Length1 int
+ Length2 int
+}
+
+// String emulates GNU diff's format.
+// Header: @@ -382,8 +481,9 @@
+// Indices are printed as 1-based, not 0-based.
+func (p *Patch) String() string {
+ var coords1, coords2 string
+
+ if p.Length1 == 0 {
+ coords1 = strconv.Itoa(p.Start1) + ",0"
+ } else if p.Length1 == 1 {
+ coords1 = strconv.Itoa(p.Start1 + 1)
+ } else {
+ coords1 = strconv.Itoa(p.Start1+1) + "," + strconv.Itoa(p.Length1)
+ }
+
+ if p.Length2 == 0 {
+ coords2 = strconv.Itoa(p.Start2) + ",0"
+ } else if p.Length2 == 1 {
+ coords2 = strconv.Itoa(p.Start2 + 1)
+ } else {
+ coords2 = strconv.Itoa(p.Start2+1) + "," + strconv.Itoa(p.Length2)
+ }
+
+ var text bytes.Buffer
+ _, _ = text.WriteString("@@ -" + coords1 + " +" + coords2 + " @@\n")
+
+ // Escape the body of the patch with %xx notation.
+ for _, aDiff := range p.diffs {
+ switch aDiff.Type {
+ case DiffInsert:
+ _, _ = text.WriteString("+")
+ case DiffDelete:
+ _, _ = text.WriteString("-")
+ case DiffEqual:
+ _, _ = text.WriteString(" ")
+ }
+
+ _, _ = text.WriteString(strings.Replace(url.QueryEscape(aDiff.Text), "+", " ", -1))
+ _, _ = text.WriteString("\n")
+ }
+
+ return unescaper.Replace(text.String())
+}
+
+// PatchAddContext increases the context until it is unique, but doesn't let the pattern expand beyond MatchMaxBits.
+func (dmp *DiffMatchPatch) PatchAddContext(patch Patch, text string) Patch {
+ if len(text) == 0 {
+ return patch
+ }
+
+ pattern := text[patch.Start2 : patch.Start2+patch.Length1]
+ padding := 0
+
+ // Look for the first and last matches of pattern in text. If two different matches are found, increase the pattern length.
+ for strings.Index(text, pattern) != strings.LastIndex(text, pattern) &&
+ len(pattern) < dmp.MatchMaxBits-2*dmp.PatchMargin {
+ padding += dmp.PatchMargin
+ maxStart := max(0, patch.Start2-padding)
+ minEnd := min(len(text), patch.Start2+patch.Length1+padding)
+ pattern = text[maxStart:minEnd]
+ }
+ // Add one chunk for good luck.
+ padding += dmp.PatchMargin
+
+ // Add the prefix.
+ prefix := text[max(0, patch.Start2-padding):patch.Start2]
+ if len(prefix) != 0 {
+ patch.diffs = append([]Diff{Diff{DiffEqual, prefix}}, patch.diffs...)
+ }
+ // Add the suffix.
+ suffix := text[patch.Start2+patch.Length1 : min(len(text), patch.Start2+patch.Length1+padding)]
+ if len(suffix) != 0 {
+ patch.diffs = append(patch.diffs, Diff{DiffEqual, suffix})
+ }
+
+ // Roll back the start points.
+ patch.Start1 -= len(prefix)
+ patch.Start2 -= len(prefix)
+ // Extend the lengths.
+ patch.Length1 += len(prefix) + len(suffix)
+ patch.Length2 += len(prefix) + len(suffix)
+
+ return patch
+}
+
+// PatchMake computes a list of patches.
+func (dmp *DiffMatchPatch) PatchMake(opt ...interface{}) []Patch {
+ if len(opt) == 1 {
+ diffs, _ := opt[0].([]Diff)
+ text1 := dmp.DiffText1(diffs)
+ return dmp.PatchMake(text1, diffs)
+ } else if len(opt) == 2 {
+ text1 := opt[0].(string)
+ switch t := opt[1].(type) {
+ case string:
+ diffs := dmp.DiffMain(text1, t, true)
+ if len(diffs) > 2 {
+ diffs = dmp.DiffCleanupSemantic(diffs)
+ diffs = dmp.DiffCleanupEfficiency(diffs)
+ }
+ return dmp.PatchMake(text1, diffs)
+ case []Diff:
+ return dmp.patchMake2(text1, t)
+ }
+ } else if len(opt) == 3 {
+ return dmp.PatchMake(opt[0], opt[2])
+ }
+ return []Patch{}
+}
+
+// patchMake2 computes a list of patches to turn text1 into text2.
+// text2 is not provided, diffs are the delta between text1 and text2.
+func (dmp *DiffMatchPatch) patchMake2(text1 string, diffs []Diff) []Patch {
+ // Check for null inputs not needed since null can't be passed in C#.
+ patches := []Patch{}
+ if len(diffs) == 0 {
+ return patches // Get rid of the null case.
+ }
+
+ patch := Patch{}
+ charCount1 := 0 // Number of characters into the text1 string.
+ charCount2 := 0 // Number of characters into the text2 string.
+ // Start with text1 (prepatchText) and apply the diffs until we arrive at text2 (postpatchText). We recreate the patches one by one to determine context info.
+ prepatchText := text1
+ postpatchText := text1
+
+ for i, aDiff := range diffs {
+ if len(patch.diffs) == 0 && aDiff.Type != DiffEqual {
+ // A new patch starts here.
+ patch.Start1 = charCount1
+ patch.Start2 = charCount2
+ }
+
+ switch aDiff.Type {
+ case DiffInsert:
+ patch.diffs = append(patch.diffs, aDiff)
+ patch.Length2 += len(aDiff.Text)
+ postpatchText = postpatchText[:charCount2] +
+ aDiff.Text + postpatchText[charCount2:]
+ case DiffDelete:
+ patch.Length1 += len(aDiff.Text)
+ patch.diffs = append(patch.diffs, aDiff)
+ postpatchText = postpatchText[:charCount2] + postpatchText[charCount2+len(aDiff.Text):]
+ case DiffEqual:
+ if len(aDiff.Text) <= 2*dmp.PatchMargin &&
+ len(patch.diffs) != 0 && i != len(diffs)-1 {
+ // Small equality inside a patch.
+ patch.diffs = append(patch.diffs, aDiff)
+ patch.Length1 += len(aDiff.Text)
+ patch.Length2 += len(aDiff.Text)
+ }
+ if len(aDiff.Text) >= 2*dmp.PatchMargin {
+ // Time for a new patch.
+ if len(patch.diffs) != 0 {
+ patch = dmp.PatchAddContext(patch, prepatchText)
+ patches = append(patches, patch)
+ patch = Patch{}
+ // Unlike Unidiff, our patch lists have a rolling context. http://code.google.com/p/google-diff-match-patch/wiki/Unidiff Update prepatch text & pos to reflect the application of the just completed patch.
+ prepatchText = postpatchText
+ charCount1 = charCount2
+ }
+ }
+ }
+
+ // Update the current character count.
+ if aDiff.Type != DiffInsert {
+ charCount1 += len(aDiff.Text)
+ }
+ if aDiff.Type != DiffDelete {
+ charCount2 += len(aDiff.Text)
+ }
+ }
+
+ // Pick up the leftover patch if not empty.
+ if len(patch.diffs) != 0 {
+ patch = dmp.PatchAddContext(patch, prepatchText)
+ patches = append(patches, patch)
+ }
+
+ return patches
+}
+
+// PatchDeepCopy returns an array that is identical to a given an array of patches.
+func (dmp *DiffMatchPatch) PatchDeepCopy(patches []Patch) []Patch {
+ patchesCopy := []Patch{}
+ for _, aPatch := range patches {
+ patchCopy := Patch{}
+ for _, aDiff := range aPatch.diffs {
+ patchCopy.diffs = append(patchCopy.diffs, Diff{
+ aDiff.Type,
+ aDiff.Text,
+ })
+ }
+ patchCopy.Start1 = aPatch.Start1
+ patchCopy.Start2 = aPatch.Start2
+ patchCopy.Length1 = aPatch.Length1
+ patchCopy.Length2 = aPatch.Length2
+ patchesCopy = append(patchesCopy, patchCopy)
+ }
+ return patchesCopy
+}
+
+// PatchApply merges a set of patches onto the text. Returns a patched text, as well as an array of true/false values indicating which patches were applied.
+func (dmp *DiffMatchPatch) PatchApply(patches []Patch, text string) (string, []bool) {
+ if len(patches) == 0 {
+ return text, []bool{}
+ }
+
+ // Deep copy the patches so that no changes are made to originals.
+ patches = dmp.PatchDeepCopy(patches)
+
+ nullPadding := dmp.PatchAddPadding(patches)
+ text = nullPadding + text + nullPadding
+ patches = dmp.PatchSplitMax(patches)
+
+ x := 0
+ // delta keeps track of the offset between the expected and actual location of the previous patch. If there are patches expected at positions 10 and 20, but the first patch was found at 12, delta is 2 and the second patch has an effective expected position of 22.
+ delta := 0
+ results := make([]bool, len(patches))
+ for _, aPatch := range patches {
+ expectedLoc := aPatch.Start2 + delta
+ text1 := dmp.DiffText1(aPatch.diffs)
+ var startLoc int
+ endLoc := -1
+ if len(text1) > dmp.MatchMaxBits {
+ // PatchSplitMax will only provide an oversized pattern in the case of a monster delete.
+ startLoc = dmp.MatchMain(text, text1[:dmp.MatchMaxBits], expectedLoc)
+ if startLoc != -1 {
+ endLoc = dmp.MatchMain(text,
+ text1[len(text1)-dmp.MatchMaxBits:], expectedLoc+len(text1)-dmp.MatchMaxBits)
+ if endLoc == -1 || startLoc >= endLoc {
+ // Can't find valid trailing context. Drop this patch.
+ startLoc = -1
+ }
+ }
+ } else {
+ startLoc = dmp.MatchMain(text, text1, expectedLoc)
+ }
+ if startLoc == -1 {
+ // No match found. :(
+ results[x] = false
+ // Subtract the delta for this failed patch from subsequent patches.
+ delta -= aPatch.Length2 - aPatch.Length1
+ } else {
+ // Found a match. :)
+ results[x] = true
+ delta = startLoc - expectedLoc
+ var text2 string
+ if endLoc == -1 {
+ text2 = text[startLoc:int(math.Min(float64(startLoc+len(text1)), float64(len(text))))]
+ } else {
+ text2 = text[startLoc:int(math.Min(float64(endLoc+dmp.MatchMaxBits), float64(len(text))))]
+ }
+ if text1 == text2 {
+ // Perfect match, just shove the Replacement text in.
+ text = text[:startLoc] + dmp.DiffText2(aPatch.diffs) + text[startLoc+len(text1):]
+ } else {
+ // Imperfect match. Run a diff to get a framework of equivalent indices.
+ diffs := dmp.DiffMain(text1, text2, false)
+ if len(text1) > dmp.MatchMaxBits && float64(dmp.DiffLevenshtein(diffs))/float64(len(text1)) > dmp.PatchDeleteThreshold {
+ // The end points match, but the content is unacceptably bad.
+ results[x] = false
+ } else {
+ diffs = dmp.DiffCleanupSemanticLossless(diffs)
+ index1 := 0
+ for _, aDiff := range aPatch.diffs {
+ if aDiff.Type != DiffEqual {
+ index2 := dmp.DiffXIndex(diffs, index1)
+ if aDiff.Type == DiffInsert {
+ // Insertion
+ text = text[:startLoc+index2] + aDiff.Text + text[startLoc+index2:]
+ } else if aDiff.Type == DiffDelete {
+ // Deletion
+ startIndex := startLoc + index2
+ text = text[:startIndex] +
+ text[startIndex+dmp.DiffXIndex(diffs, index1+len(aDiff.Text))-index2:]
+ }
+ }
+ if aDiff.Type != DiffDelete {
+ index1 += len(aDiff.Text)
+ }
+ }
+ }
+ }
+ }
+ x++
+ }
+ // Strip the padding off.
+ text = text[len(nullPadding) : len(nullPadding)+(len(text)-2*len(nullPadding))]
+ return text, results
+}
+
+// PatchAddPadding adds some padding on text start and end so that edges can match something.
+// Intended to be called only from within patchApply.
+func (dmp *DiffMatchPatch) PatchAddPadding(patches []Patch) string {
+ paddingLength := dmp.PatchMargin
+ nullPadding := ""
+ for x := 1; x <= paddingLength; x++ {
+ nullPadding += string(x)
+ }
+
+ // Bump all the patches forward.
+ for i := range patches {
+ patches[i].Start1 += paddingLength
+ patches[i].Start2 += paddingLength
+ }
+
+ // Add some padding on start of first diff.
+ if len(patches[0].diffs) == 0 || patches[0].diffs[0].Type != DiffEqual {
+ // Add nullPadding equality.
+ patches[0].diffs = append([]Diff{Diff{DiffEqual, nullPadding}}, patches[0].diffs...)
+ patches[0].Start1 -= paddingLength // Should be 0.
+ patches[0].Start2 -= paddingLength // Should be 0.
+ patches[0].Length1 += paddingLength
+ patches[0].Length2 += paddingLength
+ } else if paddingLength > len(patches[0].diffs[0].Text) {
+ // Grow first equality.
+ extraLength := paddingLength - len(patches[0].diffs[0].Text)
+ patches[0].diffs[0].Text = nullPadding[len(patches[0].diffs[0].Text):] + patches[0].diffs[0].Text
+ patches[0].Start1 -= extraLength
+ patches[0].Start2 -= extraLength
+ patches[0].Length1 += extraLength
+ patches[0].Length2 += extraLength
+ }
+
+ // Add some padding on end of last diff.
+ last := len(patches) - 1
+ if len(patches[last].diffs) == 0 || patches[last].diffs[len(patches[last].diffs)-1].Type != DiffEqual {
+ // Add nullPadding equality.
+ patches[last].diffs = append(patches[last].diffs, Diff{DiffEqual, nullPadding})
+ patches[last].Length1 += paddingLength
+ patches[last].Length2 += paddingLength
+ } else if paddingLength > len(patches[last].diffs[len(patches[last].diffs)-1].Text) {
+ // Grow last equality.
+ lastDiff := patches[last].diffs[len(patches[last].diffs)-1]
+ extraLength := paddingLength - len(lastDiff.Text)
+ patches[last].diffs[len(patches[last].diffs)-1].Text += nullPadding[:extraLength]
+ patches[last].Length1 += extraLength
+ patches[last].Length2 += extraLength
+ }
+
+ return nullPadding
+}
+
+// PatchSplitMax looks through the patches and breaks up any which are longer than the maximum limit of the match algorithm.
+// Intended to be called only from within patchApply.
+func (dmp *DiffMatchPatch) PatchSplitMax(patches []Patch) []Patch {
+ patchSize := dmp.MatchMaxBits
+ for x := 0; x < len(patches); x++ {
+ if patches[x].Length1 <= patchSize {
+ continue
+ }
+ bigpatch := patches[x]
+ // Remove the big old patch.
+ patches = append(patches[:x], patches[x+1:]...)
+ x--
+
+ Start1 := bigpatch.Start1
+ Start2 := bigpatch.Start2
+ precontext := ""
+ for len(bigpatch.diffs) != 0 {
+ // Create one of several smaller patches.
+ patch := Patch{}
+ empty := true
+ patch.Start1 = Start1 - len(precontext)
+ patch.Start2 = Start2 - len(precontext)
+ if len(precontext) != 0 {
+ patch.Length1 = len(precontext)
+ patch.Length2 = len(precontext)
+ patch.diffs = append(patch.diffs, Diff{DiffEqual, precontext})
+ }
+ for len(bigpatch.diffs) != 0 && patch.Length1 < patchSize-dmp.PatchMargin {
+ diffType := bigpatch.diffs[0].Type
+ diffText := bigpatch.diffs[0].Text
+ if diffType == DiffInsert {
+ // Insertions are harmless.
+ patch.Length2 += len(diffText)
+ Start2 += len(diffText)
+ patch.diffs = append(patch.diffs, bigpatch.diffs[0])
+ bigpatch.diffs = bigpatch.diffs[1:]
+ empty = false
+ } else if diffType == DiffDelete && len(patch.diffs) == 1 && patch.diffs[0].Type == DiffEqual && len(diffText) > 2*patchSize {
+ // This is a large deletion. Let it pass in one chunk.
+ patch.Length1 += len(diffText)
+ Start1 += len(diffText)
+ empty = false
+ patch.diffs = append(patch.diffs, Diff{diffType, diffText})
+ bigpatch.diffs = bigpatch.diffs[1:]
+ } else {
+ // Deletion or equality. Only take as much as we can stomach.
+ diffText = diffText[:min(len(diffText), patchSize-patch.Length1-dmp.PatchMargin)]
+
+ patch.Length1 += len(diffText)
+ Start1 += len(diffText)
+ if diffType == DiffEqual {
+ patch.Length2 += len(diffText)
+ Start2 += len(diffText)
+ } else {
+ empty = false
+ }
+ patch.diffs = append(patch.diffs, Diff{diffType, diffText})
+ if diffText == bigpatch.diffs[0].Text {
+ bigpatch.diffs = bigpatch.diffs[1:]
+ } else {
+ bigpatch.diffs[0].Text =
+ bigpatch.diffs[0].Text[len(diffText):]
+ }
+ }
+ }
+ // Compute the head context for the next patch.
+ precontext = dmp.DiffText2(patch.diffs)
+ precontext = precontext[max(0, len(precontext)-dmp.PatchMargin):]
+
+ postcontext := ""
+ // Append the end context for this patch.
+ if len(dmp.DiffText1(bigpatch.diffs)) > dmp.PatchMargin {
+ postcontext = dmp.DiffText1(bigpatch.diffs)[:dmp.PatchMargin]
+ } else {
+ postcontext = dmp.DiffText1(bigpatch.diffs)
+ }
+
+ if len(postcontext) != 0 {
+ patch.Length1 += len(postcontext)
+ patch.Length2 += len(postcontext)
+ if len(patch.diffs) != 0 && patch.diffs[len(patch.diffs)-1].Type == DiffEqual {
+ patch.diffs[len(patch.diffs)-1].Text += postcontext
+ } else {
+ patch.diffs = append(patch.diffs, Diff{DiffEqual, postcontext})
+ }
+ }
+ if !empty {
+ x++
+ patches = append(patches[:x], append([]Patch{patch}, patches[x:]...)...)
+ }
+ }
+ }
+ return patches
+}
+
+// PatchToText takes a list of patches and returns a textual representation.
+func (dmp *DiffMatchPatch) PatchToText(patches []Patch) string {
+ var text bytes.Buffer
+ for _, aPatch := range patches {
+ _, _ = text.WriteString(aPatch.String())
+ }
+ return text.String()
+}
+
+// PatchFromText parses a textual representation of patches and returns a List of Patch objects.
+func (dmp *DiffMatchPatch) PatchFromText(textline string) ([]Patch, error) {
+ patches := []Patch{}
+ if len(textline) == 0 {
+ return patches, nil
+ }
+ text := strings.Split(textline, "\n")
+ textPointer := 0
+ patchHeader := regexp.MustCompile("^@@ -(\\d+),?(\\d*) \\+(\\d+),?(\\d*) @@$")
+
+ var patch Patch
+ var sign uint8
+ var line string
+ for textPointer < len(text) {
+
+ if !patchHeader.MatchString(text[textPointer]) {
+ return patches, errors.New("Invalid patch string: " + text[textPointer])
+ }
+
+ patch = Patch{}
+ m := patchHeader.FindStringSubmatch(text[textPointer])
+
+ patch.Start1, _ = strconv.Atoi(m[1])
+ if len(m[2]) == 0 {
+ patch.Start1--
+ patch.Length1 = 1
+ } else if m[2] == "0" {
+ patch.Length1 = 0
+ } else {
+ patch.Start1--
+ patch.Length1, _ = strconv.Atoi(m[2])
+ }
+
+ patch.Start2, _ = strconv.Atoi(m[3])
+
+ if len(m[4]) == 0 {
+ patch.Start2--
+ patch.Length2 = 1
+ } else if m[4] == "0" {
+ patch.Length2 = 0
+ } else {
+ patch.Start2--
+ patch.Length2, _ = strconv.Atoi(m[4])
+ }
+ textPointer++
+
+ for textPointer < len(text) {
+ if len(text[textPointer]) > 0 {
+ sign = text[textPointer][0]
+ } else {
+ textPointer++
+ continue
+ }
+
+ line = text[textPointer][1:]
+ line = strings.Replace(line, "+", "%2b", -1)
+ line, _ = url.QueryUnescape(line)
+ if sign == '-' {
+ // Deletion.
+ patch.diffs = append(patch.diffs, Diff{DiffDelete, line})
+ } else if sign == '+' {
+ // Insertion.
+ patch.diffs = append(patch.diffs, Diff{DiffInsert, line})
+ } else if sign == ' ' {
+ // Minor equality.
+ patch.diffs = append(patch.diffs, Diff{DiffEqual, line})
+ } else if sign == '@' {
+ // Start of next patch.
+ break
+ } else {
+ // WTF?
+ return patches, errors.New("Invalid patch mode '" + string(sign) + "' in: " + string(line))
+ }
+ textPointer++
+ }
+
+ patches = append(patches, patch)
+ }
+ return patches, nil
+}
diff --git a/vendor/github.com/sergi/go-diff/diffmatchpatch/stringutil.go b/vendor/github.com/sergi/go-diff/diffmatchpatch/stringutil.go
new file mode 100644
index 000000000..265f29cc7
--- /dev/null
+++ b/vendor/github.com/sergi/go-diff/diffmatchpatch/stringutil.go
@@ -0,0 +1,88 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+package diffmatchpatch
+
+import (
+ "strings"
+ "unicode/utf8"
+)
+
+// unescaper unescapes selected chars for compatibility with JavaScript's encodeURI.
+// In speed critical applications this could be dropped since the receiving application will certainly decode these fine. Note that this function is case-sensitive. Thus "%3F" would not be unescaped. But this is ok because it is only called with the output of HttpUtility.UrlEncode which returns lowercase hex. Example: "%3f" -> "?", "%24" -> "$", etc.
+var unescaper = strings.NewReplacer(
+ "%21", "!", "%7E", "~", "%27", "'",
+ "%28", "(", "%29", ")", "%3B", ";",
+ "%2F", "/", "%3F", "?", "%3A", ":",
+ "%40", "@", "%26", "&", "%3D", "=",
+ "%2B", "+", "%24", "$", "%2C", ",", "%23", "#", "%2A", "*")
+
+// indexOf returns the first index of pattern in str, starting at str[i].
+func indexOf(str string, pattern string, i int) int {
+ if i > len(str)-1 {
+ return -1
+ }
+ if i <= 0 {
+ return strings.Index(str, pattern)
+ }
+ ind := strings.Index(str[i:], pattern)
+ if ind == -1 {
+ return -1
+ }
+ return ind + i
+}
+
+// lastIndexOf returns the last index of pattern in str, starting at str[i].
+func lastIndexOf(str string, pattern string, i int) int {
+ if i < 0 {
+ return -1
+ }
+ if i >= len(str) {
+ return strings.LastIndex(str, pattern)
+ }
+ _, size := utf8.DecodeRuneInString(str[i:])
+ return strings.LastIndex(str[:i+size], pattern)
+}
+
+// runesIndexOf returns the index of pattern in target, starting at target[i].
+func runesIndexOf(target, pattern []rune, i int) int {
+ if i > len(target)-1 {
+ return -1
+ }
+ if i <= 0 {
+ return runesIndex(target, pattern)
+ }
+ ind := runesIndex(target[i:], pattern)
+ if ind == -1 {
+ return -1
+ }
+ return ind + i
+}
+
+func runesEqual(r1, r2 []rune) bool {
+ if len(r1) != len(r2) {
+ return false
+ }
+ for i, c := range r1 {
+ if c != r2[i] {
+ return false
+ }
+ }
+ return true
+}
+
+// runesIndex is the equivalent of strings.Index for rune slices.
+func runesIndex(r1, r2 []rune) int {
+ last := len(r1) - len(r2)
+ for i := 0; i <= last; i++ {
+ if runesEqual(r1[i:i+len(r2)], r2) {
+ return i
+ }
+ }
+ return -1
+}
diff --git a/vendor/github.com/Sirupsen/logrus/.gitignore b/vendor/github.com/sirupsen/logrus/.gitignore
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/.gitignore
rename to vendor/github.com/sirupsen/logrus/.gitignore
diff --git a/vendor/github.com/Sirupsen/logrus/.travis.yml b/vendor/github.com/sirupsen/logrus/.travis.yml
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/.travis.yml
rename to vendor/github.com/sirupsen/logrus/.travis.yml
diff --git a/vendor/github.com/Sirupsen/logrus/CHANGELOG.md b/vendor/github.com/sirupsen/logrus/CHANGELOG.md
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/CHANGELOG.md
rename to vendor/github.com/sirupsen/logrus/CHANGELOG.md
diff --git a/vendor/github.com/Sirupsen/logrus/LICENSE b/vendor/github.com/sirupsen/logrus/LICENSE
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/LICENSE
rename to vendor/github.com/sirupsen/logrus/LICENSE
diff --git a/vendor/github.com/Sirupsen/logrus/README.md b/vendor/github.com/sirupsen/logrus/README.md
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/README.md
rename to vendor/github.com/sirupsen/logrus/README.md
diff --git a/vendor/github.com/Sirupsen/logrus/alt_exit.go b/vendor/github.com/sirupsen/logrus/alt_exit.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/alt_exit.go
rename to vendor/github.com/sirupsen/logrus/alt_exit.go
diff --git a/vendor/github.com/Sirupsen/logrus/appveyor.yml b/vendor/github.com/sirupsen/logrus/appveyor.yml
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/appveyor.yml
rename to vendor/github.com/sirupsen/logrus/appveyor.yml
diff --git a/vendor/github.com/Sirupsen/logrus/doc.go b/vendor/github.com/sirupsen/logrus/doc.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/doc.go
rename to vendor/github.com/sirupsen/logrus/doc.go
diff --git a/vendor/github.com/Sirupsen/logrus/entry.go b/vendor/github.com/sirupsen/logrus/entry.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/entry.go
rename to vendor/github.com/sirupsen/logrus/entry.go
diff --git a/vendor/github.com/Sirupsen/logrus/exported.go b/vendor/github.com/sirupsen/logrus/exported.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/exported.go
rename to vendor/github.com/sirupsen/logrus/exported.go
diff --git a/vendor/github.com/Sirupsen/logrus/formatter.go b/vendor/github.com/sirupsen/logrus/formatter.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/formatter.go
rename to vendor/github.com/sirupsen/logrus/formatter.go
diff --git a/vendor/github.com/Sirupsen/logrus/go.mod b/vendor/github.com/sirupsen/logrus/go.mod
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/go.mod
rename to vendor/github.com/sirupsen/logrus/go.mod
diff --git a/vendor/github.com/Sirupsen/logrus/go.sum b/vendor/github.com/sirupsen/logrus/go.sum
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/go.sum
rename to vendor/github.com/sirupsen/logrus/go.sum
diff --git a/vendor/github.com/Sirupsen/logrus/hooks.go b/vendor/github.com/sirupsen/logrus/hooks.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/hooks.go
rename to vendor/github.com/sirupsen/logrus/hooks.go
diff --git a/vendor/github.com/Sirupsen/logrus/json_formatter.go b/vendor/github.com/sirupsen/logrus/json_formatter.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/json_formatter.go
rename to vendor/github.com/sirupsen/logrus/json_formatter.go
diff --git a/vendor/github.com/Sirupsen/logrus/logger.go b/vendor/github.com/sirupsen/logrus/logger.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/logger.go
rename to vendor/github.com/sirupsen/logrus/logger.go
diff --git a/vendor/github.com/Sirupsen/logrus/logrus.go b/vendor/github.com/sirupsen/logrus/logrus.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/logrus.go
rename to vendor/github.com/sirupsen/logrus/logrus.go
diff --git a/vendor/github.com/Sirupsen/logrus/terminal_check_appengine.go b/vendor/github.com/sirupsen/logrus/terminal_check_appengine.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/terminal_check_appengine.go
rename to vendor/github.com/sirupsen/logrus/terminal_check_appengine.go
diff --git a/vendor/github.com/Sirupsen/logrus/terminal_check_js.go b/vendor/github.com/sirupsen/logrus/terminal_check_js.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/terminal_check_js.go
rename to vendor/github.com/sirupsen/logrus/terminal_check_js.go
diff --git a/vendor/github.com/Sirupsen/logrus/terminal_check_notappengine.go b/vendor/github.com/sirupsen/logrus/terminal_check_notappengine.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/terminal_check_notappengine.go
rename to vendor/github.com/sirupsen/logrus/terminal_check_notappengine.go
diff --git a/vendor/github.com/Sirupsen/logrus/terminal_check_windows.go b/vendor/github.com/sirupsen/logrus/terminal_check_windows.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/terminal_check_windows.go
rename to vendor/github.com/sirupsen/logrus/terminal_check_windows.go
diff --git a/vendor/github.com/Sirupsen/logrus/terminal_notwindows.go b/vendor/github.com/sirupsen/logrus/terminal_notwindows.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/terminal_notwindows.go
rename to vendor/github.com/sirupsen/logrus/terminal_notwindows.go
diff --git a/vendor/github.com/Sirupsen/logrus/terminal_windows.go b/vendor/github.com/sirupsen/logrus/terminal_windows.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/terminal_windows.go
rename to vendor/github.com/sirupsen/logrus/terminal_windows.go
diff --git a/vendor/github.com/Sirupsen/logrus/text_formatter.go b/vendor/github.com/sirupsen/logrus/text_formatter.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/text_formatter.go
rename to vendor/github.com/sirupsen/logrus/text_formatter.go
diff --git a/vendor/github.com/Sirupsen/logrus/writer.go b/vendor/github.com/sirupsen/logrus/writer.go
similarity index 100%
rename from vendor/github.com/Sirupsen/logrus/writer.go
rename to vendor/github.com/sirupsen/logrus/writer.go
diff --git a/vendor/github.com/zach-klippenstein/goregen/.gitignore b/vendor/github.com/zach-klippenstein/goregen/.gitignore
new file mode 100644
index 000000000..54ad1b472
--- /dev/null
+++ b/vendor/github.com/zach-klippenstein/goregen/.gitignore
@@ -0,0 +1,28 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+*.prof
+
+# IntelliJ
+*.iml
+.idea/
diff --git a/vendor/github.com/zach-klippenstein/goregen/.travis.yml b/vendor/github.com/zach-klippenstein/goregen/.travis.yml
new file mode 100644
index 000000000..fcee97a2b
--- /dev/null
+++ b/vendor/github.com/zach-klippenstein/goregen/.travis.yml
@@ -0,0 +1,7 @@
+language: go
+
+go:
+ - 1.5.1
+ - tip
+
+sudo: false
diff --git a/vendor/github.com/zach-klippenstein/goregen/LICENSE.txt b/vendor/github.com/zach-klippenstein/goregen/LICENSE.txt
new file mode 100644
index 000000000..8dada3eda
--- /dev/null
+++ b/vendor/github.com/zach-klippenstein/goregen/LICENSE.txt
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/zach-klippenstein/goregen/README.md b/vendor/github.com/zach-klippenstein/goregen/README.md
new file mode 100644
index 000000000..89b3cec20
--- /dev/null
+++ b/vendor/github.com/zach-klippenstein/goregen/README.md
@@ -0,0 +1,7 @@
+#goregen [](https://godoc.org/github.com/zach-klippenstein/goregen) [](https://travis-ci.org/zach-klippenstein/goregen)
+
+A Golang library for generating random strings from regular expressions.
+
+Checkout https://goregen-demo.herokuapp.com for a live demo.
+
+See the [godoc](https://godoc.org/github.com/zach-klippenstein/goregen) for examples.
diff --git a/vendor/github.com/zach-klippenstein/goregen/char_class.go b/vendor/github.com/zach-klippenstein/goregen/char_class.go
new file mode 100644
index 000000000..b5b9014ba
--- /dev/null
+++ b/vendor/github.com/zach-klippenstein/goregen/char_class.go
@@ -0,0 +1,120 @@
+/*
+Copyright 2014 Zachary Klippenstein
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package regen
+
+import (
+ "fmt"
+)
+
+// CharClass represents a regular expression character class as a list of ranges.
+// The runes contained in the class can be accessed by index.
+type tCharClass struct {
+ Ranges []tCharClassRange
+ TotalSize int32
+}
+
+// CharClassRange represents a single range of characters in a character class.
+type tCharClassRange struct {
+ Start rune
+ Size int32
+}
+
+// NewCharClass creates a character class with a single range.
+func newCharClass(start rune, end rune) *tCharClass {
+ charRange := newCharClassRange(start, end)
+ return &tCharClass{
+ Ranges: []tCharClassRange{charRange},
+ TotalSize: charRange.Size,
+ }
+}
+
+/*
+ParseCharClass parses a character class as represented by syntax.Parse into a slice of CharClassRange structs.
+
+Char classes are encoded as pairs of runes representing ranges:
+[0-9] = 09, [a0] = aa00 (2 1-len ranges).
+
+e.g.
+
+"[a0-9]" -> "aa09" -> a, 0-9
+
+"[^a-z]" -> "…" -> 0-(a-1), (z+1)-(max rune)
+*/
+func parseCharClass(runes []rune) *tCharClass {
+ var totalSize int32
+ numRanges := len(runes) / 2
+ ranges := make([]tCharClassRange, numRanges, numRanges)
+
+ for i := 0; i < numRanges; i++ {
+ start := runes[i*2]
+ end := runes[i*2+1]
+
+ // indicates a negative class
+ if start == 0 {
+ // doesn't make sense to generate null bytes, so all ranges must start at
+ // no less than 1.
+ start = 1
+ }
+
+ r := newCharClassRange(start, end)
+
+ ranges[i] = r
+ totalSize += r.Size
+ }
+
+ return &tCharClass{ranges, totalSize}
+}
+
+// GetRuneAt gets a rune from CharClass as a contiguous array of runes.
+func (class *tCharClass) GetRuneAt(i int32) rune {
+ for _, r := range class.Ranges {
+ if i < r.Size {
+ return r.Start + rune(i)
+ }
+ i -= r.Size
+ }
+ panic("index out of bounds")
+}
+
+func (class *tCharClass) String() string {
+ return fmt.Sprintf("%s", class.Ranges)
+}
+
+func newCharClassRange(start rune, end rune) tCharClassRange {
+ if start < 1 {
+ panic("char class range cannot contain runes less than 1")
+ }
+
+ size := end - start + 1
+
+ if size < 1 {
+ panic("char class range size must be at least 1")
+ }
+
+ return tCharClassRange{
+ Start: start,
+ Size: size,
+ }
+}
+
+func (r tCharClassRange) String() string {
+ if r.Size == 1 {
+ return fmt.Sprintf("%s:1", runesToString(r.Start))
+ }
+ return fmt.Sprintf("%s-%s:%d", runesToString(r.Start), runesToString(r.Start+rune(r.Size-1)), r.Size)
+
+}
diff --git a/vendor/github.com/zach-klippenstein/goregen/generator_error.go b/vendor/github.com/zach-klippenstein/goregen/generator_error.go
new file mode 100644
index 000000000..4536388dd
--- /dev/null
+++ b/vendor/github.com/zach-klippenstein/goregen/generator_error.go
@@ -0,0 +1,38 @@
+/*
+Copyright 2014 Zachary Klippenstein
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package regen
+
+import (
+ "fmt"
+)
+
+// Error returned by a generatorFactory if the AST is invalid.
+type tGeneratorError struct {
+ ErrorStr string
+ Cause error
+}
+
+func generatorError(cause error, format string, args ...interface{}) error {
+ return &tGeneratorError{fmt.Sprintf(format, args...), cause}
+}
+
+func (err *tGeneratorError) Error() string {
+ if err.Cause != nil {
+ return fmt.Sprintf("%s\ncaused by %s", err.ErrorStr, err.Cause.Error())
+ }
+ return err.ErrorStr
+}
diff --git a/vendor/github.com/zach-klippenstein/goregen/internal_generator.go b/vendor/github.com/zach-klippenstein/goregen/internal_generator.go
new file mode 100644
index 000000000..cfd1652b1
--- /dev/null
+++ b/vendor/github.com/zach-klippenstein/goregen/internal_generator.go
@@ -0,0 +1,271 @@
+/*
+Copyright 2014 Zachary Klippenstein
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package regen
+
+import (
+ "bytes"
+ "fmt"
+ "math"
+ "regexp/syntax"
+)
+
+// generatorFactory is a function that creates a random string generator from a regular expression AST.
+type generatorFactory func(regexp *syntax.Regexp, args *GeneratorArgs) (*internalGenerator, error)
+
+// Must be initialized in init() to avoid "initialization loop" compile error.
+var generatorFactories map[syntax.Op]generatorFactory
+
+const noBound = -1
+
+func init() {
+ generatorFactories = map[syntax.Op]generatorFactory{
+ syntax.OpEmptyMatch: opEmptyMatch,
+ syntax.OpLiteral: opLiteral,
+ syntax.OpAnyCharNotNL: opAnyCharNotNl,
+ syntax.OpAnyChar: opAnyChar,
+ syntax.OpQuest: opQuest,
+ syntax.OpStar: opStar,
+ syntax.OpPlus: opPlus,
+ syntax.OpRepeat: opRepeat,
+ syntax.OpCharClass: opCharClass,
+ syntax.OpConcat: opConcat,
+ syntax.OpAlternate: opAlternate,
+ syntax.OpCapture: opCapture,
+ syntax.OpBeginLine: noop,
+ syntax.OpEndLine: noop,
+ syntax.OpBeginText: noop,
+ syntax.OpEndText: noop,
+ syntax.OpWordBoundary: noop,
+ syntax.OpNoWordBoundary: noop,
+ }
+}
+
+type internalGenerator struct {
+ Name string
+ GenerateFunc func() string
+}
+
+func (gen *internalGenerator) Generate() string {
+ return gen.GenerateFunc()
+}
+
+func (gen *internalGenerator) String() string {
+ return gen.Name
+}
+
+// Create a new generator for each expression in regexps.
+func newGenerators(regexps []*syntax.Regexp, args *GeneratorArgs) ([]*internalGenerator, error) {
+ generators := make([]*internalGenerator, len(regexps), len(regexps))
+ var err error
+
+ // create a generator for each alternate pattern
+ for i, subR := range regexps {
+ generators[i], err = newGenerator(subR, args)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return generators, nil
+}
+
+// Create a new generator for r.
+func newGenerator(regexp *syntax.Regexp, args *GeneratorArgs) (generator *internalGenerator, err error) {
+ simplified := regexp.Simplify()
+
+ factory, ok := generatorFactories[simplified.Op]
+ if ok {
+ return factory(simplified, args)
+ }
+
+ return nil, fmt.Errorf("invalid generator pattern: /%s/ as /%s/\n%s",
+ regexp, simplified, inspectRegexpToString(simplified))
+}
+
+// Generator that does nothing.
+func noop(regexp *syntax.Regexp, args *GeneratorArgs) (*internalGenerator, error) {
+ return &internalGenerator{regexp.String(), func() string {
+ return ""
+ }}, nil
+}
+
+func opEmptyMatch(regexp *syntax.Regexp, args *GeneratorArgs) (*internalGenerator, error) {
+ enforceOp(regexp, syntax.OpEmptyMatch)
+ return &internalGenerator{regexp.String(), func() string {
+ return ""
+ }}, nil
+}
+
+func opLiteral(regexp *syntax.Regexp, args *GeneratorArgs) (*internalGenerator, error) {
+ enforceOp(regexp, syntax.OpLiteral)
+ return &internalGenerator{regexp.String(), func() string {
+ return runesToString(regexp.Rune...)
+ }}, nil
+}
+
+func opAnyChar(regexp *syntax.Regexp, args *GeneratorArgs) (*internalGenerator, error) {
+ enforceOp(regexp, syntax.OpAnyChar)
+ return &internalGenerator{regexp.String(), func() string {
+ return runesToString(rune(args.rng.Int31()))
+ }}, nil
+}
+
+func opAnyCharNotNl(regexp *syntax.Regexp, args *GeneratorArgs) (*internalGenerator, error) {
+ enforceOp(regexp, syntax.OpAnyCharNotNL)
+ charClass := newCharClass(1, rune(math.MaxInt32))
+ return createCharClassGenerator(regexp.String(), charClass, args)
+}
+
+func opQuest(regexp *syntax.Regexp, args *GeneratorArgs) (*internalGenerator, error) {
+ enforceOp(regexp, syntax.OpQuest)
+ return createRepeatingGenerator(regexp, args, 0, 1)
+}
+
+func opStar(regexp *syntax.Regexp, args *GeneratorArgs) (*internalGenerator, error) {
+ enforceOp(regexp, syntax.OpStar)
+ return createRepeatingGenerator(regexp, args, noBound, noBound)
+}
+
+func opPlus(regexp *syntax.Regexp, args *GeneratorArgs) (*internalGenerator, error) {
+ enforceOp(regexp, syntax.OpPlus)
+ return createRepeatingGenerator(regexp, args, 1, noBound)
+}
+
+func opRepeat(regexp *syntax.Regexp, args *GeneratorArgs) (*internalGenerator, error) {
+ enforceOp(regexp, syntax.OpRepeat)
+ return createRepeatingGenerator(regexp, args, regexp.Min, regexp.Max)
+}
+
+// Handles syntax.ClassNL because the parser uses that flag to generate character
+// classes that respect it.
+func opCharClass(regexp *syntax.Regexp, args *GeneratorArgs) (*internalGenerator, error) {
+ enforceOp(regexp, syntax.OpCharClass)
+ charClass := parseCharClass(regexp.Rune)
+ return createCharClassGenerator(regexp.String(), charClass, args)
+}
+
+func opConcat(regexp *syntax.Regexp, genArgs *GeneratorArgs) (*internalGenerator, error) {
+ enforceOp(regexp, syntax.OpConcat)
+
+ generators, err := newGenerators(regexp.Sub, genArgs)
+ if err != nil {
+ return nil, generatorError(err, "error creating generators for concat pattern /%s/", regexp)
+ }
+
+ return &internalGenerator{regexp.String(), func() string {
+ var result bytes.Buffer
+ for _, generator := range generators {
+ result.WriteString(generator.Generate())
+ }
+ return result.String()
+ }}, nil
+}
+
+func opAlternate(regexp *syntax.Regexp, genArgs *GeneratorArgs) (*internalGenerator, error) {
+ enforceOp(regexp, syntax.OpAlternate)
+
+ generators, err := newGenerators(regexp.Sub, genArgs)
+ if err != nil {
+ return nil, generatorError(err, "error creating generators for alternate pattern /%s/", regexp)
+ }
+
+ numGens := len(generators)
+
+ return &internalGenerator{regexp.String(), func() string {
+ i := genArgs.rng.Intn(numGens)
+ generator := generators[i]
+ return generator.Generate()
+ }}, nil
+}
+
+func opCapture(regexp *syntax.Regexp, args *GeneratorArgs) (*internalGenerator, error) {
+ enforceOp(regexp, syntax.OpCapture)
+
+ if err := enforceSingleSub(regexp); err != nil {
+ return nil, err
+ }
+
+ groupRegexp := regexp.Sub[0]
+ generator, err := newGenerator(groupRegexp, args)
+ if err != nil {
+ return nil, err
+ }
+
+ // Group indices are 0-based, but index 0 is the whole expression.
+ index := regexp.Cap - 1
+
+ return &internalGenerator{regexp.String(), func() string {
+ return args.CaptureGroupHandler(index, regexp.Name, groupRegexp, generator, args)
+ }}, nil
+}
+
+func defaultCaptureGroupHandler(index int, name string, group *syntax.Regexp, generator Generator, args *GeneratorArgs) string {
+ return generator.Generate()
+}
+
+// Panic if r.Op != op.
+func enforceOp(r *syntax.Regexp, op syntax.Op) {
+ if r.Op != op {
+ panic(fmt.Sprintf("invalid Op: expected %s, was %s", opToString(op), opToString(r.Op)))
+ }
+}
+
+// Return an error if r has 0 or more than 1 sub-expression.
+func enforceSingleSub(regexp *syntax.Regexp) error {
+ if len(regexp.Sub) != 1 {
+ return generatorError(nil,
+ "%s expected 1 sub-expression, but got %d: %s", opToString(regexp.Op), len(regexp.Sub), regexp)
+ }
+ return nil
+}
+
+func createCharClassGenerator(name string, charClass *tCharClass, args *GeneratorArgs) (*internalGenerator, error) {
+ return &internalGenerator{name, func() string {
+ i := args.rng.Int31n(charClass.TotalSize)
+ r := charClass.GetRuneAt(i)
+ return runesToString(r)
+ }}, nil
+}
+
+// Returns a generator that will run the generator for r's sub-expression [min, max] times.
+func createRepeatingGenerator(regexp *syntax.Regexp, genArgs *GeneratorArgs, min, max int) (*internalGenerator, error) {
+ if err := enforceSingleSub(regexp); err != nil {
+ return nil, err
+ }
+
+ generator, err := newGenerator(regexp.Sub[0], genArgs)
+ if err != nil {
+ return nil, generatorError(err, "failed to create generator for subexpression: /%s/", regexp)
+ }
+
+ if min == noBound {
+ min = int(genArgs.MinUnboundedRepeatCount)
+ }
+ if max == noBound {
+ max = int(genArgs.MaxUnboundedRepeatCount)
+ }
+
+ return &internalGenerator{regexp.String(), func() string {
+ n := min + genArgs.rng.Intn(max-min+1)
+
+ var result bytes.Buffer
+ for i := 0; i < n; i++ {
+ result.WriteString(generator.Generate())
+ }
+ return result.String()
+ }}, nil
+}
diff --git a/vendor/github.com/zach-klippenstein/goregen/regen.go b/vendor/github.com/zach-klippenstein/goregen/regen.go
new file mode 100644
index 000000000..d80034b18
--- /dev/null
+++ b/vendor/github.com/zach-klippenstein/goregen/regen.go
@@ -0,0 +1,225 @@
+/*
+Copyright 2014 Zachary Klippenstein
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+/*
+Package regen is a library for generating random strings from regular expressions.
+The generated strings will match the expressions they were generated from. Similar
+to Ruby's randexp library.
+
+E.g.
+ regen.Generate("[a-z0-9]{1,64}")
+will return a lowercase alphanumeric string
+between 1 and 64 characters long.
+
+Expressions are parsed using the Go standard library's parser: http://golang.org/pkg/regexp/syntax/.
+
+Constraints
+
+"." will generate any character, not necessarily a printable one.
+
+"x{0,}", "x*", and "x+" will generate a random number of x's up to an arbitrary limit.
+If you care about the maximum number, specify it explicitly in the expression,
+e.g. "x{0,256}".
+
+Flags
+
+Flags can be passed to the parser by setting them in the GeneratorArgs struct.
+Newline flags are respected, and newlines won't be generated unless the appropriate flags for
+matching them are set.
+
+E.g.
+Generate(".|[^a]") will never generate newlines. To generate newlines, create a generator and pass
+the flag syntax.MatchNL.
+
+The Perl character class flag is supported, and required if the pattern contains them.
+
+Unicode groups are not supported at this time. Support may be added in the future.
+
+Concurrent Use
+
+A generator can safely be used from multiple goroutines without locking.
+
+A large bottleneck with running generators concurrently is actually the entropy source. Sources returned from
+rand.NewSource() are slow to seed, and not safe for concurrent use. Instead, the source passed in GeneratorArgs
+is used to seed an XorShift64 source (algorithm from the paper at http://vigna.di.unimi.it/ftp/papers/xorshift.pdf).
+This source only uses a single variable internally, and is much faster to seed than the default source. One
+source is created per call to NewGenerator. If no source is passed in, the default source is used to seed.
+
+The source is not locked and does not use atomic operations, so there is a chance that multiple goroutines using
+the same source may get the same output. While obviously not cryptographically secure, I think the simplicity and performance
+benefit outweighs the risk of collisions. If you really care about preventing this, the solution is simple: don't
+call a single Generator from multiple goroutines.
+
+Benchmarks
+
+Benchmarks are included for creating and running generators for limited-length,
+complex regexes, and simple, highly-repetitive regexes.
+
+ go test -bench .
+
+The complex benchmarks generate fake HTTP messages with the following regex:
+ POST (/[-a-zA-Z0-9_.]{3,12}){3,6}
+ Content-Length: [0-9]{2,3}
+ X-Auth-Token: [a-zA-Z0-9+/]{64}
+
+ ([A-Za-z0-9+/]{64}
+ ){3,15}[A-Za-z0-9+/]{60}([A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)
+
+The repetitive benchmarks use the regex
+ a{999}
+
+See regen_benchmarks_test.go for more information.
+
+On my mid-2014 MacBook Pro (2.6GHz Intel Core i5, 8GB 1600MHz DDR3),
+the results of running the benchmarks with minimal load are:
+ BenchmarkComplexCreation-4 200 8322160 ns/op
+ BenchmarkComplexGeneration-4 10000 153625 ns/op
+ BenchmarkLargeRepeatCreateSerial-4 3000 411772 ns/op
+ BenchmarkLargeRepeatGenerateSerial-4 5000 291416 ns/op
+*/
+package regen
+
+import (
+ "fmt"
+ "math/rand"
+ "regexp/syntax"
+)
+
+// DefaultMaxUnboundedRepeatCount is default value for MaxUnboundedRepeatCount.
+const DefaultMaxUnboundedRepeatCount = 4096
+
+// CaptureGroupHandler is a function that is called for each capture group in a regular expression.
+// index and name are the index and name of the group. If unnamed, name is empty. The first capture group has index 0
+// (not 1, as when matching).
+// group is the regular expression within the group (e.g. for `(\w+)`, group would be `\w+`).
+// generator is the generator for group.
+// args is the args used to create the generator calling this function.
+type CaptureGroupHandler func(index int, name string, group *syntax.Regexp, generator Generator, args *GeneratorArgs) string
+
+// GeneratorArgs are arguments passed to NewGenerator that control how generators
+// are created.
+type GeneratorArgs struct {
+ // May be nil.
+ // Used to seed a custom RNG that is a lot faster than the default implementation.
+ // See http://vigna.di.unimi.it/ftp/papers/xorshift.pdf.
+ RngSource rand.Source
+
+ // Default is 0 (syntax.POSIX).
+ Flags syntax.Flags
+
+ // Maximum number of instances to generate for unbounded repeat expressions (e.g. ".*" and "{1,}")
+ // Default is DefaultMaxUnboundedRepeatCount.
+ MaxUnboundedRepeatCount uint
+ // Minimum number of instances to generate for unbounded repeat expressions (e.g. ".*")
+ // Default is 0.
+ MinUnboundedRepeatCount uint
+
+ // Set this to perform special processing of capture groups (e.g. `(\w+)`). The zero value will generate strings
+ // from the expressions in the group.
+ CaptureGroupHandler CaptureGroupHandler
+
+ // Used by generators.
+ rng *rand.Rand
+}
+
+func (a *GeneratorArgs) initialize() error {
+ var seed int64
+ if nil == a.RngSource {
+ seed = rand.Int63()
+ } else {
+ seed = a.RngSource.Int63()
+ }
+ rngSource := xorShift64Source(seed)
+ a.rng = rand.New(&rngSource)
+
+ // unicode groups only allowed with Perl
+ if (a.Flags&syntax.UnicodeGroups) == syntax.UnicodeGroups && (a.Flags&syntax.Perl) != syntax.Perl {
+ return generatorError(nil, "UnicodeGroups not supported")
+ }
+
+ if a.MaxUnboundedRepeatCount < 1 {
+ a.MaxUnboundedRepeatCount = DefaultMaxUnboundedRepeatCount
+ }
+
+ if a.MinUnboundedRepeatCount > a.MaxUnboundedRepeatCount {
+ panic(fmt.Sprintf("MinUnboundedRepeatCount(%d) > MaxUnboundedRepeatCount(%d)",
+ a.MinUnboundedRepeatCount, a.MaxUnboundedRepeatCount))
+ }
+
+ if a.CaptureGroupHandler == nil {
+ a.CaptureGroupHandler = defaultCaptureGroupHandler
+ }
+
+ return nil
+}
+
+// Rng returns the random number generator used by generators.
+// Panics if called before the GeneratorArgs has been initialized by NewGenerator.
+func (a *GeneratorArgs) Rng() *rand.Rand {
+ if a.rng == nil {
+ panic("GeneratorArgs has not been initialized by NewGenerator yet")
+ }
+ return a.rng
+}
+
+// Generator generates random strings.
+type Generator interface {
+ Generate() string
+ String() string
+}
+
+/*
+Generate a random string that matches the regular expression pattern.
+If args is nil, default values are used.
+
+This function does not seed the default RNG, so you must call rand.Seed() if you want
+non-deterministic strings.
+*/
+func Generate(pattern string) (string, error) {
+ generator, err := NewGenerator(pattern, nil)
+ if err != nil {
+ return "", err
+ }
+ return generator.Generate(), nil
+}
+
+// NewGenerator creates a generator that returns random strings that match the regular expression in pattern.
+// If args is nil, default values are used.
+func NewGenerator(pattern string, inputArgs *GeneratorArgs) (generator Generator, err error) {
+ args := GeneratorArgs{}
+
+ // Copy inputArgs so the caller can't change them.
+ if inputArgs != nil {
+ args = *inputArgs
+ }
+ if err = args.initialize(); err != nil {
+ return nil, err
+ }
+
+ var regexp *syntax.Regexp
+ regexp, err = syntax.Parse(pattern, args.Flags)
+ if err != nil {
+ return
+ }
+
+ var gen *internalGenerator
+ gen, err = newGenerator(regexp, &args)
+ if err != nil {
+ return
+ }
+
+ return gen, nil
+}
diff --git a/vendor/github.com/zach-klippenstein/goregen/regexp_format.go b/vendor/github.com/zach-klippenstein/goregen/regexp_format.go
new file mode 100644
index 000000000..3bc8e000b
--- /dev/null
+++ b/vendor/github.com/zach-klippenstein/goregen/regexp_format.go
@@ -0,0 +1,152 @@
+/*
+Copyright 2014 Zachary Klippenstein
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package regen
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "regexp/syntax"
+)
+
+// inspectRegexpToString returns a string describing a regular expression.
+func inspectRegexpToString(r *syntax.Regexp) string {
+ var buffer bytes.Buffer
+ inspectRegexpToWriter(&buffer, r)
+ return buffer.String()
+}
+
+// inspectPatternsToString returns a string describing one or more regular expressions.
+func inspectPatternsToString(simplify bool, patterns ...string) string {
+ var buffer bytes.Buffer
+ for _, pattern := range patterns {
+ inspectPatternsToWriter(simplify, &buffer, pattern)
+ }
+ return buffer.String()
+}
+func inspectPatternsToWriter(simplify bool, w io.Writer, patterns ...string) {
+ for _, pattern := range patterns {
+ inspectRegexpToWriter(w, parseOrPanic(simplify, pattern))
+ }
+}
+
+func inspectRegexpToWriter(w io.Writer, r ...*syntax.Regexp) {
+ for _, regexp := range r {
+ inspectWithIndent(regexp, "", w)
+ }
+}
+
+func inspectWithIndent(r *syntax.Regexp, indent string, w io.Writer) {
+ fmt.Fprintf(w, "%s{\n", indent)
+ fmt.Fprintf(w, "%s Op: %s\n", indent, opToString(r.Op))
+ fmt.Fprintf(w, "%s Flags: %x\n", indent, r.Flags)
+ if len(r.Sub) > 0 {
+ fmt.Fprintf(w, "%s Sub: [\n", indent)
+ for _, subR := range r.Sub {
+ inspectWithIndent(subR, indent+" ", w)
+ }
+ fmt.Fprintf(w, "%s ]\n", indent)
+ } else {
+ fmt.Fprintf(w, "%s Sub: []\n", indent)
+ }
+ fmt.Fprintf(w, "%s Rune: %s (%s)\n", indent, runesToString(r.Rune...), runesToDecimalString(r.Rune))
+ fmt.Fprintf(w, "%s [Min, Max]: [%d, %d]\n", indent, r.Min, r.Max)
+ fmt.Fprintf(w, "%s Cap: %d\n", indent, r.Cap)
+ fmt.Fprintf(w, "%s Name: %s\n", indent, r.Name)
+}
+
+// parseOrPanic parses a regular expression into an AST.
+// Panics on error.
+func parseOrPanic(simplify bool, pattern string) *syntax.Regexp {
+ regexp, err := syntax.Parse(pattern, 0)
+ if err != nil {
+ panic(err)
+ }
+ if simplify {
+ regexp = regexp.Simplify()
+ }
+ return regexp
+}
+
+// runesToString converts a slice of runes to the string they represent.
+func runesToString(runes ...rune) string {
+ defer func() {
+ if err := recover(); err != nil {
+ panic(fmt.Errorf("RunesToString panicked"))
+ }
+ }()
+ var buffer bytes.Buffer
+ for _, r := range runes {
+ buffer.WriteRune(r)
+ }
+ return buffer.String()
+}
+
+// runesToDecimalString converts a slice of runes to their comma-separated decimal values.
+func runesToDecimalString(runes []rune) string {
+ var buffer bytes.Buffer
+ for _, r := range runes {
+ buffer.WriteString(fmt.Sprintf("%d, ", r))
+ }
+ return buffer.String()
+}
+
+// opToString gets the string name of a regular expression operation.
+func opToString(op syntax.Op) string {
+ switch op {
+ case syntax.OpNoMatch:
+ return "OpNoMatch"
+ case syntax.OpEmptyMatch:
+ return "OpEmptyMatch"
+ case syntax.OpLiteral:
+ return "OpLiteral"
+ case syntax.OpCharClass:
+ return "OpCharClass"
+ case syntax.OpAnyCharNotNL:
+ return "OpAnyCharNotNL"
+ case syntax.OpAnyChar:
+ return "OpAnyChar"
+ case syntax.OpBeginLine:
+ return "OpBeginLine"
+ case syntax.OpEndLine:
+ return "OpEndLine"
+ case syntax.OpBeginText:
+ return "OpBeginText"
+ case syntax.OpEndText:
+ return "OpEndText"
+ case syntax.OpWordBoundary:
+ return "OpWordBoundary"
+ case syntax.OpNoWordBoundary:
+ return "OpNoWordBoundary"
+ case syntax.OpCapture:
+ return "OpCapture"
+ case syntax.OpStar:
+ return "OpStar"
+ case syntax.OpPlus:
+ return "OpPlus"
+ case syntax.OpQuest:
+ return "OpQuest"
+ case syntax.OpRepeat:
+ return "OpRepeat"
+ case syntax.OpConcat:
+ return "OpConcat"
+ case syntax.OpAlternate:
+ return "OpAlternate"
+ }
+
+ panic(fmt.Sprintf("invalid op: %d", op))
+}
diff --git a/vendor/github.com/zach-klippenstein/goregen/rng.go b/vendor/github.com/zach-klippenstein/goregen/rng.go
new file mode 100644
index 000000000..74194b25e
--- /dev/null
+++ b/vendor/github.com/zach-klippenstein/goregen/rng.go
@@ -0,0 +1,48 @@
+/*
+Copyright 2014 Zachary Klippenstein
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package regen
+
+/*
+The default Source implementation is very slow to seed. Replaced with a
+64-bit xor-shift source from http://vigna.di.unimi.it/ftp/papers/xorshift.pdf.
+This source seeds very quickly, and only uses a single variable, so concurrent
+modification by multiple goroutines is possible.
+
+To create a seeded source:
+ randSource := xorShift64Source(mySeed)
+
+To create a source with the default seed:
+ var randSource xorShift64Source
+*/
+type xorShift64Source uint64
+
+func (src *xorShift64Source) Seed(seed int64) {
+ *src = xorShift64Source(seed)
+}
+
+func (src *xorShift64Source) Int63() int64 {
+ // A zero seed will only generate zeros.
+ if *src == 0 {
+ *src = 1
+ }
+
+ *src ^= *src >> 12 // a
+ *src ^= *src << 25 // b
+ *src ^= *src >> 27 // c
+
+ return int64((*src * 2685821657736338717) >> 1)
+}
diff --git a/vendor/go.bug.st/downloader/.travis.yml b/vendor/go.bug.st/downloader/.travis.yml
new file mode 100644
index 000000000..2acfbd31f
--- /dev/null
+++ b/vendor/go.bug.st/downloader/.travis.yml
@@ -0,0 +1,14 @@
+language: go
+
+go:
+ - 1.11.x
+ - tip
+
+before_install:
+ - go get -t -v ./...
+
+script:
+ - go test -race -coverprofile=coverage.txt -covermode=atomic
+
+after_success:
+ - bash <(curl -s https://codecov.io/bash)
diff --git a/vendor/go.bug.st/downloader/LICENSE b/vendor/go.bug.st/downloader/LICENSE
new file mode 100644
index 000000000..05cd1e25a
--- /dev/null
+++ b/vendor/go.bug.st/downloader/LICENSE
@@ -0,0 +1,33 @@
+
+Copyright (c) 2018, Cristian Maglie.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
diff --git a/vendor/go.bug.st/downloader/README.md b/vendor/go.bug.st/downloader/README.md
new file mode 100644
index 000000000..47a74787a
--- /dev/null
+++ b/vendor/go.bug.st/downloader/README.md
@@ -0,0 +1,3 @@
+# go.bug.st/downloader [](https://travis-ci.org/bugst/go-downloader) [](https://codecov.io/gh/bugst/go-downloader)
+
+A simple HTTP/S file downloader for golang.
diff --git a/vendor/go.bug.st/downloader/downloader.go b/vendor/go.bug.st/downloader/downloader.go
new file mode 100644
index 000000000..c96a574b2
--- /dev/null
+++ b/vendor/go.bug.st/downloader/downloader.go
@@ -0,0 +1,171 @@
+//
+// Copyright 2018 Cristian Maglie. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+//
+
+package downloader
+
+import (
+ "fmt"
+ "io"
+ "net/http"
+ "os"
+ "sync"
+ "time"
+)
+
+// Downloader is an asynchronous downloader
+type Downloader struct {
+ URL string
+ Done chan bool
+ resp *http.Response
+ out *os.File
+ completed int64
+ completedLock sync.Mutex
+ size int64
+ err error
+}
+
+// DownloadOptions are optional flags that can be passed to Download function
+type DownloadOptions int
+
+const (
+ // NoResume will not try to resume a partial download
+ NoResume DownloadOptions = iota
+)
+
+// Close the download
+func (d *Downloader) Close() error {
+ err1 := d.out.Close()
+ err2 := d.resp.Body.Close()
+ if err1 != nil {
+ return fmt.Errorf("closing output file: %s", err1)
+ }
+ if err2 != nil {
+ return fmt.Errorf("closing input stream: %s", err2)
+ }
+ return nil
+}
+
+// Size returns the size of the download
+func (d *Downloader) Size() int64 {
+ return d.size
+}
+
+// RunAndPoll starts the downloader copy-loop and calls the poll function every
+// interval time to update progress.
+func (d *Downloader) RunAndPoll(poll func(current int64), interval time.Duration) error {
+ t := time.NewTicker(interval)
+ defer t.Stop()
+
+ go d.AsyncRun()
+ for {
+ select {
+ case <-t.C:
+ poll(d.Completed())
+ case <-d.Done:
+ poll(d.Completed())
+ return d.Error()
+ }
+ }
+}
+
+// AsyncRun starts the downloader copy-loop. This function is supposed to be run
+// on its own goroutine because it sends a confirmation on the Done channel
+func (d *Downloader) AsyncRun() {
+ in := d.resp.Body
+ buff := [4096]byte{}
+ for {
+ n, err := in.Read(buff[:])
+ if n > 0 {
+ d.out.Write(buff[:n])
+ d.completedLock.Lock()
+ d.completed += int64(n)
+ d.completedLock.Unlock()
+ }
+ if err == io.EOF {
+ break
+ }
+ if err != nil {
+ d.err = err
+ break
+ }
+ }
+ d.Close()
+ d.Done <- true
+}
+
+// Run starts the downloader and waits until it completes the download.
+func (d *Downloader) Run() error {
+ go d.AsyncRun()
+ <-d.Done
+ return d.Error()
+}
+
+// Error returns the error during download or nil if no errors happened
+func (d *Downloader) Error() error {
+ return d.err
+}
+
+// Completed returns the bytes read so far
+func (d *Downloader) Completed() int64 {
+ d.completedLock.Lock()
+ res := d.completed
+ d.completedLock.Unlock()
+ return res
+}
+
+// Download returns an asynchronous downloader that will download the specified url
+// in the specified file. A download resume is tried if a file shorter than the requested
+// url is already present.
+func Download(file string, url string, options ...DownloadOptions) (*Downloader, error) {
+ noResume := false
+ for _, opt := range options {
+ if opt == NoResume {
+ noResume = true
+ }
+ }
+ req, err := http.NewRequest("GET", url, nil)
+ if err != nil {
+ return nil, fmt.Errorf("setting up HTTP request: %s", err)
+ }
+
+ var completed int64
+ if !noResume {
+ if info, err := os.Stat(file); err == nil {
+ completed = info.Size()
+ req.Header.Set("Range", fmt.Sprintf("bytes=%d-", completed))
+ }
+ }
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ if err != nil {
+ return nil, err
+ }
+
+ // TODO: if file size == header size return nil, nil
+
+ flags := os.O_WRONLY
+ if completed == 0 {
+ flags |= os.O_CREATE
+ } else {
+ flags |= os.O_APPEND
+ }
+ f, err := os.OpenFile(file, flags, 0644)
+ if err != nil {
+ resp.Body.Close()
+ return nil, fmt.Errorf("opening %s for writing: %s", file, err)
+ }
+
+ d := &Downloader{
+ URL: url,
+ Done: make(chan bool),
+ resp: resp,
+ out: f,
+ completed: completed,
+ size: resp.ContentLength + completed,
+ }
+ return d, nil
+}
diff --git a/vendor/goa.design/goa/.gitignore b/vendor/goa.design/goa/.gitignore
new file mode 100644
index 000000000..a38998f8a
--- /dev/null
+++ b/vendor/goa.design/goa/.gitignore
@@ -0,0 +1,26 @@
+# Golang tools artifacts
+**/*.coverprofile
+**/*.test
+vendor
+
+# Executables and test outputs
+_integration_tests/*/**/*.*
+cmd/goagen/goagen
+
+# Editor / IDEs cruft
+.idea/
+*.iml
+.vscode/
+*~
+*.orig
+*.swp
+
+# OSes cruft
+.DS_Store
+
+# Example outputs
+/examples/account/cmd/*
+/examples/account/account
+/examples/cellar/cmd/cellarsvc/cellar.db
+examples/calc/cmd/calc/calc
+examples/calc/cmd/calc-cli/calc-cli
diff --git a/vendor/goa.design/goa/.golint_exclude b/vendor/goa.design/goa/.golint_exclude
new file mode 100644
index 000000000..d4423e03f
--- /dev/null
+++ b/vendor/goa.design/goa/.golint_exclude
@@ -0,0 +1,9 @@
+^examples/.*
+dsl/http.go
+expr/http_response.go
+codegen/service/testing/.*
+http/codegen/testing/.*
+http/middleware/trace.go
+http/middleware/xray/middleware.go
+http/middleware/xray/wrap_doer.go
+http/middleware/xray/wrap_doer_test.go
diff --git a/vendor/goa.design/goa/.travis.yml b/vendor/goa.design/goa/.travis.yml
new file mode 100644
index 000000000..45bef2925
--- /dev/null
+++ b/vendor/goa.design/goa/.travis.yml
@@ -0,0 +1,16 @@
+language: go
+go:
+- 1.10.x
+install:
+- export PATH=${PATH}:${HOME}/gopath/bin
+script:
+- export PATH=${PATH}:${HOME}/bin
+- cd ../../.. && mkdir goa.design && cp -r github.com/goadesign/goa goa.design/goa
+- cd goa.design/goa
+- make travis
+# https://graysonkoonce.com/getting-the-current-branch-name-during-a-pull-request-in-travis-ci/
+- export GOA_BRANCH=$(if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then echo $TRAVIS_BRANCH; else echo $TRAVIS_PULL_REQUEST_BRANCH; fi) && echo $GOA_BRANCH
+- make test-plugins
+notifications:
+ slack:
+ secure: bMYXaoSEGoNdqR0t1VnMAv/4V9PSOhEWyekdJM7p9WmKjJi2yKy0k77uRmwf+5Mrz5GLs3CkZnDha/8cSFld3KEN9SC6QYmIBF/1Pd/5mKHFQOI81i7sTlhrdMv897+6sofEtbBNq1jffhVGVttbMrMWwCTNZu0NrCGBVsDmb44=
diff --git a/vendor/goa.design/goa/CODE_OF_CONDUCT.md b/vendor/goa.design/goa/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..9414f221d
--- /dev/null
+++ b/vendor/goa.design/goa/CODE_OF_CONDUCT.md
@@ -0,0 +1,46 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at info@goa.design. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/goa.design/goa/LICENSE b/vendor/goa.design/goa/LICENSE
new file mode 100644
index 000000000..d4c1b519a
--- /dev/null
+++ b/vendor/goa.design/goa/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Raphael Simon and goa Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/goa.design/goa/Makefile b/vendor/goa.design/goa/Makefile
new file mode 100644
index 000000000..cdfb8d75f
--- /dev/null
+++ b/vendor/goa.design/goa/Makefile
@@ -0,0 +1,94 @@
+#! /usr/bin/make
+#
+# Makefile for goa v2
+#
+# Targets:
+# - "depend" retrieves the Go packages needed to run the linter and tests
+# - "lint" runs the linter and checks the code format using goimports
+# - "test" runs the tests
+#
+# Meta targets:
+# - "all" is the default target, it runs all the targets in the order above.
+#
+DIRS=$(shell go list -f {{.Dir}} goa.design/goa/expr/...)
+
+# Only list test and build dependencies
+# Standard dependencies are installed via go get
+DEPEND=\
+ github.com/sergi/go-diff/diffmatchpatch \
+ golang.org/x/lint/golint \
+ golang.org/x/tools/cmd/goimports \
+ github.com/hashicorp/go-getter \
+ github.com/cheggaaa/pb \
+ github.com/golang/protobuf/protoc-gen-go
+
+all: lint gen test
+
+travis: depend all
+
+# Install protoc
+GOOS=$(shell go env GOOS)
+PROTOC_VERSION="3.6.1"
+ifeq ($(GOOS),linux)
+PROTOC="protoc-$(PROTOC_VERSION)-linux-x86_64"
+PROTOC_EXEC="$(PROTOC)/bin/protoc"
+GOBIN="$(GOPATH)/bin"
+else
+ ifeq ($(GOOS),windows)
+PROTOC="protoc-$(PROTOC_VERSION)-win32"
+PROTOC_EXEC="$(PROTOC)\bin\protoc.exe"
+GOBIN="$(GOPATH)\bin"
+ endif
+endif
+depend:
+ @go get -v $(DEPEND)
+ @go install github.com/hashicorp/go-getter/cmd/go-getter && \
+ go-getter https://github.com/google/protobuf/releases/download/v$(PROTOC_VERSION)/$(PROTOC).zip $(PROTOC) && \
+ cp $(PROTOC_EXEC) $(GOBIN) && \
+ rm -r $(PROTOC)
+ @go install github.com/golang/protobuf/protoc-gen-go
+ @go get -t -v ./...
+
+lint:
+ @for d in $(DIRS) ; do \
+ if [ "`goimports -l $$d/*.go | tee /dev/stderr`" ]; then \
+ echo "^ - Repo contains improperly formatted go files" && echo && exit 1; \
+ fi \
+ done
+ @if [ "`golint ./... | grep -vf .golint_exclude | tee /dev/stderr`" ]; then \
+ echo "^ - Lint errors!" && echo && exit 1; \
+ fi
+
+gen:
+ @cd cmd/goa && \
+ go install && \
+ rm -rf $(GOPATH)/src/goa.design/goa/examples/calc/cmd && \
+ rm -rf $(GOPATH)/src/goa.design/goa/examples/cellar/cmd/cellar-cli && \
+ rm -rf $(GOPATH)/src/goa.design/goa/examples/chatter/cmd/chatter && \
+ rm -rf $(GOPATH)/src/goa.design/goa/examples/error/cmd && \
+ rm -rf $(GOPATH)/src/goa.design/goa/examples/security/cmd && \
+ goa gen goa.design/goa/examples/calc/design -o $(GOPATH)/src/goa.design/goa/examples/calc && \
+ goa example goa.design/goa/examples/calc/design -o $(GOPATH)/src/goa.design/goa/examples/calc && \
+ goa gen goa.design/goa/examples/cellar/design -o $(GOPATH)/src/goa.design/goa/examples/cellar && \
+ goa example goa.design/goa/examples/cellar/design -o $(GOPATH)/src/goa.design/goa/examples/cellar && \
+ goa gen goa.design/goa/examples/chatter/design -o $(GOPATH)/src/goa.design/goa/examples/chatter && \
+ goa example goa.design/goa/examples/chatter/design -o $(GOPATH)/src/goa.design/goa/examples/chatter && \
+ goa gen goa.design/goa/examples/error/design -o $(GOPATH)/src/goa.design/goa/examples/error && \
+ goa example goa.design/goa/examples/error/design -o $(GOPATH)/src/goa.design/goa/examples/error && \
+ goa gen goa.design/goa/examples/security/design -o $(GOPATH)/src/goa.design/goa/examples/security && \
+ goa example goa.design/goa/examples/security/design -o $(GOPATH)/src/goa.design/goa/examples/security
+
+test:
+ go test ./...
+
+test-plugins:
+ @if [ -z $(GOA_BRANCH) ]; then\
+ GOA_BRANCH=$$(git rev-parse --abbrev-ref HEAD); \
+ fi
+ @if [ ! -d "$(GOPATH)/src/goa.design/plugins" ]; then\
+ git clone https://github.com/goadesign/plugins.git $(GOPATH)/src/goa.design/plugins; \
+ fi
+ @cd $(GOPATH)/src/goa.design/plugins && git checkout $(GOA_BRANCH) || echo "Using master branch in plugins repo" && \
+ make -k || (echo "Tests in plugin repo (https://github.com/goadesign/plugins) failed" \
+ "due to changes in goa repo (branch: $(GOA_BRANCH))!" \
+ "Create a branch with name '$(GOA_BRANCH)' in the plugin repo and fix these errors." && exit 1)
diff --git a/vendor/goa.design/goa/README.md b/vendor/goa.design/goa/README.md
new file mode 100644
index 000000000..a886f022b
--- /dev/null
+++ b/vendor/goa.design/goa/README.md
@@ -0,0 +1,339 @@
+#
+
+
+goa is a framework for building micro-services and APIs in Go using a unique
+design-first approach.
+
+---
+[](https://travis-ci.org/goadesign/goa)
+[](https://ci.appveyor.com/project/RaphaelSimon/goa-oqtis/branch/master)
+[](https://sourcegraph.com/github.com/goadesign/goa?badge)
+[](https://godoc.org/goa.design/goa)
+[](https://gophers.slack.com/messages/goa/)
+
+## Overview
+
+goa takes a different approach to building services by making it possible to
+describe the *design* of the service API using a simple Go DSL. goa uses the
+description to generate specialized service helper code, client code and
+documentation. goa is extensible via plugins, for example the
+[goakit](https://github.com/goadesign/plugins/tree/master/goakit) plugin
+generates code that leverages the [go-kit](https://github.com/go-kit/kit)
+library.
+
+The service design describes the transport independent layer of the services in
+the form of simple methods that accept a context and a payload and return a
+result and an error. The design also describes how the payloads, results and
+errors are serialized in the transport (HTTP or gRPC). For example a service
+method payload may be built from an HTTP request by extracting values from the
+request path, headers and body. This clean separation of layers makes it
+possible to expose the same service using multiple transports. It also promotes
+good design where the service business logic concerns are expressed and
+implemented separately from the transport logic.
+
+The goa DSL consists of Go functions so that it may be extended easily to avoid
+repetition and promote standards. The design code itself can easily be shared
+across multiple services by simply importing the corresponding Go package again
+promoting reuse and standardization across service boundaries.
+
+## Code Generation
+
+The goa tool accepts the Go design package import path as input and produces the
+interface as well as the glue that binds the service and client code with the
+underlying transport. The code is specific to the API so that for example there
+is no need to cast or "bind" any data structure prior to using the request
+payload or response result. The design may define validations in which case the
+generated code takes care of validating the incoming request payload prior to
+invoking the service method on the server, and validating the response prior to
+invoking the client code.
+
+## Installation
+
+Assuming you have a working [Go](https://golang.org) setup:
+
+``` bash
+go get -u goa.design/goa/...
+```
+
+### Vendoring
+
+Since goa generates and compiles code vendoring tools are not able to
+automatically identify all the dependencies. In particular the `generator`
+package is only used by the generated code. To alleviate this issue simply add
+`goa.design/goa/codegen/generator` as a required package to the vendor manifest.
+For example if you are using `dep` add the following line to `Gopkg.toml`:
+
+``` toml
+required = ["goa.design/goa/codegen/generator"]
+```
+
+### Stable Versions
+
+goa follows [Semantic Versioning](http://semver.org/) which is a fancy way of
+saying it publishes releases with version numbers of the form `vX.Y.Z` and makes
+sure that your code can upgrade to new versions with the same `X` component
+without having to make changes.
+
+Releases are tagged with the corresponding version number. There is also a
+branch for each major version (`v1` and `v2`). The recommended practice is to
+vendor the stable branch.
+
+Current Release: `v2.0.0-wip`
+
+## Teaser
+
+### 1. Design
+
+Create the file `$GOPATH/src/calcsvc/design/design.go` with the following
+content:
+
+```go
+package design
+
+import . "goa.design/goa/dsl"
+
+// API describes the global properties of the API server.
+var _ = API("calc", func() {
+ Title("Calculator Service")
+ Description("HTTP service for adding numbers, a goa teaser")
+})
+
+// Service describes a service
+var _ = Service("calc", func() {
+ Description("The calc service performs operations on numbers")
+ // Method describes a service method (endpoint)
+ Method("add", func() {
+ // Payload describes the method payload
+ // Here the payload is an object that consists of two fields
+ Payload(func() {
+ // Attribute describes an object field
+ Attribute("a", Int, "Left operand")
+ Attribute("b", Int, "Right operand")
+ // Both attributes must be provided when invoking "add"
+ Required("a", "b")
+ })
+ // Result describes the method result
+ // Here the result is a simple integer value
+ Result(Int)
+ // HTTP describes the HTTP transport mapping
+ HTTP(func() {
+ // Requests to the service consist of HTTP GET requests
+ // The payload fields are encoded as path parameters
+ GET("/add/{a}/{b}")
+ // Responses use a "200 OK" HTTP status
+ // The result is encoded in the response body
+ Response(StatusOK)
+ })
+ })
+})
+```
+
+This file contains the design for a `calc` service which accepts HTTP GET
+requests to `/add/{a}/{b}` where `{a}` and `{b}` are placeholders for integer
+values. The API returns the sum of `a` and `b` in the HTTP response body.
+
+### 2. Implement
+
+Now that the design is done, let's run `goa` on the design package:
+
+``` bash
+cd $GOPATH/src/calcsvc
+goa gen calcsvc/design
+```
+
+This produces a `gen` directory with the following directory structure:
+
+``` text
+gen
+├── calc
+│ ├── client.go
+│ ├── endpoints.go
+│ └── service.go
+└── http
+ ├── calc
+ │ ├── client
+ │ │ ├── client.go
+ │ │ ├── cli.go
+ │ │ ├── encode_decode.go
+ │ │ ├── paths.go
+ │ │ └── types.go
+ │ └── server
+ │ ├── encode_decode.go
+ │ ├── paths.go
+ │ ├── server.go
+ │ └── types.go
+ ├── cli
+ │ └── cli.go
+ └── openapi.json
+
+6 directories, 14 files
+```
+
+* `calc` contains the service endpoints and interface as well as a service
+ client.
+* `http` contains the HTTP transport layer. This layer maps the service
+ endpoints to HTTP handlers server side and HTTP client methods client side.
+ The `http` directory also contains a complete
+ [OpenAPI 2.0](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md)
+ spec of the service.
+
+The `goa` tool can also generate example implementations for both the service
+and client. These examples provide a good starting point:
+
+``` text
+goa example calcsvc/design
+calc.go
+cmd/calccli/main.go
+cmd/calcsvc/main.go
+```
+
+The tool generated the `main` functions for two commands: one that runs the
+server and one the client. The tool also generated a dummy service
+implementation that prints a log message. Again note that the `example` command
+is intended to generate just that: an *example*, in particular it is not
+intended to be re-run each time the design changes (as opposed to the `gen`
+command which should be re-run each time the design changes).
+
+Let's implement our service by providing a proper implementation for the `add`
+method. goa generated a payload struct for the `add` method that contains both
+fields. goa also generated the transport layer that takes care of decoding the
+request so all we have to do is to perform the actual sum. Edit the file
+`calc.go` and change the code of the `add` function as follows:
+
+```go
+// Add returns the sum of attributes a and b of p.
+func (s *calcsvcSvc) Add(ctx context.Context, p *calcsvc.AddPayload) (int, error) {
+ return p.A + p.B, nil
+}
+```
+
+That's it! we have now a full-fledged HTTP service with a corresponding OpenAPI
+specification and a client tool.
+
+### 3. Run
+
+Now let's compile and run the service:
+
+``` bash
+cd $GOPATH/src/calcsvc/cmd/calcsvc
+go build
+./calcsvc
+[calc] 04:27:45 [INFO] service "calc" method "Add" mounted on GET /add/{a}/{b}
+[calc] 04:27:45 [INFO] listening on :8080
+```
+
+Open a new console and compile the generated CLI tool:
+
+``` bash
+cd $GOPATH/src/calcsvc/cmd/calccli
+go build
+```
+
+and run it:
+
+``` bash
+./calccli -a 1 -b 2
+3
+```
+
+The tool includes contextual help:
+
+``` bash
+./calccli --help
+```
+
+Help is also available on each command:
+
+``` bash
+./calccli calc add --help
+```
+
+Now let's see how robust our code is and try to use non integer values:
+
+``` bash
+./calccli calc add -a 1 -b foo
+invalid value for b, must be INT
+run './calccli --help' for detailed usage.
+```
+
+The generated code validates the command line arguments against the types
+defined in the design. The server also validates the types when decoding
+incoming requests so that your code only has to deal with the business logic.
+
+### 4. Document
+
+The `http` directory contains the OpenAPI 2.0 specification in both YAML and
+JSON format.
+
+The specification can easily be served from the service itself using a file
+server. The [Files](http://godoc.org/goa.design/goa/dsl/http.go#Files) DSL
+function makes it possible to serve static files. Edit the file
+`design/design.go` and add:
+
+```go
+var _ = Service("openapi", func() {
+ // Serve the file with relative path ../../gen/http/openapi.json for
+ // requests sent to /swagger.json.
+ Files("/swagger.json", "../../gen/http/openapi.json")
+})
+```
+
+Re-run `goa gen calcsvc/design` and note the new directory `gen/openapi`
+containing the implementation for a HTTP handler that serves the `openapi.json`
+file.
+
+All we need to do is mount the handler on the service mux. Add the corresponding
+import statement to `cmd/calcsvc/main.go`:
+
+```go
+import openapisvr "calcsvc/gen/http/openapi/server"
+```
+
+and mount the handler by adding the following line in the same file and after
+the mux creation (e.g. on the line after the `// Configure the mux.` comment):
+
+```go
+openapisvr.Mount(mux)
+```
+
+That's it! we now have a self-documenting service. Stop the running service
+with CTRL-C. Rebuild and re-run it then make requests to the newly added
+`/swagger.json` endpoint:
+
+``` bash
+^C[calc] 05:04:28 exiting (interrupt)
+[calc] 05:04:28 exited
+go build
+./calcsvc
+```
+
+In a different console:
+
+``` bash
+curl localhost:8080/swagger.json
+{"swagger":"2.0","info":{"title":"Calculator Service","description":...
+```
+
+## Resources
+
+Consult the following resources to learn more about goa.
+
+### Docs
+
+The [Getting Started Guide](https://github.com/goadesign/goa/blob/v2/docs/Guide.md) is
+a great place to start.
+
+There is also a [FAQ](https://github.com/goadesign/goa/blob/v2/docs/FAQ.md) and
+a document describing
+[error handling](https://github.com/goadesign/goa/blob/v2/docs/ErrorHandling.md).
+
+### Examples
+
+The [examples](https://github.com/goadesign/goa/tree/v2/examples) directory
+contains simple examples illustrating basic concepts.
+
+## Contributing
+
+Did you fix a bug? write docs or additional tests? or implement some new awesome
+functionality? You're a rock star!! Just make sure that `make` succeeds (or that
+TravisCI is green) and send a PR over.
diff --git a/vendor/goa.design/goa/appveyor.yml b/vendor/goa.design/goa/appveyor.yml
new file mode 100644
index 000000000..a502e9476
--- /dev/null
+++ b/vendor/goa.design/goa/appveyor.yml
@@ -0,0 +1,21 @@
+version: "{build}"
+
+os: Windows Server 2012 R2
+
+environment:
+ GOPATH: C:\gopath
+
+clone_folder: c:\gopath\src\goa.design\goa
+
+install:
+ - go version
+ - copy c:\MinGW\bin\mingw32-make.exe c:\MinGW\bin\make.exe
+ - set PATH=%PATH%;c:\MinGW\bin;%GOPATH%\bin
+
+build_script:
+ - make depend
+ - make test
+
+test: off
+
+deploy: off
diff --git a/vendor/goa.design/goa/codegen/file.go b/vendor/goa.design/goa/codegen/file.go
new file mode 100644
index 000000000..f6bdcdbb0
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/file.go
@@ -0,0 +1,185 @@
+package codegen
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/format"
+ "go/parser"
+ "go/scanner"
+ "go/token"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strings"
+ "text/template"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/imports"
+)
+
+// Gendir is the name of the subdirectory of the output directory that contains
+// the generated files. This directory is wiped and re-written each time goa is
+// run.
+const Gendir = "gen"
+
+type (
+ // A File contains the logic to generate a complete file.
+ File struct {
+ // SectionTemplates is the list of file section templates in
+ // order of rendering.
+ SectionTemplates []*SectionTemplate
+ // Path returns the file path relative to the output directory.
+ Path string
+ // SkipExist indicates whether the file should be skipped if one
+ // already exists at the given path.
+ SkipExist bool
+ // FinalizeFunc is called after the file has been generated. It
+ // is given the absolute path to the file as argument.
+ FinalizeFunc func(string) error
+ }
+
+ // A SectionTemplate is a template and accompanying render data. The
+ // template format is described in the (stdlib) text/template package.
+ SectionTemplate struct {
+ // Name is the name reported when parsing the source fails.
+ Name string
+ // Source is used to create the text/template.Template that
+ // renders the section text.
+ Source string
+ // FuncMap lists the functions used to render the templates.
+ FuncMap map[string]interface{}
+ // Data used as input of template.
+ Data interface{}
+ }
+)
+
+// Section returns the section templates with the given name or nil if not found.
+func (f *File) Section(name string) []*SectionTemplate {
+ var sts []*SectionTemplate
+ for _, s := range f.SectionTemplates {
+ if s.Name == name {
+ sts = append(sts, s)
+ }
+ }
+ return sts
+}
+
+// Render executes the file section templates and writes the resulting bytes to
+// an output file. The path of the output file is computed by appending the file
+// path to dir. If a file already exists with the computed path then Render
+// appends the smallest integer value greater than 1 to make it unique. Render
+// returns the computed path.
+func (f *File) Render(dir string) (string, error) {
+ base, err := filepath.Abs(dir)
+ if err != nil {
+ return "", err
+ }
+ path := filepath.Join(base, f.Path)
+ if f.SkipExist {
+ if _, err = os.Stat(path); err == nil {
+ return "", nil
+ }
+ }
+
+ if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil {
+ return "", err
+ }
+
+ file, err := os.OpenFile(
+ path,
+ os.O_CREATE|os.O_APPEND|os.O_WRONLY,
+ 0644,
+ )
+ if err != nil {
+ return "", err
+ }
+ for _, s := range f.SectionTemplates {
+ if err := s.Write(file); err != nil {
+ return "", err
+ }
+ }
+ if err := file.Close(); err != nil {
+ return "", err
+ }
+
+ // Format Go source files
+ if filepath.Ext(path) == ".go" {
+ if err := finalizeGoSource(path); err != nil {
+ return "", err
+ }
+ }
+
+ // Run finalizer if any
+ if f.FinalizeFunc != nil {
+ if err := f.FinalizeFunc(path); err != nil {
+ return "", err
+ }
+ }
+
+ return path, nil
+}
+
+// Write writes the section to the given writer.
+func (s *SectionTemplate) Write(w io.Writer) error {
+ funcs := TemplateFuncs()
+ for k, v := range s.FuncMap {
+ funcs[k] = v
+ }
+ tmpl := template.Must(template.New(s.Name).Funcs(funcs).Parse(s.Source))
+ return tmpl.Execute(w, s.Data)
+}
+
+// finalizeGoSource removes unneeded imports from the given Go source file and
+// runs go fmt on it.
+func finalizeGoSource(path string) error {
+ // Make sure file parses and print content if it does not.
+ fset := token.NewFileSet()
+ file, err := parser.ParseFile(fset, path, nil, parser.ParseComments)
+ if err != nil {
+ content, _ := ioutil.ReadFile(path)
+ var buf bytes.Buffer
+ scanner.PrintError(&buf, err)
+ return fmt.Errorf("%s\n========\nContent:\n%s", buf.String(), content)
+ }
+
+ // Clean unused imports
+ imps := astutil.Imports(fset, file)
+ for _, group := range imps {
+ for _, imp := range group {
+ path := strings.Trim(imp.Path.Value, `"`)
+ if !astutil.UsesImport(file, path) {
+ if imp.Name != nil {
+ astutil.DeleteNamedImport(fset, file, imp.Name.Name, path)
+ } else {
+ astutil.DeleteImport(fset, file, path)
+ }
+ }
+ }
+ }
+ ast.SortImports(fset, file)
+ w, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
+ if err != nil {
+ return err
+ }
+ if err := format.Node(w, fset, file); err != nil {
+ return err
+ }
+ w.Close()
+
+ // Format code using goimport standard
+ bs, err := ioutil.ReadFile(path)
+ if err != nil {
+ return err
+ }
+ opt := imports.Options{
+ Comments: true,
+ FormatOnly: true,
+ }
+ bs, err = imports.Process(path, bs, &opt)
+ if err != nil {
+ return err
+ }
+ return ioutil.WriteFile(path, bs, os.ModePerm)
+}
diff --git a/vendor/goa.design/goa/codegen/funcs.go b/vendor/goa.design/goa/codegen/funcs.go
new file mode 100644
index 000000000..edf84998a
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/funcs.go
@@ -0,0 +1,330 @@
+package codegen
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "strings"
+ "unicode"
+
+ "goa.design/goa/pkg"
+)
+
+// TemplateFuncs lists common template helper functions.
+func TemplateFuncs() map[string]interface{} {
+ return map[string]interface{}{
+ "commandLine": CommandLine,
+ "comment": Comment,
+ }
+}
+
+// CheckVersion returns an error if the ver is empty, contains an incorrect value or
+// a version number that is not compatible with the version of this repo.
+func CheckVersion(ver string) error {
+ compat, err := pkg.Compatible(ver)
+ if err != nil {
+ return err
+ }
+ if !compat {
+ return fmt.Errorf("version mismatch: using goa %s to generate code that compiles with goa %s",
+ ver, pkg.Version())
+ }
+ return nil
+}
+
+// CommandLine return the command used to run this process.
+func CommandLine() string {
+ cmdl := "$ goa"
+ for _, arg := range os.Args {
+ if strings.HasPrefix(arg, "--cmd=") {
+ cmdl = arg[6:]
+ break
+ }
+ }
+ return cmdl
+}
+
+// Comment produces line comments by concatenating the given strings and producing 80 characters
+// long lines starting with "//"
+func Comment(elems ...string) string {
+ var lines []string
+ for _, e := range elems {
+ lines = append(lines, strings.Split(e, "\n")...)
+ }
+ var trimmed = make([]string, len(lines))
+ for i, l := range lines {
+ trimmed[i] = strings.TrimLeft(l, " \t")
+ }
+ t := strings.Join(trimmed, "\n")
+
+ return Indent(WrapText(t, 77), "// ")
+}
+
+// Indent inserts prefix at the beginning of each non-empty line of s. The
+// end-of-line marker is NL.
+func Indent(s, prefix string) string {
+ var (
+ res []byte
+ b = []byte(s)
+ p = []byte(prefix)
+ bol = true
+ )
+ for _, c := range b {
+ if bol && c != '\n' {
+ res = append(res, p...)
+ }
+ res = append(res, c)
+ bol = c == '\n'
+ }
+ return string(res)
+}
+
+// Add adds two integers and returns the sum of the two.
+func Add(a, b int) int { return a + b }
+
+// Casing exceptions
+var toLower = map[string]string{"OAuth": "oauth"}
+
+// CamelCase produces the CamelCase version of the given string. It removes any
+// non letter and non digit character.
+//
+// If firstUpper is true the first letter of the string is capitalized else
+// the first letter is in lowercase.
+// If acronym is true and a part of the string is a common acronym
+// then it keeps the part capitalized (firstUpper = true)
+// (e.g. APIVersion) or lowercase (firstUpper = false) (e.g. apiVersion).
+func CamelCase(name string, firstUpper bool, acronym bool) string {
+ if name == "" {
+ return ""
+ }
+
+ runes := []rune(name)
+ // remove trailing invalid identifiers (makes code below simpler)
+ runes = removeTrailingInvalid(runes)
+
+ // all characters are invalid
+ if len(runes) == 0 {
+ return ""
+ }
+
+ w, i := 0, 0 // index of start of word, scan
+ for i+1 <= len(runes) {
+ eow := false // whether we hit the end of a word
+
+ // remove leading invalid identifiers
+ runes = removeInvalidAtIndex(i, runes)
+
+ if i+1 == len(runes) {
+ eow = true
+ } else if !validIdentifier(runes[i]) {
+ // get rid of it
+ runes = append(runes[:i], runes[i+1:]...)
+ } else if runes[i+1] == '_' {
+ // underscore; shift the remainder forward over any run of underscores
+ eow = true
+ n := 1
+ for i+n+1 < len(runes) && runes[i+n+1] == '_' {
+ n++
+ }
+ copy(runes[i+1:], runes[i+n+1:])
+ runes = runes[:len(runes)-n]
+ } else if isLower(runes[i]) && !isLower(runes[i+1]) {
+ // lower->non-lower
+ eow = true
+ }
+ i++
+ if !eow {
+ continue
+ }
+
+ // [w,i] is a word.
+ word := string(runes[w:i])
+ // is it one of our initialisms?
+ if u := strings.ToUpper(word); acronym && commonInitialisms[u] {
+ if firstUpper {
+ u = strings.ToUpper(u)
+ } else if w == 0 {
+ u = strings.ToLower(u)
+ }
+
+ // All the common initialisms are ASCII,
+ // so we can replace the bytes exactly.
+ copy(runes[w:], []rune(u))
+ } else if w > 0 && strings.ToLower(word) == word {
+ // already all lowercase, and not the first word, so uppercase the first character.
+ runes[w] = unicode.ToUpper(runes[w])
+ } else if w == 0 && strings.ToLower(word) == word && firstUpper {
+ runes[w] = unicode.ToUpper(runes[w])
+ }
+ if w == 0 && !firstUpper {
+ runes[w] = unicode.ToLower(runes[w])
+ }
+ //advance to next word
+ w = i
+ }
+
+ return string(runes)
+}
+
+// SnakeCase produces the snake_case version of the given CamelCase string.
+func SnakeCase(name string) string {
+ for u, l := range toLower {
+ name = strings.Replace(name, u, l, -1)
+ }
+ var b bytes.Buffer
+ var lastUnderscore bool
+ ln := len(name)
+ if ln == 0 {
+ return ""
+ }
+ b.WriteRune(unicode.ToLower(rune(name[0])))
+ for i := 1; i < ln; i++ {
+ r := rune(name[i])
+ nextIsLower := false
+ if i < ln-1 {
+ n := rune(name[i+1])
+ nextIsLower = unicode.IsLower(n) && unicode.IsLetter(n)
+ }
+ if unicode.IsUpper(r) {
+ if !lastUnderscore && nextIsLower {
+ b.WriteRune('_')
+ lastUnderscore = true
+ }
+ b.WriteRune(unicode.ToLower(r))
+ } else {
+ b.WriteRune(r)
+ lastUnderscore = false
+ }
+ }
+ return b.String()
+}
+
+// KebabCase produces the kebab-case version of the given CamelCase string.
+func KebabCase(name string) string {
+ name = SnakeCase(name)
+ ln := len(name)
+ if name[ln-1] == '_' {
+ name = name[:ln-1]
+ }
+ return strings.Replace(name, "_", "-", -1)
+}
+
+// WrapText produces lines with text capped at maxChars
+// it will keep words intact and respects newlines.
+func WrapText(text string, maxChars int) string {
+ res := ""
+ lines := strings.Split(text, "\n")
+ for _, v := range lines {
+ runes := []rune(strings.TrimSpace(v))
+ for l := len(runes); l >= 0; l = len(runes) {
+ if maxChars >= l {
+ res = res + string(runes) + "\n"
+ break
+ }
+
+ i := runeSpacePosRev(runes[:maxChars])
+ if i == 0 {
+ i = runeSpacePos(runes)
+ }
+
+ res = res + string(runes[:i]) + "\n"
+ if l == i {
+ break
+ }
+ runes = runes[i+1:]
+ }
+ }
+ return res[:len(res)-1]
+}
+
+func runeSpacePosRev(r []rune) int {
+ for i := len(r) - 1; i > 0; i-- {
+ if unicode.IsSpace(r[i]) {
+ return i
+ }
+ }
+ return 0
+}
+
+func runeSpacePos(r []rune) int {
+ for i := 0; i < len(r); i++ {
+ if unicode.IsSpace(r[i]) {
+ return i
+ }
+ }
+ return len(r)
+}
+
+// isLower returns true if the character is considered a lower case character
+// when transforming word into CamelCase.
+func isLower(r rune) bool {
+ return unicode.IsDigit(r) || unicode.IsLower(r)
+}
+
+// validIdentifier returns true if the rune is a letter or number
+func validIdentifier(r rune) bool {
+ return unicode.IsLetter(r) || unicode.IsDigit(r)
+}
+
+// removeTrailingInvalid removes trailing invalid identifiers from runes.
+func removeTrailingInvalid(runes []rune) []rune {
+ valid := len(runes) - 1
+ for ; valid >= 0 && !validIdentifier(runes[valid]); valid-- {
+ }
+
+ return runes[0 : valid+1]
+}
+
+// removeInvalidAtIndex removes consecutive invalid identifiers from runes starting at index i.
+func removeInvalidAtIndex(i int, runes []rune) []rune {
+ valid := i
+ for ; valid < len(runes) && !validIdentifier(runes[valid]); valid++ {
+ }
+
+ return append(runes[:i], runes[valid:]...)
+}
+
+var (
+ // common initialisms that need to keep their conventional casing
+ commonInitialisms = map[string]bool{
+ "API": true,
+ "ASCII": true,
+ "CPU": true,
+ "CSS": true,
+ "DNS": true,
+ "EOF": true,
+ "GUID": true,
+ "HTML": true,
+ "HTTP": true,
+ "HTTPS": true,
+ "ID": true,
+ "IP": true,
+ "JMES": true,
+ "JSON": true,
+ "JWT": true,
+ "LHS": true,
+ "OK": true,
+ "QPS": true,
+ "RAM": true,
+ "RHS": true,
+ "RPC": true,
+ "SLA": true,
+ "SMTP": true,
+ "SQL": true,
+ "SSH": true,
+ "TCP": true,
+ "TLS": true,
+ "TTL": true,
+ "UDP": true,
+ "UI": true,
+ "UID": true,
+ "UUID": true,
+ "URI": true,
+ "URL": true,
+ "UTF8": true,
+ "VM": true,
+ "XML": true,
+ "XSRF": true,
+ "XSS": true,
+ }
+)
diff --git a/vendor/goa.design/goa/codegen/generator/docs.go b/vendor/goa.design/goa/codegen/generator/docs.go
new file mode 100644
index 000000000..2a556e336
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/generator/docs.go
@@ -0,0 +1,21 @@
+/*
+Package generator contains the code generation algorithms for a service server,
+client and OpenAPI specification.
+
+Server and Client
+
+The code generated for the service server and client includes:
+
+ - A `service' package that contains the declarations for the service
+ interfaces.
+ - An `endpoint' package that contains the declarations for the endpoints
+ which wrap the service methods.
+ - transport specific packages for each of the transports defined in the
+ design.
+
+OpenAPI
+
+The OpenAPI generator generates an OpenAPI v2 specification for the service
+REST endpoints. This generator requires the design to define the HTTP transport.
+*/
+package generator
diff --git a/vendor/goa.design/goa/codegen/generator/example.go b/vendor/goa.design/goa/codegen/generator/example.go
new file mode 100644
index 000000000..74778291f
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/generator/example.go
@@ -0,0 +1,72 @@
+package generator
+
+import (
+ "goa.design/goa/codegen"
+ "goa.design/goa/codegen/server"
+ "goa.design/goa/codegen/service"
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+ grpccodegen "goa.design/goa/grpc/codegen"
+ httpcodegen "goa.design/goa/http/codegen"
+)
+
+// Example iterates through the roots and returns files that implement an
+// example service and client.
+func Example(genpkg string, roots []eval.Root) ([]*codegen.File, error) {
+ var files []*codegen.File
+ for _, root := range roots {
+ r, ok := root.(*expr.RootExpr)
+ if !ok {
+ continue // could be a plugin root expression
+ }
+
+ // example service implementation
+ if fs := service.ExampleServiceFiles(genpkg, r); len(fs) != 0 {
+ files = append(files, fs...)
+ }
+
+ // example auth file
+ if f := service.AuthFuncsFile(genpkg, r); f != nil {
+ files = append(files, f)
+ }
+
+ // server main
+ if fs := server.ExampleServerFiles(genpkg, r); len(fs) != 0 {
+ files = append(files, fs...)
+ }
+
+ // CLI main
+ if fs := server.ExampleCLIFiles(genpkg, r); len(fs) != 0 {
+ files = append(files, fs...)
+ }
+
+ // HTTP
+ if len(r.API.HTTP.Services) > 0 {
+ svcs := make([]string, 0, len(r.API.HTTP.Services))
+ for _, s := range r.API.HTTP.Services {
+ svcs = append(svcs, s.Name())
+ }
+ if fs := httpcodegen.ExampleServerFiles(genpkg, r); len(fs) != 0 {
+ files = append(files, fs...)
+ }
+ if fs := httpcodegen.ExampleCLIFiles(genpkg, r); len(fs) != 0 {
+ files = append(files, fs...)
+ }
+ }
+
+ // GRPC
+ if len(r.API.GRPC.Services) > 0 {
+ svcs := make([]string, 0, len(r.API.GRPC.Services))
+ for _, s := range r.API.GRPC.Services {
+ svcs = append(svcs, s.Name())
+ }
+ if fs := grpccodegen.ExampleServerFiles(genpkg, r); len(fs) > 0 {
+ files = append(files, fs...)
+ }
+ if fs := grpccodegen.ExampleCLIFiles(genpkg, r); len(fs) > 0 {
+ files = append(files, fs...)
+ }
+ }
+ }
+ return files, nil
+}
diff --git a/vendor/goa.design/goa/codegen/generator/generate.go b/vendor/goa.design/goa/codegen/generator/generate.go
new file mode 100644
index 000000000..ee21fdd7d
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/generator/generate.go
@@ -0,0 +1,108 @@
+package generator
+
+import (
+ "go/build"
+ "os"
+ "path/filepath"
+ "sort"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/eval"
+)
+
+// Generate runs the code generation algorithms.
+func Generate(dir, cmd string) ([]string, error) {
+ // 1. Compute design roots.
+ var roots []eval.Root
+ {
+ rs, err := eval.Context.Roots()
+ if err != nil {
+ return nil, err
+ }
+ roots = rs
+ }
+
+ // 2. Compute "gen" package import path.
+ var genpkg string
+ {
+ base, err := filepath.Abs(dir)
+ if err != nil {
+ return nil, err
+ }
+ path := filepath.Join(base, codegen.Gendir)
+ if err := os.MkdirAll(path, 0777); err != nil {
+ return nil, err
+ }
+ pkg, err := build.ImportDir(path, build.FindOnly)
+ if err != nil {
+ return nil, err
+ }
+ genpkg = pkg.ImportPath
+ }
+
+ // 3. Retrieve goa generators for given command.
+ var genfuncs []Genfunc
+ {
+ gs, err := Generators(cmd)
+ if err != nil {
+ return nil, err
+ }
+ genfuncs = gs
+ }
+
+ // 4. Run the code pre generation plugins.
+ err := codegen.RunPluginsPrepare(cmd, genpkg, roots)
+ if err != nil {
+ return nil, err
+ }
+
+ // 5. Generate initial set of files produced by goa code generators.
+ var genfiles []*codegen.File
+ for _, gen := range genfuncs {
+ fs, err := gen(genpkg, roots)
+ if err != nil {
+ return nil, err
+ }
+ genfiles = append(genfiles, fs...)
+ }
+
+ // 6. Run the code generation plugins.
+ genfiles, err = codegen.RunPlugins(cmd, genpkg, roots, genfiles)
+ if err != nil {
+ return nil, err
+ }
+
+ // 7. Write the files.
+ written := make(map[string]struct{})
+ for _, f := range genfiles {
+ filename, err := f.Render(dir)
+ if err != nil {
+ return nil, err
+ }
+ if filename != "" {
+ written[filename] = struct{}{}
+ }
+ }
+
+ // 8. Compute all output filenames.
+ var outputs []string
+ {
+ outputs = make([]string, len(written))
+ cwd, err := os.Getwd()
+ if err != nil {
+ cwd = "."
+ }
+ i := 0
+ for o := range written {
+ rel, err := filepath.Rel(cwd, o)
+ if err != nil {
+ rel = o
+ }
+ outputs[i] = rel
+ i++
+ }
+ }
+ sort.Strings(outputs)
+
+ return outputs, nil
+}
diff --git a/vendor/goa.design/goa/codegen/generator/generators.go b/vendor/goa.design/goa/codegen/generator/generators.go
new file mode 100644
index 000000000..abe2d0298
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/generator/generators.go
@@ -0,0 +1,30 @@
+package generator
+
+import (
+ "fmt"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/eval"
+)
+
+// Genfunc is the type of the functions invoked to generate code.
+type Genfunc func(genpkg string, roots []eval.Root) ([]*codegen.File, error)
+
+// Generators returns the qualified paths (including the package name) to the
+// code generator functions for the given command, an error if the command is
+// not supported. Generators is a public variable so that external code (e.g.
+// plugins) may override the default generators.
+var Generators = generators
+
+// generators returns the generator functions exposed by the generator package
+// for the given command.
+func generators(cmd string) ([]Genfunc, error) {
+ switch cmd {
+ case "gen":
+ return []Genfunc{Service, Transport, OpenAPI}, nil
+ case "example":
+ return []Genfunc{Example}, nil
+ default:
+ return nil, fmt.Errorf("unknown command %q", cmd)
+ }
+}
diff --git a/vendor/goa.design/goa/codegen/generator/openapi.go b/vendor/goa.design/goa/codegen/generator/openapi.go
new file mode 100644
index 000000000..7c3967865
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/generator/openapi.go
@@ -0,0 +1,20 @@
+package generator
+
+import (
+ "goa.design/goa/codegen"
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+ httpcodegen "goa.design/goa/http/codegen"
+)
+
+// OpenAPI iterates through the roots and returns the files needed to render
+// the service OpenAPI spec. It returns nil if the roots slice does not
+// include an HTTP root.
+func OpenAPI(_ string, roots []eval.Root) ([]*codegen.File, error) {
+ for _, root := range roots {
+ if r, ok := root.(*expr.RootExpr); ok {
+ return httpcodegen.OpenAPIFiles(r)
+ }
+ }
+ return nil, nil
+}
diff --git a/vendor/goa.design/goa/codegen/generator/service.go b/vendor/goa.design/goa/codegen/generator/service.go
new file mode 100644
index 000000000..1e97c6814
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/generator/service.go
@@ -0,0 +1,43 @@
+package generator
+
+import (
+ "fmt"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/codegen/service"
+ "goa.design/goa/expr"
+ "goa.design/goa/eval"
+)
+
+// Service iterates through the roots and returns the files needed to render the
+// service code. It returns an error if the roots slice does not include a goa
+// design.
+func Service(genpkg string, roots []eval.Root) ([]*codegen.File, error) {
+ var files []*codegen.File
+ for _, root := range roots {
+ switch r := root.(type) {
+ case *expr.RootExpr:
+ for _, s := range r.Services {
+ // Make sure service is first so name scope is
+ // properly initialized.
+ files = append(files, service.File(genpkg, s))
+ files = append(files, service.EndpointFile(genpkg, s))
+ files = append(files, service.ClientFile(s))
+ if f := service.ViewsFile(genpkg, s); f != nil {
+ files = append(files, f)
+ }
+ f, err := service.ConvertFile(r, s)
+ if err != nil {
+ return nil, err
+ }
+ if f != nil {
+ files = append(files, f)
+ }
+ }
+ }
+ }
+ if len(files) == 0 {
+ return nil, fmt.Errorf("design must define at least one service")
+ }
+ return files, nil
+}
diff --git a/vendor/goa.design/goa/codegen/generator/transport.go b/vendor/goa.design/goa/codegen/generator/transport.go
new file mode 100644
index 000000000..4247ebbb9
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/generator/transport.go
@@ -0,0 +1,44 @@
+package generator
+
+import (
+ "fmt"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+ grpccodegen "goa.design/goa/grpc/codegen"
+ httpcodegen "goa.design/goa/http/codegen"
+)
+
+// Transport iterates through the roots and returns the files needed to render
+// the transport code. It returns an error if the roots slice does not include
+// at least one transport design root.
+func Transport(genpkg string, roots []eval.Root) ([]*codegen.File, error) {
+ var files []*codegen.File
+ for _, root := range roots {
+ r, ok := root.(*expr.RootExpr)
+ if !ok {
+ continue // could be a plugin root expression
+ }
+
+ // HTTP
+ files = append(files, httpcodegen.ServerFiles(genpkg, r)...)
+ files = append(files, httpcodegen.ClientFiles(genpkg, r)...)
+ files = append(files, httpcodegen.ServerTypeFiles(genpkg, r)...)
+ files = append(files, httpcodegen.ClientTypeFiles(genpkg, r)...)
+ files = append(files, httpcodegen.PathFiles(r)...)
+ files = append(files, httpcodegen.ClientCLIFiles(genpkg, r)...)
+
+ // GRPC
+ files = append(files, grpccodegen.ProtoFiles(genpkg, r)...)
+ files = append(files, grpccodegen.ServerFiles(genpkg, r)...)
+ files = append(files, grpccodegen.ClientFiles(genpkg, r)...)
+ files = append(files, grpccodegen.ServerTypeFiles(genpkg, r)...)
+ files = append(files, grpccodegen.ClientTypeFiles(genpkg, r)...)
+ files = append(files, grpccodegen.ClientCLIFiles(genpkg, r)...)
+ }
+ if len(files) == 0 {
+ return nil, fmt.Errorf("transport: no HTTP design found")
+ }
+ return files, nil
+}
diff --git a/vendor/goa.design/goa/codegen/go_transform.go b/vendor/goa.design/goa/codegen/go_transform.go
new file mode 100644
index 000000000..52fca6be5
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/go_transform.go
@@ -0,0 +1,719 @@
+package codegen
+
+import (
+ "bytes"
+ "fmt"
+ "strings"
+ "text/template"
+
+ "goa.design/goa/expr"
+)
+
+var (
+ // transformGoArrayT is the template to generate Go array transformation
+ // code.
+ transformGoArrayT *template.Template
+ // transformGoMapT is the template to generate Go map transformation
+ // code.
+ transformGoMapT *template.Template
+)
+
+// NOTE: can't initialize inline because https://github.com/golang/go/issues/1817
+func init() {
+ transformGoArrayT = template.Must(template.New("transformGoArray").Funcs(template.FuncMap{
+ "transformAttribute": transformAttributeHelper,
+ "loopVar": arrayLoopVar,
+ }).Parse(transformGoArrayTmpl))
+ transformGoMapT = template.Must(template.New("transformGoMap").Funcs(template.FuncMap{
+ "transformAttribute": transformAttributeHelper,
+ "loopVar": mapLoopVar,
+ }).Parse(transformGoMapTmpl))
+}
+
+type (
+ // GoAttribute represents an attribute type that produces Go code.
+ GoAttribute struct {
+ // Attribute is the underlying attribute expression.
+ Attribute *expr.AttributeExpr
+ // Pkg is the package name where the attribute type exists.
+ Pkg string
+ // NameScope is the named scope to produce unique reference to the attribute.
+ NameScope *NameScope
+ }
+
+ // goTransformer is a Transformer that generates Go code for converting a
+ // data structure represented as an attribute expression into a different data
+ // structure also represented as an attribute expression.
+ goTransformer struct {
+ // helperPrefix is the prefix for the helper functions generated during
+ // the transformation. The helper functions are named after the pattern
+ // "<prefix><SourceTypeName>To<TargetTypeName>". If no prefix is
+ // specified, "transform" is used as a prefix by default.
+ helperPrefix string
+ }
+)
+
+// NewGoAttribute returns an attribute that produces Go code.
+func NewGoAttribute(att *expr.AttributeExpr, pkg string, scope *NameScope) Attributor {
+ return &GoAttribute{
+ Attribute: att,
+ Pkg: pkg,
+ NameScope: scope,
+ }
+}
+
+// GoTransform produces Go code that initializes the data structure defined
+// by target from an instance of the data structure described by source.
+// The data structures can be objects, arrays or maps. The algorithm
+// matches object fields by name and ignores object fields in target that
+// don't have a match in source. The matching and generated code leverage
+// mapped attributes so that attribute names may use the "name:elem"
+// syntax to define the name of the design attribute and the name of the
+// corresponding generated Go struct field. The function returns an error
+// if target is not compatible with source (different type, fields of
+// different type etc).
+//
+// source and target are the attributes used in the transformation
+//
+// sourceVar and targetVar are the variable names used in the transformation
+//
+// prefix is the prefix used in the names of the generated transformation
+// helper functions
+//
+func GoTransform(source, target *ContextualAttribute, sourceVar, targetVar, prefix string) (string, []*TransformFunctionData, error) {
+ t := &goTransformer{helperPrefix: prefix}
+
+ // Generate the main transformation code.
+ code, err := t.Transform(source, target, &TransformAttrs{SourceVar: sourceVar, TargetVar: targetVar, NewVar: true})
+ if err != nil {
+ return "", nil, err
+ }
+
+ // Collect the helper functions referenced by the generated code.
+ funcs, err := GoTransformHelpers(source, target, t, prefix)
+ if err != nil {
+ return "", nil, err
+ }
+
+ return strings.TrimRight(code, "\n"), funcs, nil
+}
+
+// GoObjectTransform produces Go code that initializes the data structure
+// defined by target object type from an instance of the data structure
+// defined by source object type. The algorithm matches object fields by
+// name and ignores object fields in target that don't have a match in source.
+// The matching and generated code leverage mapped attributes so that attribute
+// names may use the "name:elem" syntax to define the name of the design
+// attribute and the name of the corresponding generated Go struct field.
+// The function returns an error if source or target are not object types
+// or have fields of different types.
+//
+// source and target are the attributes of object type used in the
+// transformation
+//
+// ta is the transform attributes used in the transformation code
+//
+// t is the transformer used to transform source to target
+//
+func GoObjectTransform(source, target *ContextualAttribute, ta *TransformAttrs, t Transformer) (string, error) {
+ if t := source.Attribute.Expr().Type; !expr.IsObject(t) {
+ return "", fmt.Errorf("source is not an object type: received %T", t)
+ }
+ if t := target.Attribute.Expr().Type; !expr.IsObject(t) {
+ return "", fmt.Errorf("target is not an object type: received %T", t)
+ }
+ var (
+ initCode string
+ postInitCode string
+ )
+ {
+ // iterate through the primitive attributes to build the struct literal
+ walkMatches(source, target, func(srcMatt, tgtMatt *expr.MappedAttributeExpr, srcc, tgtc *ContextualAttribute, n string) {
+ if !expr.IsPrimitive(srcc.Attribute.Expr().Type) {
+ return
+ }
+ srcField := ta.SourceVar + "." + srcc.Attribute.Field(srcMatt.ElemName(n), true)
+ tgtField := tgtc.Attribute.Field(tgtMatt.ElemName(n), true)
+ srcPtr := srcc.IsPointer()
+ tgtPtr := tgtc.IsPointer()
+ srcFieldConv := t.ConvertType(srcc.Attribute, tgtc.Attribute, srcField)
+ switch {
+ case srcPtr && !tgtPtr:
+ srcFieldConv = t.ConvertType(srcc.Attribute, tgtc.Attribute, "*"+srcField)
+ if !srcc.Required {
+ postInitCode += fmt.Sprintf("if %s != nil {\n\t%s.%s = %s\n}\n", srcField, ta.TargetVar, tgtField, srcFieldConv)
+ return
+ }
+ case !srcPtr && tgtPtr:
+ if srcField != srcFieldConv {
+ // type conversion required. Add it in post-init code.
+ tgtName := tgtc.Attribute.Field(tgtMatt.ElemName(n), false)
+ postInitCode += fmt.Sprintf("%sptr := %s\n%s.%s = &%sptr\n", tgtName, srcFieldConv, ta.TargetVar, tgtField, tgtName)
+ return
+ }
+ srcFieldConv = fmt.Sprintf("&%s", srcField)
+ case srcPtr && tgtPtr:
+ srcFieldConv = t.ConvertType(srcc.Attribute, tgtc.Attribute, "*"+srcField)
+ if "*"+srcField != srcFieldConv {
+ // type conversion required. Add it in post-init code.
+ tgtName := tgtc.Attribute.Field(tgtMatt.ElemName(n), false)
+ postInitCode += fmt.Sprintf("%sptr := %s\n%s.%s = &%sptr\n", tgtName, srcFieldConv, ta.TargetVar, tgtField, tgtName)
+ return
+ }
+ srcFieldConv = srcField
+ }
+ initCode += fmt.Sprintf("\n%s: %s,", tgtField, srcFieldConv)
+ })
+ if initCode != "" {
+ initCode += "\n"
+ }
+ }
+
+ buffer := &bytes.Buffer{}
+ deref := "&"
+ // if the target is a raw struct there is no need to return a pointer
+ if _, ok := target.Attribute.Expr().Type.(*expr.Object); ok {
+ deref = ""
+ }
+ assign := "="
+ if ta.NewVar {
+ assign = ":="
+ }
+ buffer.WriteString(fmt.Sprintf("%s %s %s%s{%s}\n", ta.TargetVar, assign, deref, target.Attribute.Name(), initCode))
+ buffer.WriteString(postInitCode)
+
+ // iterate through the non-primitive attributes to initialize the rest of
+ // the struct fields
+ var err error
+ walkMatches(source, target, func(srcMatt, tgtMatt *expr.MappedAttributeExpr, srcc, tgtc *ContextualAttribute, n string) {
+ var (
+ code string
+
+ newTA = &TransformAttrs{
+ SourceVar: ta.SourceVar + "." + srcc.Attribute.Field(srcMatt.ElemName(n), true),
+ TargetVar: ta.TargetVar + "." + tgtc.Attribute.Field(tgtMatt.ElemName(n), true),
+ NewVar: false,
+ }
+ )
+ {
+ if srcc, tgtc, newTA, err = t.MakeCompatible(srcc, tgtc, newTA, ""); err != nil {
+ return
+ }
+ srccAtt := srcc.Attribute.Expr()
+ _, ok := srccAtt.Type.(expr.UserType)
+ switch {
+ case expr.IsArray(srccAtt.Type):
+ code, err = t.TransformArray(srcc, tgtc, newTA)
+ case expr.IsMap(srccAtt.Type):
+ code, err = t.TransformMap(srcc, tgtc, newTA)
+ case ok:
+ code = fmt.Sprintf("%s = %s\n", newTA.TargetVar, t.ConvertType(srcc.Attribute, tgtc.Attribute, newTA.SourceVar))
+ case expr.IsObject(srccAtt.Type):
+ code, err = t.Transform(srcc, tgtc, newTA)
+ }
+ }
+ if err != nil {
+ return
+ }
+
+ // We need to check for a nil source if it holds a reference (pointer to
+ // primitive or an object, array or map) and is not required. We also want
+ // to always check nil if the attribute is not a primitive; it's a
+ // 1) user type and we want to avoid calling transform helper functions
+ // with nil value
+ // 2) it's an object, map or array to avoid making empty arrays and maps
+ // and to avoid dereferencing nil.
+ var checkNil bool
+ {
+ checkNil = srcc.IsPointer()
+ if !checkNil && !expr.IsPrimitive(srcc.Attribute.Expr().Type) {
+ if !srcc.Required && srcc.DefaultValue() == nil {
+ checkNil = true
+ }
+ }
+ }
+ if code != "" && checkNil {
+ code = fmt.Sprintf("if %s != nil {\n\t%s}\n", newTA.SourceVar, code)
+ }
+
+ // Default value handling. We need to handle default values if the target
+ // type uses default values (i.e. attributes with default values are
+ // non-pointers) and has a default value set.
+ if tdef := tgtc.DefaultValue(); tdef != nil {
+ if srcc.IsPointer() {
+ code += fmt.Sprintf("if %s == nil {\n\t", newTA.SourceVar)
+ if tgtc.IsPointer() {
+ code += fmt.Sprintf("var tmp %s = %#v\n\t%s = &tmp\n", tgtc.Def(), tdef, newTA.TargetVar)
+ } else {
+ code += fmt.Sprintf("%s = %#v\n", newTA.TargetVar, tdef)
+ }
+ code += "}\n"
+ }
+ }
+ buffer.WriteString(code)
+ })
+ if err != nil {
+ return "", err
+ }
+
+ return buffer.String(), nil
+}
+
+// GoTransformHelpers returns the Go transform functions and their definitions
+// that may be used in code produced by Transform. It returns an error if source and
+// target are incompatible (different types, fields of different type etc).
+//
+// source, target are the source and target attributes used in transformation
+//
+// t is the transformer used in the transformation
+//
+// prefix is the function name prefix
+//
+// seen keeps track of generated transform functions to avoid recursion
+//
+func GoTransformHelpers(source, target *ContextualAttribute, t Transformer, prefix string, seen ...map[string]*TransformFunctionData) ([]*TransformFunctionData, error) {
+ var (
+ err error
+
+ ta = &TransformAttrs{}
+ )
+ if source, target, ta, err = t.MakeCompatible(source, target, ta, ""); err != nil {
+ return nil, err
+ }
+
+ var (
+ helpers []*TransformFunctionData
+
+ sourceType = source.Attribute.Expr().Type
+ targetType = target.Attribute.Expr().Type
+ )
+ {
+ // Do not generate a transform function for the top-most user type.
+ switch {
+ case expr.IsArray(sourceType):
+ source = source.Dup(expr.AsArray(sourceType).ElemType, true)
+ target = target.Dup(expr.AsArray(targetType).ElemType, true)
+ helpers, err = GoTransformHelpers(source, target, t, prefix, seen...)
+ case expr.IsMap(sourceType):
+ sm := expr.AsMap(sourceType)
+ tm := expr.AsMap(targetType)
+ source = source.Dup(sm.ElemType, true)
+ target = target.Dup(tm.ElemType, true)
+ helpers, err = GoTransformHelpers(source, target, t, prefix, seen...)
+ if err == nil {
+ var other []*TransformFunctionData
+ source = source.Dup(sm.KeyType, true)
+ target = target.Dup(tm.KeyType, true)
+ other, err = GoTransformHelpers(source, target, t, prefix, seen...)
+ helpers = append(helpers, other...)
+ }
+ case expr.IsObject(sourceType):
+ walkMatches(source, target, func(srcMatt, tgtMatt *expr.MappedAttributeExpr, srcc, tgtc *ContextualAttribute, n string) {
+ if err != nil {
+ return
+ }
+ if srcc, tgtc, ta, err = t.MakeCompatible(srcc, tgtc, ta, ""); err != nil {
+ return
+ }
+ h, err2 := collectHelpers(srcc, tgtc, t, prefix, seen...)
+ if err2 != nil {
+ err = err2
+ return
+ }
+ helpers = append(helpers, h...)
+ })
+ }
+ }
+ if err != nil {
+ return nil, err
+ }
+ return helpers, nil
+}
+
+// Name returns a valid Go type name for the attribute.
+func (g *GoAttribute) Name() string {
+ return g.NameScope.GoFullTypeName(g.Attribute, g.Pkg)
+}
+
+// Ref returns a valid Go reference to the attribute.
+func (g *GoAttribute) Ref() string {
+ return g.NameScope.GoFullTypeRef(g.Attribute, g.Pkg)
+}
+
+// Scope returns the name scope.
+func (g *GoAttribute) Scope() *NameScope {
+ return g.NameScope
+}
+
+// Expr returns the underlying attribute expression.
+func (g *GoAttribute) Expr() *expr.AttributeExpr {
+ return g.Attribute
+}
+
+// Dup creates a copy of GoAttribute with the given underlying attribute
+// expression; Pkg and NameScope are shared with the receiver.
+func (g *GoAttribute) Dup(att *expr.AttributeExpr) Attributor {
+ return &GoAttribute{Attribute: att, Pkg: g.Pkg, NameScope: g.NameScope}
+}
+
+// Field returns a valid Go field name for the attribute honoring any
+// struct:field:name meta (see GoifyAtt).
+func (g *GoAttribute) Field(name string, firstUpper bool) string {
+ return GoifyAtt(g.Attribute, name, firstUpper)
+}
+
+// Def returns a valid Go definition for the attribute. pointer and useDefault
+// control how optional attributes and attributes with defaults are rendered.
+func (g *GoAttribute) Def(pointer, useDefault bool) string {
+ return g.NameScope.GoTypeDef(g.Attribute, pointer, useDefault)
+}
+
+// MakeCompatible checks whether source can be transformed into target. It
+// returns the given attributes and transform attributes unchanged along with
+// any compatibility error.
+func (g *goTransformer) MakeCompatible(source, target *ContextualAttribute, ta *TransformAttrs, suffix string) (src, tgt *ContextualAttribute, newTA *TransformAttrs, err error) {
+ if err = IsCompatible(source.Attribute.Expr().Type, target.Attribute.Expr().Type, ta.SourceVar+suffix, ta.TargetVar+suffix); err != nil {
+ return source, target, ta, err
+ }
+ return source, target, ta, nil
+}
+
+// ConvertType produces code to initialize a target type from a source type
+// held by sourceVar.
+func (g *goTransformer) ConvertType(source, target Attributor, sourceVar string) string {
+ if _, ok := source.Expr().Type.(expr.UserType); ok {
+ // return a call to the generated helper function for the conversion
+ return fmt.Sprintf("%s(%s)", HelperName(source, target, g.helperPrefix), sourceVar)
+ }
+ // source and target Go types produced by goa are the same kind.
+ // Hence no type conversion necessary.
+ return sourceVar
+}
+
+// Transform returns the code to transform the source attribute to the
+// target attribute. It dispatches on the source type (array, map, object
+// or primitive) and returns an error if source and target are not
+// compatible for transformation.
+func (g *goTransformer) Transform(source, target *ContextualAttribute, ta *TransformAttrs) (string, error) {
+ var (
+ err error
+
+ sourceType = source.Attribute.Expr().Type
+ targetType = target.Attribute.Expr().Type
+ )
+ {
+ if err = IsCompatible(sourceType, targetType, ta.SourceVar, ta.TargetVar); err != nil {
+ return "", err
+ }
+ }
+
+ var code string
+ {
+ switch {
+ case expr.IsArray(sourceType):
+ code, err = g.TransformArray(source, target, ta)
+ case expr.IsMap(sourceType):
+ code, err = g.TransformMap(source, target, ta)
+ case expr.IsObject(sourceType):
+ code, err = g.TransformObject(source, target, ta)
+ default:
+ assign := "="
+ if ta.NewVar {
+ assign = ":="
+ }
+ if _, ok := target.Attribute.Expr().Type.(expr.UserType); ok {
+ // Primitive user type, these are used for error results
+ cast := target.Attribute.Ref()
+ return fmt.Sprintf("%s %s %s(%s)\n", ta.TargetVar, assign, cast, ta.SourceVar), nil
+ }
+ srcField := g.ConvertType(source.Attribute, target.Attribute, ta.SourceVar)
+ code = fmt.Sprintf("%s %s %s\n", ta.TargetVar, assign, srcField)
+ }
+ }
+ if err != nil {
+ return "", err
+ }
+ return code, nil
+}
+
+// TransformObject generates Go code to transform source object to
+// target object.
+//
+// source, target are the source and target attributes of object type
+//
+// ta is the transform attributes to assist in the transformation
+//
+func (g *goTransformer) TransformObject(source, target *ContextualAttribute, ta *TransformAttrs) (string, error) {
+ return GoObjectTransform(source, target, ta, g)
+}
+
+// TransformArray generates Go code to transform source array to
+// target array.
+//
+// source, target are the source and target attributes of array type
+//
+// ta is the transform attributes to assist in the transformation
+//
+func (g *goTransformer) TransformArray(source, target *ContextualAttribute, ta *TransformAttrs) (string, error) {
+ sourceArr := expr.AsArray(source.Attribute.Expr().Type)
+ if sourceArr == nil {
+ return "", fmt.Errorf("source is not an array type: received %T", source.Attribute.Expr().Type)
+ }
+ targetArr := expr.AsArray(target.Attribute.Expr().Type)
+ if targetArr == nil {
+ return "", fmt.Errorf("target is not an array type: received %T", target.Attribute.Expr().Type)
+ }
+
+ source = source.Dup(sourceArr.ElemType, true)
+ target = target.Dup(targetArr.ElemType, true)
+ if err := IsCompatible(source.Attribute.Expr().Type, target.Attribute.Expr().Type, ta.SourceVar+"[0]", ta.TargetVar+"[0]"); err != nil {
+ return "", err
+ }
+ data := map[string]interface{}{
+ "Transformer": g,
+ "ElemTypeRef": target.Attribute.Ref(),
+ "SourceElem": source,
+ "TargetElem": target,
+ "SourceVar": ta.SourceVar,
+ "TargetVar": ta.TargetVar,
+ "NewVar": ta.NewVar,
+ }
+ return RunGoArrayTemplate(data)
+}
+
+// RunGoArrayTemplate runs the template to generate Go array code.
+func RunGoArrayTemplate(data map[string]interface{}) (string, error) {
+ var buf bytes.Buffer
+ if err := transformGoArrayT.Execute(&buf, data); err != nil {
+ return "", err
+ }
+ return buf.String(), nil
+}
+
+// TransformMap generates Go code to transform source map to target map.
+//
+// source, target are the source and target attributes of map type
+//
+// ta is the transform attributes to assist in the transformation
+//
+// Both the map keys and the map elements are transformed recursively.
+//
+func (g *goTransformer) TransformMap(source, target *ContextualAttribute, ta *TransformAttrs) (string, error) {
+ sourceMap := expr.AsMap(source.Attribute.Expr().Type)
+ if sourceMap == nil {
+ return "", fmt.Errorf("source is not a map type: received %T", source.Attribute.Expr().Type)
+ }
+ targetMap := expr.AsMap(target.Attribute.Expr().Type)
+ if targetMap == nil {
+ return "", fmt.Errorf("target is not a map type: received %T", target.Attribute.Expr().Type)
+ }
+
+ sourceKey := source.Dup(sourceMap.KeyType, true)
+ targetKey := target.Dup(targetMap.KeyType, true)
+ if err := IsCompatible(sourceKey.Attribute.Expr().Type, targetKey.Attribute.Expr().Type, ta.SourceVar+"[key]", ta.TargetVar+"[key]"); err != nil {
+ return "", err
+ }
+ sourceElem := source.Dup(sourceMap.ElemType, true)
+ targetElem := target.Dup(targetMap.ElemType, true)
+ if err := IsCompatible(sourceElem.Attribute.Expr().Type, targetElem.Attribute.Expr().Type, ta.SourceVar+"[*]", ta.TargetVar+"[*]"); err != nil {
+ return "", err
+ }
+ data := map[string]interface{}{
+ "Transformer": g,
+ "KeyTypeRef": targetKey.Attribute.Ref(),
+ "ElemTypeRef": targetElem.Attribute.Ref(),
+ "SourceKey": sourceKey,
+ "TargetKey": targetKey,
+ "SourceElem": sourceElem,
+ "TargetElem": targetElem,
+ "SourceVar": ta.SourceVar,
+ "TargetVar": ta.TargetVar,
+ "NewVar": ta.NewVar,
+ "TargetMap": targetMap,
+ }
+ return RunGoMapTemplate(data)
+}
+
+// RunGoMapTemplate runs the template to generate Go map code.
+func RunGoMapTemplate(data map[string]interface{}) (string, error) {
+ var buf bytes.Buffer
+ if err := transformGoMapT.Execute(&buf, data); err != nil {
+ return "", err
+ }
+ return buf.String(), nil
+}
+
+// collectHelpers recursively traverses the given attributes and returns the
+// transform helper functions required to generate the transform code.
+func collectHelpers(source, target *ContextualAttribute, t Transformer, prefix string, seen ...map[string]*TransformFunctionData) ([]*TransformFunctionData, error) {
+ var (
+ data []*TransformFunctionData
+
+ sourceType = source.Attribute.Expr().Type
+ targetType = target.Attribute.Expr().Type
+ )
+ switch {
+ case expr.IsArray(sourceType):
+ source = source.Dup(expr.AsArray(sourceType).ElemType, true)
+ target = target.Dup(expr.AsArray(targetType).ElemType, true)
+ helpers, err := GoTransformHelpers(source, target, t, prefix, seen...)
+ if err != nil {
+ return nil, err
+ }
+ data = append(data, helpers...)
+ case expr.IsMap(sourceType):
+ source = source.Dup(expr.AsMap(sourceType).KeyType, true)
+ target = target.Dup(expr.AsMap(targetType).KeyType, true)
+ helpers, err := GoTransformHelpers(source, target, t, prefix, seen...)
+ if err != nil {
+ return nil, err
+ }
+ data = append(data, helpers...)
+ source = source.Dup(expr.AsMap(sourceType).ElemType, true)
+ target = target.Dup(expr.AsMap(targetType).ElemType, true)
+ helpers, err = GoTransformHelpers(source, target, t, prefix, seen...)
+ if err != nil {
+ return nil, err
+ }
+ data = append(data, helpers...)
+ case expr.IsObject(sourceType):
+ if ut, ok := sourceType.(expr.UserType); ok {
+ name := HelperName(source.Attribute, target.Attribute, prefix)
+ // seen tracks already generated helpers so recursion stops on
+ // recursive (self-referencing) user types.
+ var s map[string]*TransformFunctionData
+ if len(seen) > 0 {
+ s = seen[0]
+ } else {
+ s = make(map[string]*TransformFunctionData)
+ seen = append(seen, s)
+ }
+ if _, ok := s[name]; ok {
+ return nil, nil
+ }
+ code, err := t.Transform(
+ source.Dup(ut.Attribute(), true), target,
+ &TransformAttrs{SourceVar: "v", TargetVar: "res", NewVar: true})
+ if err != nil {
+ return nil, err
+ }
+ if !source.Required {
+ code = "if v == nil {\n\treturn nil\n}\n" + code
+ }
+ tfd := &TransformFunctionData{
+ Name: name,
+ ParamTypeRef: source.Attribute.Ref(),
+ ResultTypeRef: target.Attribute.Ref(),
+ Code: code,
+ }
+ s[name] = tfd
+ data = append(data, tfd)
+ }
+
+ // collect helpers for the object's attributes recursively
+ var err error
+ {
+ walkMatches(source, target, func(srcMatt, _ *expr.MappedAttributeExpr, srcc, tgtc *ContextualAttribute, n string) {
+ var helpers []*TransformFunctionData
+ helpers, err = collectHelpers(srcc, tgtc, t, prefix, seen...)
+ if err != nil {
+ return
+ }
+ data = append(data, helpers...)
+ })
+ }
+ if err != nil {
+ return nil, err
+ }
+ }
+ return data, nil
+}
+
+// walkMatches iterates through the attributes of the source object and invokes
+// walker for each attribute that is also present in the target object.
+func walkMatches(source, target *ContextualAttribute, walker func(src, tgt *expr.MappedAttributeExpr, srcc, tgtc *ContextualAttribute, n string)) {
+ srcMatt := expr.NewMappedAttributeExpr(source.Attribute.Expr())
+ tgtMatt := expr.NewMappedAttributeExpr(target.Attribute.Expr())
+ srcObj := expr.AsObject(srcMatt.Type)
+ tgtObj := expr.AsObject(tgtMatt.Type)
+ for _, nat := range *srcObj {
+ if att := tgtObj.Attribute(nat.Name); att != nil {
+ srcc := source.Dup(nat.Attribute, srcMatt.IsRequired(nat.Name))
+ tgtc := target.Dup(att, tgtMatt.IsRequired(nat.Name))
+ walker(srcMatt, tgtMatt, srcc, tgtc, nat.Name)
+ }
+ }
+}
+
+// transformAttributeHelper is used by the templates to transform a single
+// array element or map key/element.
+func transformAttributeHelper(source, target *ContextualAttribute, sourceVar, targetVar string, newVar bool, t Transformer) (string, error) {
+ ta := &TransformAttrs{
+ SourceVar: sourceVar,
+ TargetVar: targetVar,
+ NewVar: newVar,
+ }
+ return t.Transform(source, target, ta)
+}
+
+// arrayLoopVar is used by the array template. It returns the loop variable
+// name for the given target expression: 'i' for the outermost loop then
+// 'j', 'k', ... for each level of nesting (one per "[" already present).
+func arrayLoopVar(s string) string {
+ // 105 is 'i'. The rune conversion is required: string(int) is flagged
+ // by go vet (Go 1.15+) and is semantically a rune conversion anyway.
+ return string(rune(105 + strings.Count(s, "[")))
+}
+
+// mapLoopVar is used by the map template. It returns the suffix used for the
+// temporary element variable of nested maps ('b', 'c', ... depending on the
+// nesting depth) or the empty string for an un-nested map.
+func mapLoopVar(mp *expr.Map) string {
+ if depth := mapDepth(mp); depth > 0 {
+ // 97 is 'a'; depth > 0 yields 'b' onwards.
+ return string(rune(97 + depth))
+ }
+ return ""
+}
+
+// mapDepth returns the level of nested maps. If the map is not nested, it
+// returns 0.
+func mapDepth(mp *expr.Map) int {
+ return traverseMap(mp.ElemType.Type, 0)
+}
+
+// traverseMap computes the nesting depth of maps reachable from dt. The seen
+// set guards against infinite recursion on recursive (self-referencing) types.
+func traverseMap(dt expr.DataType, depth int, seen ...map[string]struct{}) int {
+ if mp := expr.AsMap(dt); mp != nil {
+ depth++
+ depth = traverseMap(mp.ElemType.Type, depth, seen...)
+ } else if ar := expr.AsArray(dt); ar != nil {
+ depth = traverseMap(ar.ElemType.Type, depth, seen...)
+ } else if mo := expr.AsObject(dt); mo != nil {
+ var s map[string]struct{}
+ if len(seen) > 0 {
+ s = seen[0]
+ } else {
+ s = make(map[string]struct{})
+ seen = append(seen, s)
+ }
+ key := dt.Name()
+ if u, ok := dt.(expr.UserType); ok {
+ key = u.ID()
+ }
+ if _, ok := s[key]; ok {
+ return depth
+ }
+ s[key] = struct{}{}
+ var level int
+ for _, nat := range *mo {
+ // if object type has attributes of type map then find out the attribute that has
+ // the deepest level of nested maps
+ lvl := 0
+ lvl = traverseMap(nat.Attribute.Type, lvl, seen...)
+ if lvl > level {
+ level = lvl
+ }
+ }
+ depth += level
+ }
+ return depth
+}
+
+const (
+ // transformGoArrayTmpl is the Go code template used to transform arrays
+ // element by element.
+ transformGoArrayTmpl = `{{ .TargetVar }} {{ if .NewVar }}:={{ else }}={{ end }} make([]{{ .ElemTypeRef }}, len({{ .SourceVar }}))
+{{- $loopVar := loopVar .TargetVar }}
+for {{ $loopVar }}, val := range {{ .SourceVar }} {
+ {{ transformAttribute .SourceElem .TargetElem "val" (printf "%s[%s]" .TargetVar $loopVar) false .Transformer -}}
+}
+`
+
+ // transformGoMapTmpl is the Go code template used to transform maps,
+ // converting both keys and elements.
+ transformGoMapTmpl = `{{ .TargetVar }} {{ if .NewVar }}:={{ else }}={{ end }} make(map[{{ .KeyTypeRef }}]{{ .ElemTypeRef }}, len({{ .SourceVar }}))
+{{- $loopVar := loopVar .TargetMap }}
+for key, val := range {{ .SourceVar }} {
+ {{ transformAttribute .SourceKey .TargetKey "key" "tk" true .Transformer -}}
+ {{ transformAttribute .SourceElem .TargetElem "val" (printf "tv%s" $loopVar) true .Transformer -}}
+ {{ .TargetVar }}[tk] = {{ printf "tv%s" $loopVar }}
+}
+`
+)
diff --git a/vendor/goa.design/goa/codegen/goify.go b/vendor/goa.design/goa/codegen/goify.go
new file mode 100644
index 000000000..e23110912
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/goify.go
@@ -0,0 +1,113 @@
+package codegen
+
+import (
+ "strings"
+
+ "goa.design/goa/expr"
+)
+
+// Goify makes a valid Go identifier out of any string. It does that by removing
+// any non letter and non digit character and by making sure the first character
+// is a letter or "_". Goify produces a "CamelCase" version of the string; if
+// firstUpper is true the first character of the identifier is uppercase
+// otherwise it's lowercase.
+func Goify(str string, firstUpper bool) string {
+ // Optimize the trivial empty-string case.
+ if str == "" {
+ return ""
+ }
+
+ // Remove optional ":"-delimited suffix that defines the corresponding
+ // transport specific name.
+ idx := strings.Index(str, ":")
+ if idx > 0 {
+ str = str[:idx]
+ }
+
+ str = CamelCase(str, firstUpper, true)
+ if str == "" {
+ // All characters are invalid. Produce a default value.
+ if firstUpper {
+ return "Val"
+ }
+ return "val"
+ }
+ return fixReservedGo(str)
+}
+
+// GoifyAtt honors any struct:field:name meta set on the attribute and calls
+// Goify with the meta value if present or the given name otherwise.
+func GoifyAtt(att *expr.AttributeExpr, name string, upper bool) string {
+ if tname, ok := att.Meta["struct:field:name"]; ok {
+ if len(tname) > 0 {
+ name = tname[0]
+ }
+ }
+ return Goify(name, upper)
+}
+
+// fixReservedGo appends an underscore to identifiers that collide with an
+// entry in reservedGo (keywords, predeclared names or common package names).
+func fixReservedGo(w string) string {
+ if reservedGo[w] {
+ w += "_"
+ }
+ return w
+}
+
+var (
+ // reservedGo lists identifiers generated code must not use: predeclared
+ // type names, Go keywords and builtins, and commonly imported packages.
+ reservedGo = map[string]bool{
+ // predeclared type identifiers
+ "byte": true,
+ "complex128": true,
+ "complex64": true,
+ "float32": true,
+ "float64": true,
+ "int": true,
+ "int16": true,
+ "int32": true,
+ "int64": true,
+ "int8": true,
+ "rune": true,
+ "string": true,
+ "uint16": true,
+ "uint32": true,
+ "uint64": true,
+ "uint8": true,
+
+ // reserved keywords and builtins
+ "break": true,
+ "case": true,
+ "chan": true,
+ "const": true,
+ "continue": true,
+ "default": true,
+ "defer": true,
+ "delete": true,
+ "else": true,
+ "fallthrough": true,
+ "for": true,
+ "func": true,
+ "go": true,
+ "goto": true,
+ "if": true,
+ "import": true,
+ "interface": true,
+ "map": true,
+ "package": true,
+ "range": true,
+ "return": true,
+ "select": true,
+ "struct": true,
+ "switch": true,
+ "type": true,
+ "var": true,
+
+ // stdlib and goa packages used by generated code
+ "fmt": true,
+ "http": true,
+ "json": true,
+ "os": true,
+ "url": true,
+ "time": true,
+ }
+)
diff --git a/vendor/goa.design/goa/codegen/header.go b/vendor/goa.design/goa/codegen/header.go
new file mode 100644
index 000000000..8fdf44c2c
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/header.go
@@ -0,0 +1,48 @@
+package codegen
+
+import (
+ "goa.design/goa/pkg"
+)
+
+// Header returns a Go source file header section template rendering the
+// generated-code banner, the package clause and the import block.
+func Header(title, pack string, imports []*ImportSpec) *SectionTemplate {
+ return &SectionTemplate{
+ Name: "source-header",
+ Source: headerT,
+ Data: map[string]interface{}{
+ "Title": title,
+ "ToolVersion": pkg.Version(),
+ "Pkg": pack,
+ "Imports": imports,
+ },
+ }
+}
+
+// AddImport adds an import to a section template that was generated with
+// Header. It is a no-op if the section data is not the map produced by
+// Header.
+func AddImport(section *SectionTemplate, imprt *ImportSpec) {
+ var specs []*ImportSpec
+ if data, ok := section.Data.(map[string]interface{}); ok {
+ if imports, ok := data["Imports"]; ok {
+ specs = imports.([]*ImportSpec)
+ }
+ data["Imports"] = append(specs, imprt)
+ }
+}
+
+const (
+ // headerT renders the generated file header: banner comment (when a
+ // title is set), package clause and import block.
+ headerT = `{{if .Title}}// Code generated by goa {{.ToolVersion}}, DO NOT EDIT.
+//
+// {{.Title}}
+//
+// Command:
+{{comment commandLine}}
+
+{{end}}package {{.Pkg}}
+
+{{if .Imports}}import {{if gt (len .Imports) 1}}(
+{{end}}{{range .Imports}} {{.Code}}
+{{end}}{{if gt (len .Imports) 1}})
+{{end}}
+{{end}}`
+)
diff --git a/vendor/goa.design/goa/codegen/import.go b/vendor/goa.design/goa/codegen/import.go
new file mode 100644
index 000000000..cd3b0b63a
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/import.go
@@ -0,0 +1,31 @@
+package codegen
+
+import "fmt"
+
+type (
+ // ImportSpec defines a generated import statement.
+ ImportSpec struct {
+ // Name is the local alias for the imported package, if any.
+ Name string
+ // Path is the Go import path of the package.
+ Path string
+ }
+)
+
+// NewImport creates an import spec with an explicit name (alias).
+func NewImport(name, path string) *ImportSpec {
+ return &ImportSpec{Name: name, Path: path}
+}
+
+// SimpleImport creates an import with no explicit name (alias).
+func SimpleImport(path string) *ImportSpec {
+ return &ImportSpec{Path: path}
+}
+
+// Code returns the Go import statement for the ImportSpec.
+func (s *ImportSpec) Code() string {
+ if len(s.Name) > 0 {
+ return fmt.Sprintf(`%s "%s"`, s.Name, s.Path)
+ }
+ return fmt.Sprintf(`"%s"`, s.Path)
+}
diff --git a/vendor/goa.design/goa/codegen/plugin.go b/vendor/goa.design/goa/codegen/plugin.go
new file mode 100644
index 000000000..a5a95fc76
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/plugin.go
@@ -0,0 +1,126 @@
+package codegen
+
+import "goa.design/goa/eval"
+
+type (
+ // GenerateFunc makes it possible to modify the files generated by the
+ // goa code generators and other plugins. A GenerateFunc accepts the Go
+ // import path of the "gen" package, the design roots as well as the
+ // currently generated files (produced initially by the goa generators
+ // and potentially modified by previously run plugins) and returns a new
+ // set of files.
+ GenerateFunc func(genpkg string, roots []eval.Root, files []*File) ([]*File, error)
+
+ // PrepareFunc makes it possible to modify the design roots before
+ // the files being generated by the goa code generators or other plugins.
+ PrepareFunc func(genpkg string, roots []eval.Root) error
+
+ // plugin is a plugin that has been registered with a given command.
+ plugin struct {
+ // PrepareFunc is the plugin preparation function.
+ PrepareFunc
+ // GenerateFunc is the plugin generator function.
+ GenerateFunc
+ // name is the plugin name.
+ name string
+ // cmd is the name of cmd to run.
+ cmd string
+ // if first is set the plugin cmd must run before all other plugins.
+ first bool
+ // if last is set the plugin cmd must run after all other plugins.
+ last bool
+ }
+)
+
+// plugins keeps track of the registered plugins sorted by their first/last bools,
+// names, or registration order.
+var plugins []*plugin
+
+// RegisterPlugin adds the plugin to the list of plugins to be invoked with the
+// given command.
+func RegisterPlugin(name string, cmd string, pre PrepareFunc, p GenerateFunc) {
+ np := &plugin{name: name, PrepareFunc: pre, GenerateFunc: p, cmd: cmd}
+ var inserted bool
+ for i, plgn := range plugins {
+ if plgn.last || (!plgn.first && np.name < plgn.name) {
+ plugins = append(plugins[:i], append([]*plugin{np}, plugins[i:]...)...)
+ inserted = true
+ break
+ }
+ }
+ if !inserted {
+ plugins = append(plugins, np)
+ }
+}
+
+// RegisterPluginFirst adds the plugin to the beginning of the list of plugins
+// to be invoked with the given command. If more than one plugins are registered
+// using this, the plugins will be sorted alphabetically by their names. If two
+// plugins have same names, then they are sorted by registration order.
+func RegisterPluginFirst(name string, cmd string, pre PrepareFunc, p GenerateFunc) {
+ np := &plugin{name: name, PrepareFunc: pre, GenerateFunc: p, cmd: cmd, first: true}
+ var inserted bool
+ for i, plgn := range plugins {
+ if !plgn.first || np.name < plgn.name {
+ plugins = append(plugins[:i], append([]*plugin{np}, plugins[i:]...)...)
+ inserted = true
+ break
+ }
+ }
+ if !inserted {
+ plugins = append(plugins, np)
+ }
+}
+
+// RegisterPluginLast adds the plugin to the end of the list of plugins
+// to be invoked with the given command. If more than one plugins are registered
+// using this, the plugins will be sorted alphabetically by their names. If two
+// plugins have same names, then they are sorted by registration order.
+func RegisterPluginLast(name string, cmd string, pre PrepareFunc, p GenerateFunc) {
+ np := &plugin{name: name, PrepareFunc: pre, GenerateFunc: p, cmd: cmd, last: true}
+ var inserted bool
+ for i := len(plugins) - 1; i >= 0; i-- {
+ plgn := plugins[i]
+ if !plgn.last || plgn.name < np.name {
+ plugins = append(plugins[:i+1], append([]*plugin{np}, plugins[i+1:]...)...)
+ inserted = true
+ break
+ }
+ }
+ if !inserted {
+ plugins = append(plugins, np)
+ }
+}
+
+// RunPluginsPrepare executes the plugins prepare functions in the order
+// they were registered.
+func RunPluginsPrepare(cmd, genpkg string, roots []eval.Root) error {
+ for _, plugin := range plugins {
+ if plugin.cmd != cmd {
+ continue
+ }
+ if plugin.PrepareFunc != nil {
+ err := plugin.PrepareFunc(genpkg, roots)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+// RunPlugins executes the plugins registered with the given command in the order
+// they were registered.
+func RunPlugins(cmd, genpkg string, roots []eval.Root, genfiles []*File) ([]*File, error) {
+ for _, plugin := range plugins {
+ if plugin.cmd != cmd {
+ continue
+ }
+ gs, err := plugin.GenerateFunc(genpkg, roots, genfiles)
+ if err != nil {
+ return nil, err
+ }
+ genfiles = gs
+ }
+ return genfiles, nil
+}
diff --git a/vendor/goa.design/goa/codegen/scope.go b/vendor/goa.design/goa/codegen/scope.go
new file mode 100644
index 000000000..c94c37582
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/scope.go
@@ -0,0 +1,239 @@
+package codegen
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ "goa.design/goa/expr"
+)
+
+type (
+ // NameScope defines a naming scope.
+ NameScope struct {
+ names map[string]string // type hash to unique name
+ counts map[string]int // raw type name to occurrence count
+ }
+
+ // Hasher is the interface implemented by the objects that must be
+ // scoped.
+ Hasher interface {
+ // Hash computes a unique instance hash suitable for indexing
+ // in a map.
+ Hash() string
+ }
+
+ // Scoper provides a scope for generating unique names.
+ Scoper interface {
+ Scope() *NameScope
+ }
+)
+
+// NewNameScope creates an empty name scope.
+func NewNameScope() *NameScope {
+ ns := &NameScope{
+ names: make(map[string]string),
+ counts: make(map[string]int),
+ }
+ if expr.Root.API != nil {
+ ns.HashedUnique(expr.Root.API, expr.Root.API.Name)
+ }
+ return ns
+}
+
+// HashedUnique builds the unique name for key using name and - if not unique -
+// appending suffix and - if still not unique - a counter value. It returns
+// the same value when called multiple times for a key returning the same hash.
+func (s *NameScope) HashedUnique(key Hasher, name string, suffix ...string) string {
+ if n, ok := s.names[key.Hash()]; ok {
+ return n
+ }
+ var (
+ i int
+ suf string
+ )
+ _, ok := s.counts[name]
+ if !ok {
+ goto done
+ }
+ if len(suffix) > 0 {
+ suf = suffix[0]
+ }
+ name += suf
+ i, ok = s.counts[name]
+ if !ok {
+ goto done
+ }
+ name += strconv.Itoa(i + 1)
+done:
+ s.counts[name] = i + 1
+ s.names[key.Hash()] = name
+ return name
+}
+
+// Unique returns a unique name for the given name. If the given name is not
+// unique, the suffix is appended. If still not unique, a counter value is
+// added to the name until the result is unique.
+func (s *NameScope) Unique(name string, suffix ...string) string {
+ var (
+ i int
+ suf string
+ )
+ _, ok := s.counts[name]
+ if !ok {
+ goto done
+ }
+ if len(suffix) > 0 {
+ suf = suffix[0]
+ }
+ name += suf
+ i, ok = s.counts[name]
+ if !ok {
+ goto done
+ }
+ name += strconv.Itoa(i + 1)
+done:
+ return name
+}
+
+// GoTypeDef returns the Go code that defines a Go type which matches the data
+// structure definition (the part that comes after `type foo`).
+//
+// ptr if true indicates that the attribute must be stored in a pointer
+// (except array and map types which are always non-pointers)
+//
+// useDefault if true indicates that the attribute must not be a pointer
+// if it has a default value.
+func (s *NameScope) GoTypeDef(att *expr.AttributeExpr, ptr, useDefault bool) string {
+ switch actual := att.Type.(type) {
+ case expr.Primitive:
+ return GoNativeTypeName(actual)
+ case *expr.Array:
+ d := s.GoTypeDef(actual.ElemType, ptr, useDefault)
+ if expr.IsObject(actual.ElemType.Type) {
+ d = "*" + d
+ }
+ return "[]" + d
+ case *expr.Map:
+ keyDef := s.GoTypeDef(actual.KeyType, ptr, useDefault)
+ if expr.IsObject(actual.KeyType.Type) {
+ keyDef = "*" + keyDef
+ }
+ elemDef := s.GoTypeDef(actual.ElemType, ptr, useDefault)
+ if expr.IsObject(actual.ElemType.Type) {
+ elemDef = "*" + elemDef
+ }
+ return fmt.Sprintf("map[%s]%s", keyDef, elemDef)
+ case *expr.Object:
+ var ss []string
+ ss = append(ss, "struct {")
+ for _, nat := range *actual {
+ var (
+ fn string
+ tdef string
+ desc string
+ tags string
+
+ name = nat.Name
+ at = nat.Attribute
+ )
+ {
+ fn = GoifyAtt(at, name, true)
+ tdef = s.GoTypeDef(at, ptr, useDefault)
+ if expr.IsObject(at.Type) ||
+ att.IsPrimitivePointer(name, useDefault) ||
+ (ptr && expr.IsPrimitive(at.Type) && at.Type.Kind() != expr.AnyKind && at.Type.Kind() != expr.BytesKind) {
+ tdef = "*" + tdef
+ }
+ if at.Description != "" {
+ desc = Comment(at.Description) + "\n\t"
+ }
+ tags = AttributeTags(att, at)
+ }
+ ss = append(ss, fmt.Sprintf("\t%s%s %s%s", desc, fn, tdef, tags))
+ }
+ ss = append(ss, "}")
+ return strings.Join(ss, "\n")
+ case expr.UserType:
+ return s.GoTypeName(att)
+ default:
+ panic(fmt.Sprintf("unknown data type %T", actual)) // bug
+ }
+}
+
+// GoVar returns the Go code that returns the address of a variable of the Go type
+// which matches the given attribute type.
+func (s *NameScope) GoVar(varName string, dt expr.DataType) string {
+ // For a raw struct, no need for indirection
+ if isRawStruct(dt) {
+ return varName
+ }
+ return "&" + varName
+}
+
+// GoTypeRef returns the Go code that refers to the Go type which matches the
+// given attribute type.
+func (s *NameScope) GoTypeRef(att *expr.AttributeExpr) string {
+ name := s.GoTypeName(att)
+ return goTypeRef(name, att.Type)
+}
+
+// GoFullTypeRef returns the Go code that refers to the Go type which matches
+// the given attribute type defined in the given package if a user type.
+func (s *NameScope) GoFullTypeRef(att *expr.AttributeExpr, pkg string) string {
+ name := s.GoFullTypeName(att, pkg)
+ return goTypeRef(name, att.Type)
+}
+
+// GoTypeName returns the Go type name of the given attribute type.
+func (s *NameScope) GoTypeName(att *expr.AttributeExpr) string {
+ return s.GoFullTypeName(att, "")
+}
+
+// GoFullTypeName returns the Go type name of the given data type qualified with
+// the given package name if applicable and if not the empty string.
+func (s *NameScope) GoFullTypeName(att *expr.AttributeExpr, pkg string) string {
+ switch actual := att.Type.(type) {
+ case expr.Primitive:
+ return GoNativeTypeName(actual)
+ case *expr.Array:
+ return "[]" + s.GoFullTypeRef(actual.ElemType, pkg)
+ case *expr.Map:
+ return fmt.Sprintf("map[%s]%s",
+ s.GoFullTypeRef(actual.KeyType, pkg),
+ s.GoFullTypeRef(actual.ElemType, pkg))
+ case *expr.Object:
+ return s.GoTypeDef(att, false, false)
+ case expr.UserType:
+ if actual == expr.ErrorResult {
+ return "goa.ServiceError"
+ }
+ n := s.HashedUnique(actual, Goify(actual.Name(), true), "")
+ if pkg == "" {
+ return n
+ }
+ return pkg + "." + n
+ case expr.CompositeExpr:
+ return s.GoFullTypeName(actual.Attribute(), pkg)
+ default:
+ panic(fmt.Sprintf("unknown data type %T", actual)) // bug
+ }
+}
+
+func goTypeRef(name string, dt expr.DataType) string {
+ // For a raw struct, no need to dereference
+ if isRawStruct(dt) {
+ return name
+ }
+ return "*" + name
+}
+
+func isRawStruct(dt expr.DataType) bool {
+ if _, ok := dt.(*expr.Object); ok {
+ return true
+ }
+ if expr.IsObject(dt) {
+ return false
+ }
+ return true
+}
diff --git a/vendor/goa.design/goa/codegen/server/example_cli.go b/vendor/goa.design/goa/codegen/server/example_cli.go
new file mode 100644
index 000000000..de0e908fe
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/server/example_cli.go
@@ -0,0 +1,240 @@
+package server
+
+import (
+ "os"
+ "path/filepath"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// ExampleCLIFiles returns example client tool main implementation for each
+// server expression in the design.
+func ExampleCLIFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ var fw []*codegen.File
+ for _, svr := range root.API.Servers {
+ if m := exampleCLIMain(genpkg, root, svr); m != nil {
+ fw = append(fw, m)
+ }
+ }
+ return fw
+}
+
+// exampleCLIMain returns an example client tool main implementation for the
+// given server expression.
+func exampleCLIMain(genpkg string, root *expr.RootExpr, svr *expr.ServerExpr) *codegen.File {
+ pkg := codegen.SnakeCase(codegen.Goify(svr.Name, true))
+ path := filepath.Join("cmd", pkg+"-cli", "main.go")
+ if _, err := os.Stat(path); !os.IsNotExist(err) {
+ return nil // file already exists, skip it.
+ }
+ specs := []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "encoding/json"},
+ {Path: "flag"},
+ {Path: "fmt"},
+ {Path: "net/url"},
+ {Path: "os"},
+ {Path: "strings"},
+ {Path: "goa.design/goa"},
+ }
+ svrData := Servers.Get(svr)
+ sections := []*codegen.SectionTemplate{
+ codegen.Header("", "main", specs),
+ &codegen.SectionTemplate{
+ Name: "cli-main-start",
+ Source: cliMainStartT,
+ Data: map[string]interface{}{
+ "Server": svrData,
+ },
+ FuncMap: map[string]interface{}{
+ "join": strings.Join,
+ },
+ },
+ &codegen.SectionTemplate{
+ Name: "cli-main-var-init",
+ Source: cliMainVarInitT,
+ Data: map[string]interface{}{
+ "Server": svrData,
+ },
+ FuncMap: map[string]interface{}{
+ "join": strings.Join,
+ },
+ },
+ &codegen.SectionTemplate{
+ Name: "cli-main-endpoint-init",
+ Source: cliMainEndpointInitT,
+ Data: map[string]interface{}{
+ "Server": svrData,
+ },
+ FuncMap: map[string]interface{}{
+ "join": strings.Join,
+ "toUpper": strings.ToUpper,
+ },
+ },
+ &codegen.SectionTemplate{Name: "cli-main-end", Source: cliMainEndT},
+ &codegen.SectionTemplate{
+ Name: "cli-main-usage",
+ Source: cliMainUsageT,
+ Data: map[string]interface{}{
+ "APIName": root.API.Name,
+ "Server": svrData,
+ },
+ FuncMap: map[string]interface{}{
+ "toUpper": strings.ToUpper,
+ "join": strings.Join,
+ },
+ },
+ }
+ return &codegen.File{Path: path, SectionTemplates: sections, SkipExist: true}
+}
+
+const (
+ // input: map[string]interface{}{"Server": *Data}
+ cliMainStartT = `func main() {
+ var (
+ hostF = flag.String("host", {{ printf "%q" .Server.DefaultHost.Name }}, "Server host (valid values: {{ (join .Server.AvailableHosts ", ") }})")
+ addrF = flag.String("url", "", "URL to service host")
+ {{ range .Server.Variables }}
+ {{ .VarName }}F = flag.String({{ printf "%q" .Name }}, {{ printf "%q" .DefaultValue }}, {{ printf "%q" .Description }})
+ {{- end }}
+ verboseF = flag.Bool("verbose", false, "Print request and response details")
+ vF = flag.Bool("v", false, "Print request and response details")
+ timeoutF = flag.Int("timeout", 30, "Maximum number of seconds to wait for response")
+ )
+ flag.Usage = usage
+ flag.Parse()
+`
+
+ // input: map[string]interface{}{"Server": *Data}
+ cliMainVarInitT = `var (
+ addr string
+ timeout int
+ debug bool
+ )
+ {
+ addr = *addrF
+ if addr == "" {
+ switch *hostF {
+ {{- range $h := .Server.Hosts }}
+ case {{ printf "%q" $h.Name }}:
+ addr = {{ printf "%q" ($h.DefaultURL $.Server.DefaultTransport.Type) }}
+ {{- range $h.Variables }}
+ {{- if .Values }}
+ var {{ .VarName }}Seen bool
+ {
+ for _, v := range []string{ {{ range $v := .Values }}"{{ $v }}",{{ end }} } {
+ if v == *{{ .VarName }}F {
+ {{ .VarName }}Seen = true
+ break
+ }
+ }
+ }
+ if !{{ .VarName }}Seen {
+ fmt.Fprintf(os.Stderr, "invalid value for URL '{{ .Name }}' variable: %q (valid values: {{ join .Values "," }})", *{{ .VarName }}F)
+ os.Exit(1)
+ }
+ {{- end }}
+ addr = strings.Replace(addr, {{ printf "\"{%s}\"" .Name }}, *{{ .VarName }}F, -1)
+ {{- end }}
+ {{- end }}
+ default:
+ fmt.Fprintf(os.Stderr, "invalid host argument: %q (valid hosts: {{ join .Server.AvailableHosts "|" }})", *hostF)
+ os.Exit(1)
+ }
+ }
+ timeout = *timeoutF
+ debug = *verboseF || *vF
+ }
+
+ var (
+ scheme string
+ host string
+ )
+ {
+ u, err := url.Parse(addr)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "invalid URL %#v: %s", addr, err)
+ os.Exit(1)
+ }
+ scheme = u.Scheme
+ host = u.Host
+ }
+`
+
+ // input: map[string]interface{}{"Server": *Data}
+ cliMainEndpointInitT = `var(
+ endpoint goa.Endpoint
+ payload interface{}
+ err error
+ )
+ {
+ switch scheme {
+ {{- range $t := .Server.Transports }}
+ case "{{ $t.Type }}", "{{ $t.Type }}s":
+ endpoint, payload, err = do{{ toUpper $t.Name }}(scheme, host, timeout, debug)
+ {{- end }}
+ default:
+ fmt.Fprintf(os.Stderr, "invalid scheme: %q (valid schemes: {{ join .Server.Schemes "|" }})", scheme)
+ os.Exit(1)
+ }
+ }
+ if err != nil {
+ if err == flag.ErrHelp {
+ os.Exit(0)
+ }
+ fmt.Fprintln(os.Stderr, err.Error())
+ fmt.Fprintln(os.Stderr, "run '"+os.Args[0]+" --help' for detailed usage.")
+ os.Exit(1)
+ }
+`
+
+ cliMainEndT = `
+ data, err := endpoint(context.Background(), payload)
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+
+ if data != nil && !debug {
+ m, _ := json.MarshalIndent(data, "", " ")
+ fmt.Println(string(m))
+ }
+}
+`
+
+ // input: map[string]interface{}{"APIName": string, "Server": *Data}
+ cliMainUsageT = `
+func usage() {
+ fmt.Fprintf(os.Stderr, ` + "`" + `%s is a command line client for the {{ .APIName }} API.
+
+Usage:
+ %s [-host HOST][-url URL][-timeout SECONDS][-verbose|-v]{{ range .Server.Variables }}[-{{ .Name }} {{ toUpper .Name }}]{{ end }} SERVICE ENDPOINT [flags]
+
+ -host HOST: server host ({{ .Server.DefaultHost.Name }}). valid values: {{ (join .Server.AvailableHosts ", ") }}
+ -url URL: specify service URL overriding host URL (http://localhost:8080)
+ -timeout: maximum number of seconds to wait for response (30)
+ -verbose|-v: print request and response details (false)
+ {{- range .Server.Variables }}
+ -{{ .Name }}: {{ .Description }} ({{ .DefaultValue }})
+ {{- end }}
+
+Commands:
+%s
+Additional help:
+ %s SERVICE [ENDPOINT] --help
+
+Example:
+%s
+` + "`" + `, os.Args[0], os.Args[0], indent({{ .Server.DefaultTransport.Type }}UsageCommands()), os.Args[0], indent({{ .Server.DefaultTransport.Type }}UsageExamples()))
+}
+
+func indent(s string) string {
+ if s == "" {
+ return ""
+ }
+ return " " + strings.Replace(s, "\n", "\n ", -1)
+}
+`
+)
diff --git a/vendor/goa.design/goa/codegen/server/example_server.go b/vendor/goa.design/goa/codegen/server/example_server.go
new file mode 100644
index 000000000..fcd2d33a6
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/server/example_server.go
@@ -0,0 +1,267 @@
+package server
+
+import (
+ "os"
+ "path/filepath"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/codegen/service"
+ "goa.design/goa/expr"
+)
+
+// ExampleServerFiles returns an example server main implementation for every
+// server expression in the service design.
+func ExampleServerFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ var fw []*codegen.File
+ for _, svr := range root.API.Servers {
+ if m := exampleSvrMain(genpkg, root, svr); m != nil {
+ fw = append(fw, m)
+ }
+ }
+ return fw
+}
+
+// exampleSvrMain returns the default main function for the given server
+// expression.
+func exampleSvrMain(genpkg string, root *expr.RootExpr, svr *expr.ServerExpr) *codegen.File {
+ pkg := codegen.SnakeCase(codegen.Goify(svr.Name, true))
+ mainPath := filepath.Join("cmd", pkg, "main.go")
+ if _, err := os.Stat(mainPath); !os.IsNotExist(err) {
+ return nil // file already exists, skip it.
+ }
+ idx := strings.LastIndex(genpkg, string(os.PathSeparator))
+ rootPath := "."
+ if idx > 0 {
+ rootPath = genpkg[:idx]
+ }
+ apiPkg := strings.ToLower(codegen.Goify(root.API.Name, false))
+ specs := []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "flag"},
+ {Path: "fmt"},
+ {Path: "log"},
+ {Path: "net/url"},
+ {Path: "os"},
+ {Path: "os/signal"},
+ {Path: "strings"},
+ {Path: "sync"},
+ {Path: "time"},
+ {Path: rootPath, Name: apiPkg},
+ }
+
+ svrData := Servers.Get(svr)
+
+ // Iterate through services listed in the server expression.
+ svcData := make([]*service.Data, len(svr.Services))
+ for i, svc := range svr.Services {
+ sd := service.Services.Get(svc)
+ svcData[i] = sd
+ specs = append(specs, &codegen.ImportSpec{
+ Path: filepath.Join(genpkg, codegen.SnakeCase(svc)),
+ Name: sd.PkgName,
+ })
+ }
+
+ sections := []*codegen.SectionTemplate{
+ codegen.Header("", "main", specs),
+ &codegen.SectionTemplate{
+ Name: "server-main-start",
+ Source: mainStartT,
+ Data: map[string]interface{}{
+ "Server": svrData,
+ },
+ FuncMap: map[string]interface{}{
+ "join": strings.Join,
+ },
+ },
+ &codegen.SectionTemplate{
+ Name: "server-main-logger",
+ Source: mainLoggerT,
+ Data: map[string]interface{}{
+ "APIPkg": apiPkg,
+ },
+ },
+ &codegen.SectionTemplate{
+ Name: "server-main-services",
+ Source: mainSvcsT,
+ Data: map[string]interface{}{
+ "APIPkg": apiPkg,
+ "Services": svcData,
+ },
+ },
+ &codegen.SectionTemplate{
+ Name: "server-main-endpoints",
+ Source: mainEndpointsT,
+ Data: map[string]interface{}{
+ "Services": svcData,
+ },
+ },
+ &codegen.SectionTemplate{Name: "server-main-interrupts", Source: mainInterruptsT},
+ &codegen.SectionTemplate{
+ Name: "server-main-handler",
+ Source: mainServerHndlrT,
+ Data: map[string]interface{}{
+ "Server": svrData,
+ "Services": svcData,
+ },
+ FuncMap: map[string]interface{}{
+ "goify": codegen.Goify,
+ "join": strings.Join,
+ "toUpper": strings.ToUpper,
+ },
+ },
+ &codegen.SectionTemplate{Name: "server-main-end", Source: mainEndT},
+ }
+
+ return &codegen.File{Path: mainPath, SectionTemplates: sections, SkipExist: true}
+}
+
+const (
+ // input: map[string]interface{"Server": *ServerData}
+ mainStartT = `func main() {
+ {{ comment "Define command line flags, add any other flag required to configure the service." }}
+ var(
+ hostF = flag.String("host", {{ printf "%q" .Server.DefaultHost.Name }}, "Server host (valid values: {{ (join .Server.AvailableHosts ", ") }})")
+ domainF = flag.String("domain", "", "Host domain name (overrides host domain specified in service design)")
+ {{- range .Server.Transports }}
+ {{ .Type }}PortF = flag.String("{{ .Type }}-port", "", "{{ .Name }} port (overrides host {{ .Name }} port specified in service design)")
+ {{- end }}
+ {{- range .Server.Variables }}
+ {{ .VarName }}F = flag.String({{ printf "%q" .Name }}, {{ printf "%q" .DefaultValue }}, "{{ .Description }}{{ if .Values }} (valid values: {{ join .Values ", " }}){{ end }}")
+ {{- end }}
+ secureF = flag.Bool("secure", false, "Use secure scheme (https or grpcs)")
+ dbgF = flag.Bool("debug", false, "Log request and response bodies")
+ )
+ flag.Parse()
+`
+
+ // input: map[string]interface{"APIPkg": string}
+ mainLoggerT = `// Setup logger and goa log adapter. Replace logger with your own using
+ // your log package of choice. The goa.design/middleware/logging/...
+ // packages define log adapters for common log packages.
+ var (
+ logger *log.Logger
+ )
+ {
+ logger = log.New(os.Stderr, "[{{ .APIPkg }}] ", log.Ltime)
+ }
+`
+
+ // input: map[string]interface{"APIPkg": string, "Services": []*service.Data}
+ mainSvcsT = `{{ comment "Initialize the services." }}
+ var (
+ {{- range .Services }}
+ {{- if .Methods }}
+ {{ .VarName }}Svc {{ .PkgName }}.Service
+ {{- end }}
+ {{- end }}
+ )
+ {
+ {{- range .Services }}
+ {{- if .Methods }}
+ {{ .VarName }}Svc = {{ $.APIPkg }}.New{{ .StructName }}(logger)
+ {{- end }}
+ {{- end }}
+ }
+`
+
+ // input: map[string]interface{"Services": []*service.Data}
+ mainEndpointsT = `{{ comment "Wrap the services in endpoints that can be invoked from other services potentially running in different processes." }}
+ var (
+ {{- range .Services }}
+ {{- if .Methods }}
+ {{ .VarName }}Endpoints *{{ .PkgName }}.Endpoints
+ {{- end }}
+ {{- end }}
+ )
+ {
+ {{- range .Services }}
+ {{- if .Methods }}
+ {{ .VarName }}Endpoints = {{ .PkgName }}.NewEndpoints({{ .VarName }}Svc)
+ {{- end }}
+ {{- end }}
+ }
+`
+
+ mainInterruptsT = `// Create channel used by both the signal handler and server goroutines
+ // to notify the main goroutine when to stop the server.
+ errc := make(chan error)
+
+ // Setup interrupt handler. This optional step configures the process so
+ // that SIGINT and SIGTERM signals cause the services to stop gracefully.
+ go func() {
+ c := make(chan os.Signal, 1)
+ signal.Notify(c, os.Interrupt)
+ errc <- fmt.Errorf("%s", <-c)
+ }()
+
+ var wg sync.WaitGroup
+ ctx, cancel := context.WithCancel(context.Background())
+`
+
+ // input: map[string]interface{"Server": *Data, "Services": []*service.Data}
+ mainServerHndlrT = `{{ comment "Start the servers and send errors (if any) to the error channel." }}
+ switch *hostF {
+{{- range $h := .Server.Hosts }}
+ case {{ printf "%q" $h.Name }}:
+ {{- range $u := $h.URIs }}
+ {{- if $.Server.HasTransport $u.Transport.Type }}
+ {
+ addr := {{ printf "%q" $u.URL }}
+ {{- range $h.Variables }}
+ {{- if .Values }}
+ var {{ .VarName }}Seen bool
+ {
+ for _, v := range []string{ {{ range $v := .Values }}"{{ $v }}",{{ end }} } {
+ if v == *{{ .VarName }}F {
+ {{ .VarName }}Seen = true
+ break
+ }
+ }
+ }
+ if !{{ .VarName }}Seen {
+ fmt.Fprintf(os.Stderr, "invalid value for URL '{{ .Name }}' variable: %q (valid values: {{ join .Values "," }})", *{{ .VarName }}F)
+ os.Exit(1)
+ }
+ {{- end }}
+ addr = strings.Replace(addr, {{ printf "\"{%s}\"" .Name }}, *{{ .VarName }}F, -1)
+ {{- end }}
+ u, err := url.Parse(addr)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "invalid URL %#v: %s", addr, err)
+ os.Exit(1)
+ }
+ if *secureF {
+ u.Scheme = "{{ $u.Transport.Type }}s"
+ }
+ if *domainF != "" {
+ u.Host = *domainF
+ }
+ if *{{ $u.Transport.Type }}PortF != "" {
+ h := strings.Split(u.Host, ":")[0]
+ u.Host = h + ":" + *{{ $u.Transport.Type }}PortF
+ } else if u.Port() == "" {
+ u.Host += ":{{ $u.Port }}"
+ }
+ handle{{ toUpper $u.Transport.Name }}Server(ctx, u, {{ range $.Services }}{{ if .Methods }}{{ .VarName }}Endpoints, {{ end }}{{ end }}&wg, errc, logger, *dbgF)
+ }
+ {{- end }}
+ {{ end }}
+{{- end }}
+ default:
+ fmt.Fprintf(os.Stderr, "invalid host argument: %q (valid hosts: {{ join .Server.AvailableHosts "|" }})", *hostF)
+ }
+`
+
+ mainEndT = `{{ comment "Wait for signal." }}
+ logger.Printf("exiting (%v)", <-errc)
+
+ {{ comment "Send cancellation signal to the goroutines." }}
+ cancel()
+
+ wg.Wait()
+ logger.Println("exited")
+}
+`
+)
diff --git a/vendor/goa.design/goa/codegen/server/server_data.go b/vendor/goa.design/goa/codegen/server/server_data.go
new file mode 100644
index 000000000..3fd169427
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/server/server_data.go
@@ -0,0 +1,356 @@
+package server
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// Servers holds the server data needed to generate the example service and
+// client. It is computed from the Server expressions in the service design.
+var Servers = make(ServersData)
+
+type (
+ // ServersData holds the server data from the service design indexed by
+ // server name.
+ ServersData map[string]*Data
+
+ // Data contains the data about a single server.
+ Data struct {
+ // Name is the server name.
+ Name string
+ // Description is the server description.
+ Description string
+ // Services is the list of services supported by the server.
+ Services []string
+ // Schemes is the list of supported schemes by the server.
+ Schemes []string
+ // Hosts is the list of hosts defined in the server.
+ Hosts []*HostData
+ // Variables is the list of URL parameters defined in every host.
+ Variables []*VariableData
+ // Transports is the list of transports defined in the server.
+ Transports []*TransportData
+ }
+
+ // HostData contains the data about a single host in a server.
+ HostData struct {
+ // Name is the host name.
+ Name string
+ // Description is the host description.
+ Description string
+ // Schemes is the list of schemes supported by the host. It is computed
+ // from the URI expressions defined in the Host.
+ // Possible values are http, https, grpc, grpcs.
+ Schemes []string
+ // URIs is the list of URLs defined in the host.
+ URIs []*URIData
+ // Variables is the list of URL parameters.
+ Variables []*VariableData
+ }
+
+ // VariableData contains the data about a URL variable.
+ VariableData struct {
+ // Name is the name of the variable.
+ Name string
+ // Description is the variable description.
+ Description string
+ // VarName is the variable name used in generating flag variables.
+ VarName string
+ // DefaultValue is the default value for the variable. It is set to the
+ // default value defined in the variable attribute if exists, or else set
+ // to the first value in the enum expression.
+ DefaultValue string
+ // Values is the list of allowed values for the variable. The values can
+ // only be primitives. We convert the primitives into string type so that
+ // we could use them to replace the URL variables in the example
+ // generation.
+ Values []string
+ }
+
+ // URIData contains the data about a URL.
+ URIData struct {
+ // URL is the underlying URL.
+ URL string
+ // Scheme is the URL scheme.
+ Scheme string
+ // Port is the default port for the scheme.
+ // http - 80, https - 443, grpc - 8080, grpcs - 8443
+ Port string
+ // Transport is the transport type for the URL.
+ Transport *TransportData
+ }
+
+ // TransportData contains the data about a transport (http or grpc).
+ TransportData struct {
+ // Type is the transport type.
+ Type Transport
+ // Name is the transport name.
+ Name string
+ }
+
+ // Transport is a type for supported goa transports.
+ Transport string
+)
+
+const (
+ // TransportHTTP is the HTTP transport.
+ TransportHTTP Transport = "http"
+ // TransportGRPC is the gRPC transport.
+ TransportGRPC = "grpc"
+)
+
+// Get returns the server data for the given server expression. It builds the
+// server data if the server name does not exist in the map.
+func (d ServersData) Get(svr *expr.ServerExpr) *Data {
+ if data, ok := d[svr.Name]; ok {
+ return data
+ }
+ sd := buildServerData(svr)
+ d[svr.Name] = sd
+ return sd
+}
+
+// DefaultHost returns the first host defined in the server expression.
+func (s *Data) DefaultHost() *HostData {
+ if len(s.Hosts) == 0 {
+ return nil
+ }
+ return s.Hosts[0]
+}
+
+// AvailableHosts returns a list of available host names.
+func (s *Data) AvailableHosts() []string {
+ hosts := make([]string, len(s.Hosts))
+ for i, h := range s.Hosts {
+ hosts[i] = h.Name
+ }
+ return hosts
+}
+
+// DefaultTransport returns the default transport for the given server.
+// If multiple transports are defined, HTTP transport is used as the default.
+func (s *Data) DefaultTransport() *TransportData {
+ if len(s.Transports) == 1 {
+ return s.Transports[0]
+ }
+ for _, t := range s.Transports {
+ if t.Type == TransportHTTP {
+ return t
+ }
+ }
+ return nil // bug
+}
+
+// HasTransport checks if the server supports the given transport.
+func (s *Data) HasTransport(transport Transport) bool {
+ for _, t := range s.Transports {
+ if t.Type == transport {
+ return true
+ }
+ }
+ return false
+}
+
+// DefaultURL returns the first URL defined for the given transport in a host.
+func (h *HostData) DefaultURL(transport Transport) string {
+ for _, u := range h.URIs {
+ if u.Transport.Type == transport {
+ return u.URL
+ }
+ }
+ return ""
+}
+
+// buildServerData builds the server data for the given server expression.
+func buildServerData(svr *expr.ServerExpr) *Data {
+ var (
+ hosts []*HostData
+ )
+ {
+ for _, h := range svr.Hosts {
+ hosts = append(hosts, buildHostData(h))
+ }
+ }
+
+ var (
+ variables []*VariableData
+
+ foundVars = make(map[string]struct{})
+ )
+ {
+ // collect all the URL variables defined in host expressions
+ for _, h := range hosts {
+ for _, v := range h.Variables {
+ if _, ok := foundVars[v.Name]; ok {
+ continue
+ }
+ variables = append(variables, v)
+ foundVars[v.Name] = struct{}{}
+ }
+ }
+ }
+
+ var (
+ transports []*TransportData
+
+ foundTrans = make(map[Transport]struct{})
+ )
+ {
+ for _, svc := range svr.Services {
+ _, seenHTTP := foundTrans[TransportHTTP]
+ _, seenGRPC := foundTrans[TransportGRPC]
+ if seenHTTP && seenGRPC {
+ // only HTTP and gRPC are supported right now.
+ break
+ }
+ if expr.Root.API.HTTP.Service(svc) != nil && !seenHTTP {
+ transports = append(transports, newHTTPTransport())
+ foundTrans[TransportHTTP] = struct{}{}
+ }
+ if expr.Root.API.GRPC.Service(svc) != nil && !seenGRPC {
+ transports = append(transports, newGRPCTransport())
+ foundTrans[TransportGRPC] = struct{}{}
+ }
+ }
+ }
+ return &Data{
+ Name: svr.Name,
+ Description: svr.Description,
+ Services: svr.Services,
+ Schemes: svr.Schemes(),
+ Hosts: hosts,
+ Variables: variables,
+ Transports: transports,
+ }
+}
+
+// buildHostData builds the host data for the given host expression.
+func buildHostData(host *expr.HostExpr) *HostData {
+ var (
+ uris []*URIData
+ )
+ {
+ uris = make([]*URIData, len(host.URIs))
+ for i, uv := range host.URIs {
+ var (
+ t *TransportData
+ scheme string
+ port string
+
+ ustr = string(uv)
+ )
+ {
+ // Did not use url package to find scheme because the url may
+ // contain params (i.e. http://{version}.example.com) which needs
+ // substitution for url.Parse to succeed. Also URIs in host must have
+ // a scheme otherwise validations would have failed.
+ switch {
+ case strings.HasPrefix(ustr, "https"):
+ scheme = "https"
+ port = "443"
+ t = newHTTPTransport()
+ case strings.HasPrefix(ustr, "http"):
+ scheme = "http"
+ port = "80"
+ t = newHTTPTransport()
+ case strings.HasPrefix(ustr, "grpcs"):
+ scheme = "grpcs"
+ port = "8443"
+ t = newGRPCTransport()
+ case strings.HasPrefix(ustr, "grpc"):
+ scheme = "grpc"
+ port = "8080"
+ t = newGRPCTransport()
+
+ // No need for default case here because we only support the above
+ // possibilities for the scheme. Invalid scheme would have failed
+ // validations in the first place.
+ }
+ }
+ uris[i] = &URIData{
+ Scheme: scheme,
+ URL: ustr,
+ Port: port,
+ Transport: t,
+ }
+ }
+ }
+
+ var (
+ variables []*VariableData
+ )
+ {
+ vars := expr.AsObject(host.Variables.Type)
+ if len(*vars) > 0 {
+ variables = make([]*VariableData, len(*vars))
+ for i, v := range *vars {
+ def := v.Attribute.DefaultValue
+ var values []string
+ if def == nil {
+ def = v.Attribute.Validation.Values[0]
+ // DSL ensures v.Attribute has either a
+ // default value or an enum validation
+ values = convertToString(v.Attribute.Validation.Values...)
+ }
+ variables[i] = &VariableData{
+ Name: v.Name,
+ Description: v.Attribute.Description,
+ VarName: codegen.Goify(v.Name, false),
+ DefaultValue: convertToString(def)[0],
+ Values: values,
+ }
+ }
+ }
+ }
+ return &HostData{
+ Name: host.Name,
+ Description: host.Description,
+ Schemes: host.Schemes(),
+ URIs: uris,
+ Variables: variables,
+ }
+}
+
+// convertToString converts primitive type to a string.
+func convertToString(vals ...interface{}) []string {
+ str := make([]string, len(vals))
+ for i, v := range vals {
+ switch t := v.(type) {
+ case bool:
+ str[i] = strconv.FormatBool(t)
+ case int:
+ str[i] = strconv.Itoa(t)
+ case int32:
+ str[i] = strconv.FormatInt(int64(t), 10)
+ case int64:
+ str[i] = strconv.FormatInt(t, 10)
+ case uint:
+ str[i] = strconv.FormatUint(uint64(t), 10)
+ case uint32:
+ str[i] = strconv.FormatUint(uint64(t), 10)
+ case uint64:
+ str[i] = strconv.FormatUint(t, 10)
+ case float32:
+ str[i] = strconv.FormatFloat(float64(t), 'f', -1, 32)
+ case float64:
+ str[i] = strconv.FormatFloat(t, 'f', -1, 64)
+ case string:
+ str[i] = t
+ default:
+ panic(fmt.Sprintf("invalid value type %q to convert to string", t))
+ }
+ }
+ return str
+}
+
+func newHTTPTransport() *TransportData {
+ return &TransportData{Type: TransportHTTP, Name: "HTTP"}
+}
+
+func newGRPCTransport() *TransportData {
+ return &TransportData{Type: TransportGRPC, Name: "gRPC"}
+}
diff --git a/vendor/goa.design/goa/codegen/service/auth_funcs.go b/vendor/goa.design/goa/codegen/service/auth_funcs.go
new file mode 100644
index 000000000..143a6d7f5
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/service/auth_funcs.go
@@ -0,0 +1,95 @@
+package service
+
+import (
+ "os"
+ "path"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// AuthFuncsFile returns a file that contains a dummy implementation of the
+// authorization functions needed to instantiate the service endpoints.
+func AuthFuncsFile(genpkg string, root *expr.RootExpr) *codegen.File {
+ var (
+ apiPkg = strings.ToLower(codegen.Goify(root.API.Name, false))
+ rootPath = "."
+ filepath = "auth.go"
+ )
+ {
+ if _, err := os.Stat(filepath); !os.IsNotExist(err) {
+ return nil // file already exists, skip it.
+ }
+ idx := strings.LastIndex(genpkg, string(os.PathSeparator))
+ if idx > 0 {
+ rootPath = genpkg[:idx]
+ }
+ }
+
+ var (
+ sections []*codegen.SectionTemplate
+ generate bool
+ )
+ {
+ specs := []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "fmt"},
+ {Path: "goa.design/goa", Name: "goa"},
+ {Path: "goa.design/goa/security"},
+ {Path: rootPath, Name: apiPkg},
+ }
+ for _, svc := range root.Services {
+ pkgName := Services.Get(svc.Name).PkgName
+ specs = append(specs, &codegen.ImportSpec{
+ Path: path.Join(genpkg, codegen.SnakeCase(svc.Name)),
+ Name: pkgName,
+ })
+ }
+ header := codegen.Header("", apiPkg, specs)
+ sections = []*codegen.SectionTemplate{header}
+ for _, s := range root.Services {
+ svc := Services.Get(s.Name)
+ if len(svc.Schemes) > 0 {
+ generate = true
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "security-authfuncs",
+ Source: dummyAuthFuncsT,
+ Data: svc,
+ })
+ }
+ }
+ }
+ if len(sections) == 0 || !generate {
+ return nil
+ }
+
+ return &codegen.File{
+ Path: filepath,
+ SectionTemplates: sections,
+ SkipExist: true,
+ }
+}
+
+// data: Data
+const dummyAuthFuncsT = `{{ range .Schemes }}
+{{ printf "%sAuth implements the authorization logic for service %q for the %q security scheme." .Type $.Name .SchemeName | comment }}
+func (s *{{ $.VarName }}srvc) {{ .Type }}Auth(ctx context.Context, {{ if eq .Type "Basic" }}user, pass{{ else if eq .Type "APIKey" }}key{{ else }}token{{ end }} string, scheme *security.{{ .Type }}Scheme) (context.Context, error) {
+ //
+ // TBD: add authorization logic.
+ //
+ // In case of authorization failure this function should return
+ // one of the generated error structs, e.g.:
+ //
+ // return ctx, myservice.MakeUnauthorizedError("invalid token")
+ //
+ // Alternatively this function may return an instance of
+ // goa.ServiceError with a Name field value that matches one of
+ // the design error names, e.g:
+ //
+ // return ctx, goa.PermanentError("unauthorized", "invalid token")
+ //
+ return ctx, fmt.Errorf("not implemented")
+}
+{{- end }}
+`
diff --git a/vendor/goa.design/goa/codegen/service/client.go b/vendor/goa.design/goa/codegen/service/client.go
new file mode 100644
index 000000000..60af29a37
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/service/client.go
@@ -0,0 +1,96 @@
+package service
+
+import (
+ "path/filepath"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+const (
+ // ClientStructName is the name of the generated client data structure.
+ ClientStructName = "Client"
+)
+
+// ClientFile returns the client file for the given service.
+func ClientFile(service *expr.ServiceExpr) *codegen.File {
+ path := filepath.Join(codegen.Gendir, codegen.SnakeCase(service.Name), "client.go")
+ data := endpointData(service)
+ svc := Services.Get(service.Name)
+ var (
+ sections []*codegen.SectionTemplate
+ )
+ {
+ header := codegen.Header(service.Name+" client", svc.PkgName,
+ []*codegen.ImportSpec{
+ {Path: "context"},
+ {Name: "goa", Path: "goa.design/goa"},
+ })
+ def := &codegen.SectionTemplate{
+ Name: "client-struct",
+ Source: serviceClientT,
+ Data: data,
+ }
+ init := &codegen.SectionTemplate{
+ Name: "client-init",
+ Source: serviceClientInitT,
+ Data: data,
+ }
+ sections = []*codegen.SectionTemplate{header, def, init}
+ for _, m := range data.Methods {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-method",
+ Source: serviceClientMethodT,
+ Data: m,
+ })
+ }
+ }
+
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+// input: EndpointsData
+const serviceClientT = `// {{ .ClientVarName }} is the {{ printf "%q" .Name }} service client.
+type {{ .ClientVarName }} struct {
+{{- range .Methods}}
+ {{ .VarName }}Endpoint goa.Endpoint
+{{- end }}
+}
+`
+
+// input: EndpointsData
+const serviceClientInitT = `{{ printf "New%s initializes a %q service client given the endpoints." .ClientVarName .Name | comment }}
+func New{{ .ClientVarName }}({{ .ClientInitArgs }} goa.Endpoint) *{{ .ClientVarName }} {
+ return &{{ .ClientVarName }}{
+{{- range .Methods }}
+ {{ .VarName }}Endpoint: {{ .ArgName }},
+{{- end }}
+ }
+}
+`
+
+// input: EndpointsData
+const serviceClientMethodT = `
+{{ printf "%s calls the %q endpoint of the %q service." .VarName .Name .ServiceName | comment }}
+{{- if .Errors }}
+{{ printf "%s may return the following errors:" .VarName | comment }}
+ {{- range .Errors }}
+// - {{ printf "%q" .ErrName}} (type {{ .TypeRef }}){{ if .Description }}: {{ .Description }}{{ end }}
+ {{- end }}
+// - error: internal error
+{{- end }}
+func (c *{{ .ClientVarName }}) {{ .VarName }}(ctx context.Context, {{ if .PayloadRef }}p {{ .PayloadRef }}{{ end }})({{ if .ClientStream }}res {{ .ClientStream.Interface }}, {{ else if .ResultRef }}res {{ .ResultRef }}, {{ end }}err error) {
+ {{- if .ResultRef }}
+ var ires interface{}
+ {{- end }}
+ {{ if .ResultRef }}ires{{ else }}_{{ end }}, err = c.{{ .VarName}}Endpoint(ctx, {{ if .PayloadRef }}p{{ else }}nil{{ end }})
+ {{- if not .ResultRef }}
+ return
+ {{- else }}
+ if err != nil {
+ return
+ }
+ return ires.({{ if .ClientStream }}{{ .ClientStream.Interface }}{{ else }}{{ .ResultRef }}{{ end }}), nil
+ {{- end }}
+}
+`
diff --git a/vendor/goa.design/goa/codegen/service/convert.go b/vendor/goa.design/goa/codegen/service/convert.go
new file mode 100644
index 000000000..d8e11a4e9
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/service/convert.go
@@ -0,0 +1,732 @@
+package service
+
+import (
+ "fmt"
+ "os"
+ "path"
+ "path/filepath"
+ "reflect"
+ "strconv"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// ConvertData contains the info needed to render convert and create functions.
+type ConvertData struct {
+ // Name is the name of the function.
+ Name string
+ // ReceiverTypeRef is a reference to the receiver type.
+ ReceiverTypeRef string
+ // TypeRef is a reference to the external type.
+ TypeRef string
+ // TypeName is the name of the external type.
+ TypeName string
+ // Code is the function code.
+ Code string
+}
+
+func commonPath(sep byte, paths ...string) string {
+ // Handle special cases.
+ switch len(paths) {
+ case 0:
+ return ""
+ case 1:
+ return path.Clean(paths[0])
+ }
+
+ // Note, we treat string as []byte, not []rune as is often
+ // done in Go. (And sep as byte, not rune). This is because
+ // most/all supported OS' treat paths as string of non-zero
+ // bytes. A filename may be displayed as a sequence of Unicode
+ // runes (typically encoded as UTF-8) but paths are
+ // not required to be valid UTF-8 or in any normalized form
+ // (e.g. "é" (U+00C9) and "é" (U+0065,U+0301) are different
+ // file names.
+ c := []byte(path.Clean(paths[0]))
+
+ // We add a trailing sep to handle the case where the
+ // common prefix directory is included in the path list
+ // (e.g. /home/user1, /home/user1/foo, /home/user1/bar).
+ // path.Clean will have cleaned off trailing / separators with
+ // the exception of the root directory, "/" (in which case we
+// make it "//", but this will get fixed up to "/" below).
+ c = append(c, sep)
+
+ // Ignore the first path since it's already in c
+ for _, v := range paths[1:] {
+ // Clean up each path before testing it
+ v = path.Clean(v) + string(sep)
+
+ // Find the first non-common byte and truncate c
+ if len(v) < len(c) {
+ c = c[:len(v)]
+ }
+ for i := 0; i < len(c); i++ {
+ if v[i] != c[i] {
+ c = c[:i]
+ break
+ }
+ }
+ }
+
+ // Remove trailing non-separator characters and the final separator
+ for i := len(c) - 1; i >= 0; i-- {
+ if c[i] == sep {
+ c = c[:i]
+ break
+ }
+ }
+
+ return string(c)
+}
+
+// getPkgImport returns the correct import path of a package.
+// It's needed because the "reflect" package provides the binary import path
+// ("goa.design/goa/vendor/some/package") for vendored packages
+// instead of the source import path ("some/package")
+func getPkgImport(pkg, cwd string) string {
+ gosrc := path.Join(filepath.ToSlash(os.Getenv("GOPATH")), "src")
+ cwd = filepath.ToSlash(cwd)
+
+ // check for go modules
+ if !strings.HasPrefix(cwd, gosrc) {
+ return pkg
+ }
+
+ pkgpath := path.Join(gosrc, pkg)
+ parentpath := commonPath(os.PathSeparator, cwd, pkgpath)
+
+ // check for external packages
+ if parentpath == gosrc {
+ return pkg
+ }
+
+ rootpkg := string(parentpath[len(gosrc)+1:])
+
+ // check for vendored packages
+ vendorPrefix := path.Join(rootpkg, "vendor")
+ if strings.HasPrefix(pkg, vendorPrefix) {
+ return string(pkg[len(vendorPrefix)+1:])
+ }
+
+ return pkg
+}
+
+func getExternalTypeInfo(external interface{}) (string, string, error) {
+ cwd, err := os.Getwd()
+ if err != nil {
+ return "", "", err
+ }
+ pkg := reflect.TypeOf(external)
+ pkgImport := getPkgImport(pkg.PkgPath(), cwd)
+ alias := strings.Split(pkg.String(), ".")[0]
+ return pkgImport, alias, nil
+}
+
+// ConvertFile returns the file containing the conversion and creation functions
+// if any.
+func ConvertFile(root *expr.RootExpr, service *expr.ServiceExpr) (*codegen.File, error) {
+ // Filter conversion and creation functions that are relevant for this
+ // service
+ svc := Services.Get(service.Name)
+ var conversions, creations []*expr.TypeMap
+ for _, c := range root.Conversions {
+ for _, m := range service.Methods {
+ if ut, ok := m.Payload.Type.(expr.UserType); ok {
+ if ut.Name() == c.User.Name() {
+ conversions = append(conversions, c)
+ break
+ }
+ }
+ }
+ for _, m := range service.Methods {
+ if ut, ok := m.Result.Type.(expr.UserType); ok {
+ if ut.Name() == c.User.Name() {
+ conversions = append(conversions, c)
+ break
+ }
+ }
+ }
+ for _, t := range svc.UserTypes {
+ if c.User.Name() == t.Name {
+ conversions = append(conversions, c)
+ break
+ }
+ }
+ }
+ for _, c := range root.Creations {
+ for _, m := range service.Methods {
+ if ut, ok := m.Payload.Type.(expr.UserType); ok {
+ if ut.Name() == c.User.Name() {
+ creations = append(creations, c)
+ break
+ }
+ }
+ }
+ for _, m := range service.Methods {
+ if ut, ok := m.Result.Type.(expr.UserType); ok {
+ if ut.Name() == c.User.Name() {
+ creations = append(creations, c)
+ break
+ }
+ }
+ }
+ for _, t := range svc.UserTypes {
+ if c.User.Name() == t.Name {
+ creations = append(creations, c)
+ break
+ }
+ }
+ }
+ if len(conversions) == 0 && len(creations) == 0 {
+ return nil, nil
+ }
+
+ // Retrieve external packages info
+ ppm := make(map[string]string)
+ for _, c := range conversions {
+ pkgImport, alias, err := getExternalTypeInfo(c.External)
+ if err != nil {
+ return nil, err
+ }
+ ppm[pkgImport] = alias
+ }
+ for _, c := range creations {
+ pkgImport, alias, err := getExternalTypeInfo(c.External)
+ if err != nil {
+ return nil, err
+ }
+ ppm[pkgImport] = alias
+ }
+ pkgs := make([]*codegen.ImportSpec, len(ppm))
+ i := 0
+ for pp, alias := range ppm {
+ pkgs[i] = &codegen.ImportSpec{Name: alias, Path: pp}
+ i++
+ }
+
+ // Build header section
+ pkgs = append(pkgs, &codegen.ImportSpec{Path: "context"})
+ pkgs = append(pkgs, &codegen.ImportSpec{Path: "goa.design/goa"})
+ path := filepath.Join(codegen.Gendir, codegen.SnakeCase(service.Name), "convert.go")
+ sections := []*codegen.SectionTemplate{
+ codegen.Header(service.Name+" service type conversion functions", svc.PkgName, pkgs),
+ }
+
+ var (
+ names = map[string]struct{}{}
+
+ transFuncs []*codegen.TransformFunctionData
+ )
+
+ // Build conversion sections if any
+ for _, c := range conversions {
+ var dt expr.DataType
+ if err := buildDesignType(&dt, reflect.TypeOf(c.External), c.User); err != nil {
+ return nil, err
+ }
+ t := reflect.TypeOf(c.External)
+ tgtPkg := t.String()
+ tgtPkg = tgtPkg[:strings.Index(tgtPkg, ".")]
+ srcCA := TypeContext(&expr.AttributeExpr{Type: c.User}, "", svc.Scope)
+ tgtCA := codegen.NewGoContextAttr(&expr.AttributeExpr{Type: dt}, tgtPkg, svc.Scope)
+ code, tf, err := codegen.GoTransform(srcCA, tgtCA, "t", "v", "transform")
+ if err != nil {
+ return nil, err
+ }
+ transFuncs = codegen.AppendHelpers(transFuncs, tf)
+ base := "ConvertTo" + t.Name()
+ name := uniquify(base, names)
+ ref := t.String()
+ if expr.IsObject(c.User) {
+ ref = "*" + ref
+ }
+ data := ConvertData{
+ Name: name,
+ ReceiverTypeRef: srcCA.Attribute.Ref(),
+ TypeName: t.Name(),
+ TypeRef: ref,
+ Code: code,
+ }
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "convert-to",
+ Source: convertT,
+ Data: data,
+ })
+ }
+
+ // Build creation sections if any
+ for _, c := range creations {
+ var dt expr.DataType
+ if err := buildDesignType(&dt, reflect.TypeOf(c.External), c.User); err != nil {
+ return nil, err
+ }
+ t := reflect.TypeOf(c.External)
+ srcPkg := t.String()
+ srcPkg = srcPkg[:strings.Index(srcPkg, ".")]
+ srcCA := codegen.NewGoContextAttr(&expr.AttributeExpr{Type: dt}, srcPkg, svc.Scope)
+ tgtCA := TypeContext(&expr.AttributeExpr{Type: c.User}, "", svc.Scope)
+ code, tf, err := codegen.GoTransform(srcCA, tgtCA, "v", "temp", "transform")
+ if err != nil {
+ return nil, err
+ }
+ transFuncs = codegen.AppendHelpers(transFuncs, tf)
+ base := "CreateFrom" + t.Name()
+ name := uniquify(base, names)
+ ref := t.String()
+ if expr.IsObject(c.User) {
+ ref = "*" + ref
+ }
+ data := ConvertData{
+ Name: name,
+ ReceiverTypeRef: tgtCA.Attribute.Ref(),
+ TypeRef: ref,
+ Code: code,
+ }
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "create-from",
+ Source: createT,
+ Data: data,
+ })
+ }
+
+ // Build transformation helper functions section if any.
+ seen := make(map[string]struct{})
+ for _, tf := range transFuncs {
+ if _, ok := seen[tf.Name]; ok {
+ continue
+ }
+ seen[tf.Name] = struct{}{}
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "convert-create-helper",
+ Source: transformHelperT,
+ Data: tf,
+ })
+ }
+
+ return &codegen.File{Path: path, SectionTemplates: sections}, nil
+}
+
+// uniquify checks if base is a key of taken and if not returns it. Otherwise
+// uniquify appends integers to base starting at 2 and incremented by 1 each
+// time a key already exists for the value. uniquify returns the unique value
+// and updates taken with it.
+func uniquify(base string, taken map[string]struct{}) string {
+ name := base
+ idx := 2
+ _, ok := taken[name]
+ for ok {
+ name = base + strconv.Itoa(idx)
+ idx++
+ _, ok = taken[name]
+ }
+ taken[name] = struct{}{}
+ return name
+}
+
+type dtRec struct {
+ path string
+ seen map[string]expr.DataType
+}
+
+func (r dtRec) append(p string) dtRec {
+ r.path += p
+ return r
+}
+
+// buildDesignType builds a user type that represents the given external type.
+// ref is the user type the data type being built is converted to or created
+// from. It's used to compute the non-generated type field names and can be nil
+// if no matching attribute exists.
+func buildDesignType(dt *expr.DataType, t reflect.Type, ref expr.DataType, recs ...dtRec) error {
+ // check compatibility
+ if ref != nil {
+ if err := compatible(ref, t); err != nil {
+ return fmt.Errorf("%q: %s", t.Name(), err)
+ }
+ }
+
+ // handle recursive data structures
+ var rec dtRec
+ if recs != nil {
+ rec = recs[0]
+ if s, ok := rec.seen[t.Name()]; ok {
+ *dt = s
+ return nil
+ }
+ } else {
+ rec.path = ""
+ rec.seen = make(map[string]expr.DataType)
+ }
+
+ switch t.Kind() {
+ case reflect.Bool:
+ *dt = expr.Boolean
+
+ case reflect.Int:
+ *dt = expr.Int
+
+ case reflect.Int32:
+ *dt = expr.Int32
+
+ case reflect.Int64:
+ *dt = expr.Int64
+
+ case reflect.Uint:
+ *dt = expr.UInt
+
+ case reflect.Uint32:
+ *dt = expr.UInt32
+
+ case reflect.Uint64:
+ *dt = expr.UInt64
+
+ case reflect.Float32:
+ *dt = expr.Float32
+
+ case reflect.Float64:
+ *dt = expr.Float64
+
+ case reflect.String:
+ *dt = expr.String
+
+ case reflect.Slice:
+ e := t.Elem()
+ if e.Kind() == reflect.Uint8 {
+ *dt = expr.Bytes
+ return nil
+ }
+ var eref expr.DataType
+ if ref != nil {
+ eref = expr.AsArray(ref).ElemType.Type
+ }
+ var elem expr.DataType
+ if err := buildDesignType(&elem, e, eref, rec.append("[0]")); err != nil {
+ return fmt.Errorf("%s", err)
+ }
+ *dt = &expr.Array{ElemType: &expr.AttributeExpr{Type: elem}}
+
+ case reflect.Map:
+ var kref, vref expr.DataType
+ if ref != nil {
+ m := expr.AsMap(ref)
+ kref = m.KeyType.Type
+ vref = m.ElemType.Type
+ }
+ var kt expr.DataType
+ if err := buildDesignType(&kt, t.Key(), kref, rec.append(".key")); err != nil {
+ return fmt.Errorf("%s", err)
+ }
+ var vt expr.DataType
+ if err := buildDesignType(&vt, t.Elem(), vref, rec.append(".value")); err != nil {
+ return fmt.Errorf("%s", err)
+ }
+ *dt = &expr.Map{KeyType: &expr.AttributeExpr{Type: kt}, ElemType: &expr.AttributeExpr{Type: vt}}
+
+ case reflect.Struct:
+ var oref *expr.Object
+ if ref != nil {
+ oref = expr.AsObject(ref)
+ }
+
+ // Build list of fields that should not be ignored.
+ var fields []reflect.StructField
+ for i := 0; i < t.NumField(); i++ {
+ f := t.FieldByIndex([]int{i})
+ atn, _ := attributeName(oref, f.Name)
+ if oref != nil {
+ if at := oref.Attribute(atn); at != nil {
+ if m := at.Meta["struct.field.external"]; len(m) > 0 {
+ if m[0] == "-" {
+ continue
+ }
+ }
+ }
+ }
+ fields = append(fields, f)
+ }
+
+ // Avoid infinite recursions
+ obj := expr.Object(make([]*expr.NamedAttributeExpr, len(fields)))
+ ut := &expr.UserTypeExpr{
+ AttributeExpr: &expr.AttributeExpr{Type: &obj},
+ TypeName: t.Name(),
+ }
+ *dt = ut
+ rec.seen[t.Name()] = ut
+ var required []string
+ for i, f := range fields {
+ recf := rec.append("." + f.Name)
+ atn, fn := attributeName(oref, f.Name)
+ var aref expr.DataType
+ if oref != nil {
+ if at := oref.Attribute(atn); at != nil {
+ aref = at.Type
+ }
+ }
+ var fdt expr.DataType
+ if f.Type.Kind() == reflect.Ptr {
+ if err := buildDesignType(&fdt, f.Type.Elem(), aref, recf); err != nil {
+ return fmt.Errorf("%q.%s: %s", t.Name(), f.Name, err)
+ }
+ if expr.IsArray(fdt) {
+ return fmt.Errorf("%s: field of type pointer to slice are not supported, use slice instead", rec.path)
+ }
+ if expr.IsMap(fdt) {
+ return fmt.Errorf("%s: field of type pointer to map are not supported, use map instead", rec.path)
+ }
+ } else if f.Type.Kind() == reflect.Struct {
+ return fmt.Errorf("%s: fields of type struct must use pointers", recf.path)
+ } else {
+ if isPrimitive(f.Type) {
+ required = append(required, atn)
+ }
+ if err := buildDesignType(&fdt, f.Type, aref, rec.append("."+f.Name)); err != nil {
+ return fmt.Errorf("%q.%s: %s", t.Name(), f.Name, err)
+ }
+ }
+ name := atn
+ if fn != "" {
+ name = name + ":" + fn
+ }
+ obj[i] = &expr.NamedAttributeExpr{
+ Name: name,
+ Attribute: &expr.AttributeExpr{Type: fdt},
+ }
+ }
+ if len(required) > 0 {
+ ut.Validation = &expr.ValidationExpr{Required: required}
+ }
+ return nil
+
+ case reflect.Ptr:
+ rec.path = "*(" + rec.path + ")"
+ if err := buildDesignType(dt, t.Elem(), ref, rec); err != nil {
+ return err
+ }
+ if !expr.IsObject(*dt) {
+ return fmt.Errorf("%s: only pointer to struct can be converted", rec.path)
+ }
+ default:
+ *dt = expr.Any
+ }
+ return nil
+}
+
+// attributeName computes the name of the attribute for the given field name and
+// object that must contain the matching attribute.
+func attributeName(obj *expr.Object, name string) (string, string) {
+ if obj == nil {
+ return name, ""
+ }
+ // first look for a "struct.field.external" meta
+ for _, nat := range *obj {
+ if m := nat.Attribute.Meta["struct.field.external"]; len(m) > 0 {
+ if m[0] == name {
+ return nat.Name, name
+ }
+ }
+ }
+ // next look for an exact match
+ for _, nat := range *obj {
+ if nat.Name == name {
+ return name, ""
+ }
+ }
+ // next try to lower case first letter
+ ln := strings.ToLower(name[0:1]) + name[1:]
+ for _, nat := range *obj {
+ if nat.Name == ln {
+ return ln, name
+ }
+ }
+ // finally look for a snake case representation
+ sn := codegen.SnakeCase(name)
+ for _, nat := range *obj {
+ if nat.Name == sn {
+ return sn, name
+ }
+ }
+ // no match, return field name
+ return name, ""
+}
+
+// isPrimitive is true if the given kind matches a goa primitive type.
+func isPrimitive(t reflect.Type) bool {
+ switch t.Kind() {
+ case reflect.Bool:
+ fallthrough
+ case reflect.Int:
+ fallthrough
+ case reflect.Int32:
+ fallthrough
+ case reflect.Int64:
+ fallthrough
+ case reflect.Uint:
+ fallthrough
+ case reflect.Uint32:
+ fallthrough
+ case reflect.Uint64:
+ fallthrough
+ case reflect.Float32:
+ fallthrough
+ case reflect.Float64:
+ fallthrough
+ case reflect.Interface:
+ fallthrough
+ case reflect.String:
+ return true
+ case reflect.Slice:
+ e := t.Elem()
+ if e.Kind() == reflect.Uint8 {
+ return true
+ }
+ return false
+ default:
+ return false
+ }
+}
+
+type compRec struct {
+ path string
+ seen map[string]struct{}
+}
+
+func (r compRec) append(p string) compRec {
+ r.path += p
+ return r
+}
+
+// compatible checks that the user and external type definitions map recursively. It
+// returns nil if they do, an error otherwise.
+func compatible(from expr.DataType, to reflect.Type, recs ...compRec) error {
+	// dereference if needed
+ if to.Kind() == reflect.Ptr {
+ return compatible(from, to.Elem(), recs...)
+ }
+
+ toName := to.Name()
+ if toName == "" {
+ toName = to.Kind().String()
+ }
+
+ // handle recursive data structures
+ var rec compRec
+ if recs != nil {
+ rec = recs[0]
+ if _, ok := rec.seen[from.Hash()+"-"+toName]; ok {
+ return nil
+ }
+ } else {
+ rec = compRec{path: "", seen: make(map[string]struct{})}
+ }
+ rec.seen[from.Hash()+"-"+toName] = struct{}{}
+
+ if expr.IsArray(from) {
+ if to.Kind() != reflect.Slice {
+ return fmt.Errorf("types don't match: %s must be a slice", rec.path)
+ }
+ return compatible(
+ expr.AsArray(from).ElemType.Type,
+ to.Elem(),
+ rec.append("[0]"),
+ )
+ }
+
+ if expr.IsMap(from) {
+ if to.Kind() != reflect.Map {
+ return fmt.Errorf("types don't match: %s is not a map", rec.path)
+ }
+ if err := compatible(
+ expr.AsMap(from).ElemType.Type,
+ to.Elem(),
+ rec.append(".value"),
+ ); err != nil {
+ return err
+ }
+ return compatible(
+ expr.AsMap(from).KeyType.Type,
+ to.Key(),
+ rec.append(".key"),
+ )
+ }
+
+ if expr.IsObject(from) {
+ if to.Kind() != reflect.Struct {
+ return fmt.Errorf("types don't match: %s is a %s, expected a struct", rec.path, toName)
+ }
+ obj := expr.AsObject(from)
+ ma := expr.NewMappedAttributeExpr(&expr.AttributeExpr{Type: obj})
+ for _, nat := range *obj {
+ var (
+ fname string
+ ok bool
+ field reflect.StructField
+ )
+ {
+ if ef, k := nat.Attribute.Meta["struct.field.external"]; k {
+ fname = ef[0]
+ if fname == "-" {
+ continue
+ }
+ field, ok = to.FieldByName(ef[0])
+ } else {
+ ef := codegen.Goify(ma.ElemName(nat.Name), true)
+ fname = ef
+ field, ok = to.FieldByName(ef)
+ }
+ }
+ if !ok {
+ return fmt.Errorf("types don't match: could not find field %q of external type %q matching attribute %q of type %q",
+ fname, toName, nat.Name, from.Name())
+ }
+ err := compatible(
+ nat.Attribute.Type,
+ field.Type,
+ rec.append("."+fname),
+ )
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+ }
+
+ if isPrimitive(to) {
+ var dt expr.DataType
+ if err := buildDesignType(&dt, to, nil); err != nil {
+ return err
+ }
+ if expr.Equal(dt, from) {
+ return nil
+ }
+ }
+
+ return fmt.Errorf("types don't match: type of %s is %s but type of corresponding attribute is %s", rec.path, toName, from.Name())
+}
+
+// input: ConvertData
+const convertT = `{{ printf "%s creates an instance of %s initialized from t." .Name .TypeName | comment }}
+func (t {{ .ReceiverTypeRef }}) {{ .Name }}() {{ .TypeRef }} {
+ {{ .Code }}
+ return v
+}
+`
+
+// input: ConvertData
+const createT = `{{ printf "%s initializes t from the fields of v" .Name | comment }}
+func (t {{ .ReceiverTypeRef }}) {{ .Name }}(v {{ .TypeRef }}) {
+ {{ .Code }}
+ *t = *temp
+}
+`
+
+// input: TransformFunctionData
+const transformHelperT = `{{ printf "%s builds a value of type %s from a value of type %s." .Name .ResultTypeRef .ParamTypeRef | comment }}
+func {{ .Name }}(v {{ .ParamTypeRef }}) {{ .ResultTypeRef }} {
+ {{ .Code }}
+ return res
+}
+`
diff --git a/vendor/goa.design/goa/codegen/service/endpoint.go b/vendor/goa.design/goa/codegen/service/endpoint.go
new file mode 100644
index 000000000..84eac1311
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/service/endpoint.go
@@ -0,0 +1,347 @@
+package service
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+type (
+ // EndpointsData contains the data necessary to render the
+ // service endpoints struct template.
+ EndpointsData struct {
+ // Name is the service name.
+ Name string
+ // Description is the service description.
+ Description string
+ // VarName is the endpoint struct name.
+ VarName string
+ // ClientVarName is the client struct name.
+ ClientVarName string
+ // ServiceVarName is the service interface name.
+ ServiceVarName string
+ // Methods lists the endpoint struct methods.
+ Methods []*EndpointMethodData
+ // ClientInitArgs lists the arguments needed to instantiate the client.
+ ClientInitArgs string
+ // Schemes contains the security schemes types used by the
+ // method.
+ Schemes []string
+ }
+
+ // EndpointMethodData describes a single endpoint method.
+ EndpointMethodData struct {
+ *MethodData
+ // ArgName is the name of the argument used to initialize the client
+ // struct method field.
+ ArgName string
+ // ClientVarName is the corresponding client struct field name.
+ ClientVarName string
+ // ServiceName is the name of the owner service.
+ ServiceName string
+ // ServiceVarName is the name of the owner service Go interface.
+ ServiceVarName string
+ // Errors list the possible errors defined in the design if any.
+ Errors []*ErrorInitData
+ // Requirements list the security requirements that apply to the
+ // endpoint. One requirement contains a list of schemes, the
+ // incoming requests must validate at least one scheme in each
+ // requirement to be authorized.
+ Requirements []*RequirementData
+ // Schemes contains the security schemes types used by the
+ // method.
+ Schemes []string
+ }
+)
+
+const (
+ // EndpointsStructName is the name of the generated endpoints data
+ // structure.
+ EndpointsStructName = "Endpoints"
+
+ // ServiceInterfaceName is the name of the generated service interface.
+ ServiceInterfaceName = "Service"
+)
+
+// EndpointFile returns the endpoint file for the given service.
+func EndpointFile(genpkg string, service *expr.ServiceExpr) *codegen.File {
+ path := filepath.Join(codegen.Gendir, codegen.SnakeCase(service.Name), "endpoints.go")
+ svc := Services.Get(service.Name)
+ data := endpointData(service)
+ var (
+ sections []*codegen.SectionTemplate
+ )
+ {
+ header := codegen.Header(service.Name+" endpoints", svc.PkgName,
+ []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "fmt"},
+ {Name: "goa", Path: "goa.design/goa"},
+ {Path: "goa.design/goa/security"},
+ {Path: genpkg + "/" + codegen.SnakeCase(service.Name) + "/" + "views", Name: svc.ViewsPkg},
+ })
+ def := &codegen.SectionTemplate{
+ Name: "endpoints-struct",
+ Source: serviceEndpointsT,
+ Data: data,
+ }
+ sections = []*codegen.SectionTemplate{header, def}
+ for _, m := range data.Methods {
+ if m.ServerStream != nil {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "endpoint-input-struct",
+ Source: serviceEndpointInputStructT,
+ Data: m,
+ })
+ }
+ }
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "endpoints-init",
+ Source: serviceEndpointsInitT,
+ Data: data,
+ })
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "endpoints-use",
+ Source: serviceEndpointsUseT,
+ Data: data,
+ })
+ for _, m := range data.Methods {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "endpoint-method",
+ Source: serviceEndpointMethodT,
+ Data: m,
+ FuncMap: map[string]interface{}{
+ "payloadVar": payloadVar,
+ },
+ })
+ }
+ }
+
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+func endpointData(service *expr.ServiceExpr) *EndpointsData {
+ svc := Services.Get(service.Name)
+ methods := make([]*EndpointMethodData, len(svc.Methods))
+ var schemes []string
+ names := make([]string, len(svc.Methods))
+ for i, m := range svc.Methods {
+ methods[i] = &EndpointMethodData{
+ MethodData: m,
+ ArgName: codegen.Goify(m.VarName, false),
+ ServiceName: svc.Name,
+ ServiceVarName: ServiceInterfaceName,
+ ClientVarName: ClientStructName,
+ Errors: m.Errors,
+ Requirements: m.Requirements,
+ Schemes: m.Schemes,
+ }
+ names[i] = codegen.Goify(m.VarName, false)
+ for _, s := range m.Schemes {
+ found := false
+ for _, s2 := range schemes {
+ if s == s2 {
+ found = true
+ break
+ }
+ }
+ if !found {
+ schemes = append(schemes, s)
+ }
+ }
+ }
+ desc := fmt.Sprintf("%s wraps the %q service endpoints.", EndpointsStructName, service.Name)
+ return &EndpointsData{
+ Name: service.Name,
+ Description: desc,
+ VarName: EndpointsStructName,
+ ClientVarName: ClientStructName,
+ ServiceVarName: ServiceInterfaceName,
+ ClientInitArgs: strings.Join(names, ", "),
+ Methods: methods,
+ Schemes: schemes,
+ }
+}
+
+func payloadVar(e *EndpointMethodData) string {
+ if e.ServerStream != nil {
+ return "ep.Payload"
+ }
+ return "p"
+}
+
+// input: EndpointsData
+const serviceEndpointsT = `{{ comment .Description }}
+type {{ .VarName }} struct {
+{{- range .Methods}}
+ {{ .VarName }} goa.Endpoint
+{{- end }}
+}
+`
+
+// input: EndpointsData
+const serviceEndpointsInitT = `{{ printf "New%s wraps the methods of the %q service with endpoints." .VarName .Name | comment }}
+func New{{ .VarName }}(s {{ .ServiceVarName }}) *{{ .VarName }} {
+{{- if .Schemes }}
+ // Casting service to Auther interface
+ a := s.(Auther)
+{{- end }}
+ return &{{ .VarName }}{
+{{- range .Methods }}
+ {{ .VarName }}: New{{ .VarName }}Endpoint(s{{ range .Schemes }}, a.{{ . }}Auth{{ end }}),
+{{- end }}
+ }
+}
+`
+
+// input: EndpointMethodData
+const serviceEndpointInputStructT = `{{ printf "%s is the input type of %q endpoint that holds the method payload and the server stream." .ServerStream.EndpointStruct .Name | comment }}
+type {{ .ServerStream.EndpointStruct }} struct {
+{{- if .PayloadRef }}
+ {{ comment "Payload is the method payload." }}
+ Payload {{ .PayloadRef }}
+{{- end }}
+ {{ printf "Stream is the server stream used by the %q method to send data." .Name | comment }}
+ Stream {{ .ServerStream.Interface }}
+}
+`
+
+// input: EndpointMethodData
+const serviceEndpointMethodT = `{{ printf "New%sEndpoint returns an endpoint function that calls the method %q of service %q." .VarName .Name .ServiceName | comment }}
+func New{{ .VarName }}Endpoint(s {{ .ServiceVarName }}{{ range .Schemes }}, auth{{ . }}Fn security.Auth{{ . }}Func{{ end }}) goa.Endpoint {
+ return func(ctx context.Context, req interface{}) (interface{}, error) {
+{{- if .ServerStream }}
+ ep := req.(*{{ .ServerStream.EndpointStruct }})
+{{- else if .PayloadRef }}
+ p := req.({{ .PayloadRef }})
+{{- end }}
+{{- $payload := payloadVar . }}
+{{- if .Requirements }}
+ var err error
+ {{- range $ridx, $r := .Requirements }}
+ {{- if ne $ridx 0 }}
+ if err != nil {
+ {{- end }}
+ {{- range $sidx, $s := .Schemes }}
+ {{- if ne $sidx 0 }}
+ if err == nil {
+ {{- end }}
+ {{- if eq .Type "Basic" }}
+ sc := security.BasicScheme{
+ Name: {{ printf "%q" .SchemeName }},
+ }
+ {{- if .UsernamePointer }}
+ var user string
+ if {{ $payload }}.{{ .UsernameField }} != nil {
+ user = *{{ $payload }}.{{ .UsernameField }}
+ }
+ {{- end }}
+ {{- if .PasswordPointer }}
+ var pass string
+ if {{ $payload }}.{{ .PasswordField }} != nil {
+ pass = *{{ $payload }}.{{ .PasswordField }}
+ }
+ {{- end }}
+ ctx, err = auth{{ .Type }}Fn(ctx, {{ if .UsernamePointer }}user{{ else }}{{ $payload }}.{{ .UsernameField }}{{ end }},
+ {{- if .PasswordPointer }}pass{{ else }}{{ $payload }}.{{ .PasswordField }}{{ end }}, &sc)
+
+ {{- else if eq .Type "APIKey" }}
+ sc := security.APIKeyScheme{
+ Name: {{ printf "%q" .SchemeName }},
+ }
+ {{- if $s.CredPointer }}
+ var key string
+ if {{ $payload }}.{{ $s.CredField }} != nil {
+ key = *{{ $payload }}.{{ $s.CredField }}
+ }
+ {{- end }}
+ ctx, err = auth{{ .Type }}Fn(ctx, {{ if $s.CredPointer }}key{{ else }}{{ $payload }}.{{ $s.CredField }}{{ end }}, &sc)
+
+ {{- else if eq .Type "JWT" }}
+ sc := security.JWTScheme{
+ Name: {{ printf "%q" .SchemeName }},
+ Scopes: []string{ {{- range .Scopes }}{{ printf "%q" . }}, {{ end }} },
+ RequiredScopes: []string{ {{- range $r.Scopes }}{{ printf "%q" . }}, {{ end }} },
+ }
+ {{- if $s.CredPointer }}
+ var token string
+ if {{ $payload }}.{{ $s.CredField }} != nil {
+ token = *{{ $payload }}.{{ $s.CredField }}
+ }
+ {{- end }}
+ ctx, err = auth{{ .Type }}Fn(ctx, {{ if $s.CredPointer }}token{{ else }}{{ $payload }}.{{ $s.CredField }}{{ end }}, &sc)
+
+ {{- else if eq .Type "OAuth2" }}
+ sc := security.OAuth2Scheme{
+ Name: {{ printf "%q" .SchemeName }},
+ Scopes: []string{ {{- range .Scopes }}{{ printf "%q" . }}, {{ end }} },
+ RequiredScopes: []string{ {{- range $r.Scopes }}{{ printf "%q" . }}, {{ end }} },
+ {{- if .Flows }}
+ Flows: []*security.OAuthFlow{
+ {{- range .Flows }}
+ &security.OAuthFlow{
+ Type: "{{ .Type }}",
+ {{- if .AuthorizationURL }}
+ AuthorizationURL: {{ printf "%q" .AuthorizationURL }},
+ {{- end }}
+ {{- if .TokenURL }}
+ TokenURL: {{ printf "%q" .TokenURL }},
+ {{- end }}
+ {{- if .RefreshURL }}
+ RefreshURL: {{ printf "%q" .RefreshURL }},
+ {{- end }}
+ },
+ {{- end }}
+ },
+ {{- end }}
+ }
+ {{- if $s.CredPointer }}
+ var token string
+ if {{ $payload }}.{{ $s.CredField }} != nil {
+ token = *{{ $payload }}.{{ $s.CredField }}
+ }
+ {{- end }}
+ ctx, err = auth{{ .Type }}Fn(ctx, {{ if $s.CredPointer }}token{{ else }}{{ $payload }}.{{ $s.CredField }}{{ end }}, &sc)
+
+ {{- end }}
+ {{- if ne $sidx 0 }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- if ne $ridx 0 }}
+ }
+ {{- end }}
+ {{- end }}
+ if err != nil {
+ return nil, err
+ }
+{{- end }}
+{{- if .ServerStream }}
+ return nil, s.{{ .VarName }}(ctx, {{ if .PayloadRef }}{{ $payload }}, {{ end }}ep.Stream)
+{{- else if .ViewedResult }}
+ res,{{ if not .ViewedResult.ViewName }} view,{{ end }} err := s.{{ .VarName }}(ctx{{ if .PayloadRef }}, {{ $payload }}{{ end }})
+ if err != nil {
+ return nil, err
+ }
+ vres := {{ $.ViewedResult.Init.Name }}(res, {{ if .ViewedResult.ViewName }}{{ printf "%q" .ViewedResult.ViewName }}{{ else }}view{{ end }})
+ return vres, nil
+{{- else if .ResultRef }}
+ return s.{{ .VarName }}(ctx{{ if .PayloadRef }}, {{ $payload }}{{ end }})
+{{- else }}
+ return {{ if not .ResultRef }}nil, {{ end }}s.{{ .VarName }}(ctx{{ if .PayloadRef }}, {{ $payload }}{{ end }})
+{{- end }}
+ }
+}
+`
+
+// input: EndpointMethodData
+const serviceEndpointsUseT = `{{ printf "Use applies the given middleware to all the %q service endpoints." .Name | comment }}
+func (e *{{ .VarName }}) Use(m func(goa.Endpoint) goa.Endpoint) {
+{{- range .Methods }}
+ e.{{ .VarName }} = m(e.{{ .VarName }})
+{{- end }}
+}
+`
diff --git a/vendor/goa.design/goa/codegen/service/example_svc.go b/vendor/goa.design/goa/codegen/service/example_svc.go
new file mode 100644
index 000000000..6aa2110a4
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/service/example_svc.go
@@ -0,0 +1,143 @@
+package service
+
+import (
+ "os"
+ "path/filepath"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+type (
+ // basicEndpointData contains the data needed to render a basic endpoint
+ // implementation in the example service file.
+ basicEndpointData struct {
+ *MethodData
+ // ServiceVarName is the service variable name.
+ ServiceVarName string
+ // PayloadFullRef is the fully qualified reference to the payload.
+ PayloadFullRef string
+ // ResultFullName is the fully qualified name of the result.
+ ResultFullName string
+ // ResultFullRef is the fully qualified reference to the result.
+ ResultFullRef string
+ // ResultIsStruct indicates that the result type is a struct.
+ ResultIsStruct bool
+ // ResultView is the view to render the result. It is set only if the
+ // result type uses views.
+ ResultView string
+ }
+)
+
+// ExampleServiceFiles returns a basic service implementation for every
+// service expression.
+func ExampleServiceFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ var fw []*codegen.File
+ for _, svc := range root.Services {
+ if f := exampleServiceFile(genpkg, root, svc); f != nil {
+ fw = append(fw, f)
+ }
+ }
+ return fw
+}
+
+// exampleServiceFile returns a basic implementation of the given service.
+func exampleServiceFile(genpkg string, root *expr.RootExpr, svc *expr.ServiceExpr) *codegen.File {
+ path := codegen.SnakeCase(svc.Name) + ".go"
+ if _, err := os.Stat(path); !os.IsNotExist(err) {
+ return nil // file already exists, skip it.
+ }
+ data := Services.Get(svc.Name)
+ apiPkg := strings.ToLower(codegen.Goify(root.API.Name, false))
+ sections := []*codegen.SectionTemplate{
+ codegen.Header("", apiPkg, []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "log"},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name)), Name: data.PkgName},
+ }),
+ {Name: "basic-service-struct", Source: svcStructT, Data: data},
+ {Name: "basic-service-init", Source: svcInitT, Data: data},
+ }
+ for _, m := range svc.Methods {
+ sections = append(sections, basicEndpointSection(m, data))
+ }
+
+ return &codegen.File{
+ Path: path,
+ SectionTemplates: sections,
+ SkipExist: true,
+ }
+}
+
+// basicEndpointSection returns a section with a basic implementation for the
+// given method.
+func basicEndpointSection(m *expr.MethodExpr, svcData *Data) *codegen.SectionTemplate {
+ md := svcData.Method(m.Name)
+ ed := &basicEndpointData{
+ MethodData: md,
+ ServiceVarName: svcData.VarName,
+ }
+ if m.Payload.Type != expr.Empty {
+ ed.PayloadFullRef = svcData.Scope.GoFullTypeRef(m.Payload, svcData.PkgName)
+ }
+ if m.Result.Type != expr.Empty {
+ ed.ResultFullName = svcData.Scope.GoFullTypeName(m.Result, svcData.PkgName)
+ ed.ResultFullRef = svcData.Scope.GoFullTypeRef(m.Result, svcData.PkgName)
+ ed.ResultIsStruct = expr.IsObject(m.Result.Type)
+ if md.ViewedResult != nil {
+ view := "default"
+ if m.Result.Meta != nil {
+ if v, ok := m.Result.Meta["view"]; ok {
+ view = v[0]
+ }
+ }
+ ed.ResultView = view
+ }
+ }
+ return &codegen.SectionTemplate{
+ Name: "basic-endpoint",
+ Source: endpointT,
+ Data: ed,
+ }
+}
+
+const (
+ // input: service.Data
+ svcStructT = `{{ printf "%s service example implementation.\nThe example methods log the requests and return zero values." .Name | comment }}
+type {{ .VarName }}srvc struct {
+ logger *log.Logger
+}
+`
+
+ // input: service.Data
+ svcInitT = `{{ printf "New%s returns the %s service implementation." .StructName .Name | comment }}
+func New{{ .StructName }}(logger *log.Logger) {{ .PkgName }}.Service {
+ return &{{ .VarName }}srvc{logger}
+}
+`
+
+ // input: basicEndpointData
+ endpointT = `{{ comment .Description }}
+{{- if .ServerStream }}
+func (s *{{ .ServiceVarName }}srvc) {{ .VarName }}(ctx context.Context{{ if .PayloadFullRef }}, p {{ .PayloadFullRef }}{{ end }}, stream {{ .ServerStream.Interface }}) (err error) {
+{{- else }}
+func (s *{{ .ServiceVarName }}srvc) {{ .VarName }}(ctx context.Context{{ if .PayloadFullRef }}, p {{ .PayloadFullRef }}{{ end }}) ({{ if .ResultFullRef }}res {{ .ResultFullRef }}, {{ if .ViewedResult }}{{ if not .ViewedResult.ViewName }}view string, {{ end }}{{ end }} {{ end }}err error) {
+{{- end }}
+{{- if and (and .ResultFullRef .ResultIsStruct) (not .ServerStream) }}
+ res = &{{ .ResultFullName }}{}
+{{- end }}
+{{- if .ViewedResult }}
+ {{- if not .ViewedResult.ViewName }}
+ {{- if .ServerStream }}
+ stream.SetView({{ printf "%q" .ResultView }})
+ {{- else }}
+ view = {{ printf "%q" .ResultView }}
+ {{- end }}
+ {{- end }}
+{{- end }}
+ s.logger.Print("{{ .ServiceVarName }}.{{ .Name }}")
+ return
+}
+`
+)
diff --git a/vendor/goa.design/goa/codegen/service/service.go b/vendor/goa.design/goa/codegen/service/service.go
new file mode 100644
index 000000000..8a8ab5cac
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/service/service.go
@@ -0,0 +1,310 @@
+package service
+
+import (
+ "fmt"
+ "path/filepath"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// File returns the service file for the given service.
+func File(genpkg string, service *expr.ServiceExpr) *codegen.File {
+ path := filepath.Join(codegen.Gendir, codegen.SnakeCase(service.Name), "service.go")
+ svc := Services.Get(service.Name)
+ header := codegen.Header(
+ service.Name+" service",
+ svc.PkgName,
+ []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "goa.design/goa"},
+ {Path: "goa.design/goa/security"},
+ {Path: genpkg + "/" + codegen.SnakeCase(service.Name) + "/" + "views", Name: svc.ViewsPkg},
+ })
+ def := &codegen.SectionTemplate{
+ Name: "service",
+ Source: serviceT,
+ Data: svc,
+ FuncMap: map[string]interface{}{
+ "streamInterfaceFor": streamInterfaceFor,
+ },
+ }
+
+ sections := []*codegen.SectionTemplate{header, def}
+ seen := make(map[string]struct{})
+
+ for _, m := range svc.Methods {
+ if m.PayloadDef != "" {
+ if _, ok := seen[m.Payload]; !ok {
+ seen[m.Payload] = struct{}{}
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "service-payload",
+ Source: payloadT,
+ Data: m,
+ })
+ }
+ }
+ if m.StreamingPayloadDef != "" {
+ if _, ok := seen[m.StreamingPayload]; !ok {
+ seen[m.StreamingPayload] = struct{}{}
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "service-streamig-payload",
+ Source: streamingPayloadT,
+ Data: m,
+ })
+ }
+ }
+ if m.ResultDef != "" {
+ if _, ok := seen[m.Result]; !ok {
+ seen[m.Result] = struct{}{}
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "service-result",
+ Source: resultT,
+ Data: m,
+ })
+ }
+ }
+ }
+
+ for _, ut := range svc.UserTypes {
+ if _, ok := seen[ut.Name]; !ok {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "service-user-type",
+ Source: userTypeT,
+ Data: ut,
+ })
+ }
+ }
+
+ var errorTypes []*UserTypeData
+ for _, et := range svc.ErrorTypes {
+ if et.Type == expr.ErrorResult {
+ continue
+ }
+ if _, ok := seen[et.Name]; !ok {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "error-user-type",
+ Source: userTypeT,
+ Data: et,
+ })
+ errorTypes = append(errorTypes, et)
+ }
+ }
+
+ for _, et := range errorTypes {
+ if et.Type == expr.ErrorResult {
+ continue
+ }
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "service-error",
+ Source: errorT,
+ FuncMap: map[string]interface{}{"errorName": errorName},
+ Data: et,
+ })
+ }
+ for _, er := range svc.ErrorInits {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "error-init-func",
+ Source: errorInitT,
+ Data: er,
+ })
+ }
+
+ // transform result type functions
+ for _, t := range svc.ViewedResultTypes {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "viewed-result-type-to-service-result-type",
+ Source: typeInitT,
+ Data: t.ResultInit,
+ })
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "service-result-type-to-viewed-result-type",
+ Source: typeInitT,
+ Data: t.Init,
+ })
+ }
+ var projh []*codegen.TransformFunctionData
+ for _, t := range svc.ProjectedTypes {
+ for _, i := range t.TypeInits {
+ projh = codegen.AppendHelpers(projh, i.Helpers)
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "projected-type-to-service-type",
+ Source: typeInitT,
+ Data: i,
+ })
+ }
+ for _, i := range t.Projections {
+ projh = codegen.AppendHelpers(projh, i.Helpers)
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "service-type-to-projected-type",
+ Source: typeInitT,
+ Data: i,
+ })
+ }
+ }
+
+ for _, h := range projh {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "transform-helpers",
+ Source: transformHelperT,
+ Data: h,
+ })
+ }
+
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+func errorName(et *UserTypeData) string {
+ obj := expr.AsObject(et.Type)
+ if obj != nil {
+ for _, att := range *obj {
+ if _, ok := att.Attribute.Meta["struct:error:name"]; ok {
+ return fmt.Sprintf("e.%s", codegen.Goify(att.Name, true))
+ }
+ }
+ }
+ return fmt.Sprintf("%q", et.Name)
+}
+
+// streamInterfaceFor builds the data to generate the client and server stream
+// interfaces for the given endpoint.
+func streamInterfaceFor(typ string, m *MethodData, stream *StreamData) map[string]interface{} {
+ return map[string]interface{}{
+ "Type": typ,
+ "Endpoint": m.Name,
+ "Stream": stream,
+ // If a view is explicitly set (ViewName is not empty) in the Result
+ // expression, we can use that view to render the result type instead
+ // of iterating through the list of views defined in the result type.
+ "IsViewedResult": m.ViewedResult != nil && m.ViewedResult.ViewName == "",
+ }
+}
+
+// serviceT is the template used to write an service definition.
+const serviceT = `
+{{ comment .Description }}
+type Service interface {
+{{- range .Methods }}
+ {{ comment .Description }}
+ {{- if .ViewedResult }}
+ {{- if not .ViewedResult.ViewName }}
+ {{ comment "The \"view\" return value must have one of the following views" }}
+ {{- range .ViewedResult.Views }}
+ {{- if .Description }}
+ {{ printf "// - %q: %s" .Name .Description }}
+ {{- else }}
+ {{ printf "// - %q" .Name }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- if .ServerStream }}
+ {{ .VarName }}(context.Context{{ if .Payload }}, {{ .PayloadRef }}{{ end }}, {{ .ServerStream.Interface }}) (err error)
+ {{- else }}
+ {{ .VarName }}(context.Context{{ if .Payload }}, {{ .PayloadRef }}{{ end }}) ({{ if .Result }}res {{ .ResultRef }}, {{ if .ViewedResult }}{{ if not .ViewedResult.ViewName }}view string, {{ end }}{{ end }}{{ end }}err error)
+ {{- end }}
+{{- end }}
+}
+
+{{- if .Schemes }}
+// Auther defines the authorization functions to be implemented by the service.
+type Auther interface {
+ {{- range .Schemes }}
+ {{ printf "%sAuth implements the authorization logic for the %s security scheme." .Type .Type | comment }}
+ {{ .Type }}Auth(ctx context.Context, {{ if eq .Type "Basic" }}user, pass{{ else if eq .Type "APIKey" }}key{{ else }}token{{ end }} string, schema *security.{{ .Type }}Scheme) (context.Context, error)
+ {{- end }}
+}
+{{- end }}
+
+// ServiceName is the name of the service as defined in the design. This is the
+// same value that is set in the endpoint request contexts under the ServiceKey
+// key.
+const ServiceName = {{ printf "%q" .Name }}
+
+// MethodNames lists the service method names as defined in the design. These
+// are the same values that are set in the endpoint request contexts under the
+// MethodKey key.
+var MethodNames = [{{ len .Methods }}]string{ {{ range .Methods }}{{ printf "%q" .Name }}, {{ end }} }
+{{- range .Methods }}
+ {{- if .ServerStream }}
+ {{ template "stream_interface" (streamInterfaceFor "server" . .ServerStream) }}
+ {{ template "stream_interface" (streamInterfaceFor "client" . .ClientStream) }}
+ {{- end }}
+{{- end }}
+
+{{- define "stream_interface" }}
+{{ printf "%s is the interface a %q endpoint %s stream must satisfy." .Stream.Interface .Endpoint .Type | comment }}
+type {{ .Stream.Interface }} interface {
+ {{- if .Stream.SendTypeRef }}
+ {{ comment .Stream.SendDesc }}
+ {{ .Stream.SendName }}({{ .Stream.SendTypeRef }}) error
+ {{- end }}
+ {{- if .Stream.RecvTypeRef }}
+ {{ comment .Stream.RecvDesc }}
+ {{ .Stream.RecvName }}() ({{ .Stream.RecvTypeRef }}, error)
+ {{- end }}
+ {{- if .Stream.MustClose }}
+ {{ comment "Close closes the stream." }}
+ Close() error
+ {{- end }}
+ {{- if and .IsViewedResult (eq .Type "server") }}
+ {{ comment "SetView sets the view used to render the result before streaming." }}
+ SetView(view string)
+ {{- end }}
+}
+{{- end }}
+`
+
+const payloadT = `{{ comment .PayloadDesc }}
+type {{ .Payload }} {{ .PayloadDef }}
+`
+
+const streamingPayloadT = `{{ comment .StreamingPayloadDesc }}
+type {{ .StreamingPayload }} {{ .StreamingPayloadDef }}
+`
+
+const resultT = `{{ comment .ResultDesc }}
+type {{ .Result }} {{ .ResultDef }}
+`
+
+const userTypeT = `{{ comment .Description }}
+type {{ .VarName }} {{ .Def }}
+`
+
+const errorT = `// Error returns an error description.
+func (e {{ .Ref }}) Error() string {
+ return {{ printf "%q" .Description }}
+}
+
+// ErrorName returns {{ printf "%q" .Name }}.
+func (e {{ .Ref }}) ErrorName() string {
+ return {{ errorName . }}
+}
+`
+
+// input: map[string]{"Type": TypeData, "Error": ErrorData}
+const errorInitT = `{{ printf "%s builds a %s from an error." .Name .TypeName | comment }}
+func {{ .Name }}(err error) {{ .TypeRef }} {
+ return &{{ .TypeName }}{
+ Name: {{ printf "%q" .ErrName }},
+ ID: goa.NewErrorID(),
+ Message: err.Error(),
+ {{- if .Temporary }}
+ Temporary: true,
+ {{- end }}
+ {{- if .Timeout }}
+ Timeout: true,
+ {{- end }}
+ {{- if .Fault }}
+ Fault: true,
+ {{- end }}
+ }
+}
+`
+
+// input: InitData
+const typeInitT = `{{ comment .Description }}
+func {{ .Name }}({{ range .Args }}{{ .Name }} {{ .Ref }}, {{ end }}) {{ .ReturnTypeRef }} {
+ {{ .Code }}
+}
+`
diff --git a/vendor/goa.design/goa/codegen/service/service_data.go b/vendor/goa.design/goa/codegen/service/service_data.go
new file mode 100644
index 000000000..9fca69ff5
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/service/service_data.go
@@ -0,0 +1,1609 @@
+package service
+
+import (
+ "bytes"
+ "fmt"
+ "strings"
+ "text/template"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// Services holds the data computed from the design needed to generate the code
+// of the services.
+var Services = make(ServicesData)
+
+var (
+ // initTypeTmpl is the template used to render the code that initializes a
+ // projected type or viewed result type or a result type.
+ initTypeCodeTmpl = template.Must(template.New("initTypeCode").Funcs(template.FuncMap{"goify": codegen.Goify}).Parse(initTypeCodeT))
+ // validateTypeCodeTmpl is the template used to render the code to
+ // validate a projected type or a viewed result type.
+ validateTypeCodeTmpl = template.Must(template.New("validateType").Funcs(template.FuncMap{"goify": codegen.Goify}).Parse(validateTypeT))
+)
+
+type (
+ // ServicesData encapsulates the data computed from the service designs.
+ ServicesData map[string]*Data
+
+ // Data contains the data used to render the code related to a
+ // single service.
+ Data struct {
+ // Name is the service name.
+ Name string
+ // Description is the service description.
+ Description string
+ // StructName is the service struct name.
+ StructName string
+ // VarName is the service variable name (first letter in
+ // lowercase).
+ VarName string
+ // PkgName is the name of the package containing the generated
+ // service code.
+ PkgName string
+ // ViewsPkg is the name of the package containing the view types.
+ ViewsPkg string
+ // Methods lists the service interface methods.
+ Methods []*MethodData
+ // Schemes is the list of security schemes required by the
+ // service methods.
+ Schemes []*SchemeData
+ // UserTypes lists the type definitions that the service
+ // depends on.
+ UserTypes []*UserTypeData
+ // ErrorTypes lists the error type definitions that the service
+ // depends on.
+ ErrorTypes []*UserTypeData
+ // Errors list the information required to generate error init
+ // functions.
+ ErrorInits []*ErrorInitData
+ // ProjectedTypes lists the types which uses pointers for all fields to
+ // define view specific validation logic.
+ ProjectedTypes []*ProjectedTypeData
+ // ViewedResultTypes lists all the viewed method result types.
+ ViewedResultTypes []*ViewedResultTypeData
+ // Scope initialized with all the service types.
+ Scope *codegen.NameScope
+ // ViewScope initialized with all the viewed types.
+ ViewScope *codegen.NameScope
+ }
+
+ // ErrorInitData describes an error returned by a service method of type
+ // ErrorResult.
+ ErrorInitData struct {
+ // Name is the name of the init function.
+ Name string
+ // Description is the error description.
+ Description string
+ // ErrName is the name of the error.
+ ErrName string
+ // TypeName is the error struct type name.
+ TypeName string
+ // TypeRef is the reference to the error type.
+ TypeRef string
+ // Temporary indicates whether the error is temporary.
+ Temporary bool
+ // Timeout indicates whether the error is due to timeouts.
+ Timeout bool
+ // Fault indicates whether the error is server-side fault.
+ Fault bool
+ }
+
+ // MethodData describes a single service method.
+ MethodData struct {
+ // Name is the method name.
+ Name string
+ // Description is the method description.
+ Description string
+ // VarName is the Go method name.
+ VarName string
+ // Payload is the name of the payload type if any,
+ Payload string
+ // PayloadDef is the payload type definition if any.
+ PayloadDef string
+ // PayloadRef is a reference to the payload type if any,
+ PayloadRef string
+ // PayloadDesc is the payload type description if any.
+ PayloadDesc string
+ // PayloadEx is an example of a valid payload value.
+ PayloadEx interface{}
+ // StreamingPayload is the name of the streaming payload type if any.
+ StreamingPayload string
+ // StreamingPayloadDef is the streaming payload type definition if any.
+ StreamingPayloadDef string
+ // StreamingPayloadRef is a reference to the streaming payload type if any.
+ StreamingPayloadRef string
+ // StreamingPayloadDesc is the streaming payload type description if any.
+ StreamingPayloadDesc string
+ // StreamingPayloadEx is an example of a valid streaming payload value.
+ StreamingPayloadEx interface{}
+ // Result is the name of the result type if any.
+ Result string
+ // ResultDef is the result type definition if any.
+ ResultDef string
+ // ResultRef is the reference to the result type if any.
+ ResultRef string
+ // ResultDesc is the result type description if any.
+ ResultDesc string
+ // ResultEx is an example of a valid result value.
+ ResultEx interface{}
+ // Errors list the possible errors defined in the design if any.
+ Errors []*ErrorInitData
+ // Requirements contains the security requirements for the
+ // method.
+ Requirements []*RequirementData
+ // Schemes contains the security schemes types used by the
+ // method.
+ Schemes []string
+ // ViewedResult contains the data required to generate the code handling
+ // views if any.
+ ViewedResult *ViewedResultTypeData
+ // ServerStream indicates that the service method receives a payload
+ // stream or sends a result stream or both.
+ ServerStream *StreamData
+ // ClientStream indicates that the service method receives a result
+ // stream or sends a payload result or both.
+ ClientStream *StreamData
+ // StreamKind is the kind of the stream (payload or result or bidirectional).
+ StreamKind expr.StreamKind
+ }
+
+ // StreamData is the data used to generate client and server interfaces that
+ // a streaming endpoint implements. It is initialized if a method defines a
+ // streaming payload or result or both.
+ StreamData struct {
+ // Interface is the name of the stream interface.
+ Interface string
+ // VarName is the name of the struct type that implements the stream
+ // interface.
+ VarName string
+ // SendName is the name of the send function.
+ SendName string
+ // SendDesc is the description for the send function.
+ SendDesc string
+ // SendTypeName is the type name sent through the stream.
+ SendTypeName string
+ // SendTypeRef is the reference to the type sent through the stream.
+ SendTypeRef string
+ // RecvName is the name of the receive function.
+ RecvName string
+ // RecvDesc is the description for the recv function.
+ RecvDesc string
+ // RecvTypeName is the type name received from the stream.
+ RecvTypeName string
+ // RecvTypeRef is the reference to the type received from the stream.
+ RecvTypeRef string
+ // MustClose indicates whether the stream should implement the Close()
+ // function.
+ MustClose bool
+ // EndpointStruct is the name of the endpoint struct that holds a payload
+ // reference (if any) and the endpoint server stream. It is set only if the
+ // client sends a normal payload and server streams a result.
+ EndpointStruct string
+ // Kind is the kind of the stream (payload or result or bidirectional).
+ Kind expr.StreamKind
+ }
+
+ // RequirementData lists the schemes and scopes defined by a single
+ // security requirement.
+ RequirementData struct {
+ // Schemes list the requirement schemes.
+ Schemes []*SchemeData
+ // Scopes list the required scopes.
+ Scopes []string
+ }
+
+ // UserTypeData contains the data describing a data type.
+ UserTypeData struct {
+ // Name is the type name.
+ Name string
+ // VarName is the corresponding Go type name.
+ VarName string
+ // Description is the type human description.
+ Description string
+ // Def is the type definition Go code.
+ Def string
+ // Ref is the reference to the type.
+ Ref string
+ // Type is the underlying type.
+ Type expr.UserType
+ }
+
+ // SchemeData describes a single security scheme.
+ SchemeData struct {
+ // Kind is the type of scheme, one of "Basic", "APIKey", "JWT"
+ // or "OAuth2".
+ Type string
+ // SchemeName is the name of the scheme.
+ SchemeName string
+ // Name refers to a header or parameter name, based on In's
+ // value.
+ Name string
+ // UsernameField is the name of the payload field that should be
+ // initialized with the basic auth username if any.
+ UsernameField string
+ // UsernamePointer is true if the username field is a pointer.
+ UsernamePointer bool
+ // UsernameAttr is the name of the attribute that contains the
+ // username.
+ UsernameAttr string
+ // UsernameRequired specifies whether the attribute that
+ // contains the username is required.
+ UsernameRequired bool
+ // PasswordField is the name of the payload field that should be
+ // initialized with the basic auth password if any.
+ PasswordField string
+ // PasswordPointer is true if the password field is a pointer.
+ PasswordPointer bool
+ // PasswordAttr is the name of the attribute that contains the
+ // password.
+ PasswordAttr string
+ // PasswordRequired specifies whether the attribute that
+ // contains the password is required.
+ PasswordRequired bool
+ // CredField contains the name of the payload field that should
+ // be initialized with the API key, the JWT token or the OAuth2
+ // access token.
+ CredField string
+ // CredPointer is true if the credential field is a pointer.
+ CredPointer bool
+ // CredRequired specifies if the key is a required attribute.
+ CredRequired bool
+ // KeyAttr is the name of the attribute that contains
+ // the security tag (for APIKey, OAuth2, and JWT schemes).
+ KeyAttr string
+ // Scopes lists the scopes that apply to the scheme.
+ Scopes []string
+ // Flows describes the OAuth2 flows.
+ Flows []*expr.FlowExpr
+ // In indicates the request element that holds the credential.
+ In string
+ }
+
+ // ViewedResultTypeData contains the data used to generate a viewed result type
+ // (i.e. a method result type with more than one view). The viewed result
+ // type holds the projected type and a view based on which it creates the
+ // projected type. It also contains the code to validate the viewed result
+ // type and the functions to initialize a viewed result type from a result
+ // type and vice versa.
+ ViewedResultTypeData struct {
+ // the viewed result type
+ *UserTypeData
+ // Views lists the views defined on the viewed result type.
+ Views []*ViewData
+ // Validate is the validation run on the viewed result type.
+ Validate *ValidateData
+ // Init is the constructor code to initialize a viewed result type from
+ // a result type.
+ Init *InitData
+ // ResultInit is the constructor code to initialize a result type
+ // from the viewed result type.
+ ResultInit *InitData
+ // FullName is the fully qualified name of the viewed result type.
+ FullName string
+ // FullRef is the complete reference to the viewed result type
+ // (including views package name).
+ FullRef string
+ // IsCollection indicates whether the viewed result type is a collection.
+ IsCollection bool
+ // ViewName is the view name to use to render the result type. It is set
+ // only if the result type has at most one view.
+ ViewName string
+ // ViewsPkg is the views package name.
+ ViewsPkg string
+ }
+
+ // ViewData contains data about a result type view.
+ ViewData struct {
+ // Name is the view name.
+ Name string
+ // Description is the view description.
+ Description string
+ }
+
+ // ProjectedTypeData contains the data used to generate a projected type for
+ // the corresponding user type or result type in the service package. The
+ // generated type uses pointers for all fields. It also contains the data
+ // to generate view-based validation logic and transformation functions to
+ // convert a projected type to its corresponding service type and vice versa.
+ ProjectedTypeData struct {
+ // the projected type
+ *UserTypeData
+ // Validations lists the validation functions to run on the projected type.
+ // If the projected type corresponds to a result type then a validation
+ // function for each view is generated. For user types, only one validation
+ // function is generated.
+ Validations []*ValidateData
+ // Projections contains the code to create a projected type based on
+ // views. If the projected type corresponds to a result type, then a
+ // function for each view is generated.
+ Projections []*InitData
+ // TypeInits contains the code to convert a projected type to its
+ // corresponding service type. If the projected type corresponds to a
+ // result type, then a function for each view is generated.
+ TypeInits []*InitData
+ // ViewsPkg is the views package name.
+ ViewsPkg string
+ }
+
+ // InitData contains the data to render a constructor.
+ InitData struct {
+ // Name is the name of the constructor function.
+ Name string
+ // Description is the function description.
+ Description string
+ // Args lists arguments to this function.
+ Args []*InitArgData
+ // ReturnTypeRef is the reference to the return type.
+ ReturnTypeRef string
+ // Code is the transformation code.
+ Code string
+ // Helpers contain the helpers used in the transformation code.
+ Helpers []*codegen.TransformFunctionData
+ }
+
+ // InitArgData represents a single constructor argument.
+ InitArgData struct {
+ // Name is the argument name.
+ Name string
+ // Ref is the reference to the argument type.
+ Ref string
+ }
+
+ // ValidateData contains data to render a validate function.
+ ValidateData struct {
+ // Name is the validation function name.
+ Name string
+ // Ref is the reference to the type on which the validation function
+ // is defined.
+ Ref string
+ // Description is the description for the validation function.
+ Description string
+ // Validate is the validation code.
+ Validate string
+ }
+)
+
+// TypeContext returns a contextual attribute for service types.
+// Service types are Go types and uses non-pointers to hold attributes
+// having default values.
+func TypeContext(att *expr.AttributeExpr, pkg string, scope *codegen.NameScope) *codegen.ContextualAttribute {
+ return &codegen.ContextualAttribute{
+ Attribute: codegen.NewGoAttribute(att, pkg, scope),
+ Required: true,
+ UseDefault: true,
+ }
+}
+
+// ProjectedTypeContext returns a contextual attribute for a projected type.
+// Projected types are Go types that uses pointers for all attributes
+// (even the required ones).
+func ProjectedTypeContext(att *expr.AttributeExpr, pkg string, scope *codegen.NameScope) *codegen.ContextualAttribute {
+ return &codegen.ContextualAttribute{
+ Attribute: codegen.NewGoAttribute(att, pkg, scope),
+ Pointer: true,
+ UseDefault: true,
+ }
+}
+
+// Get retrieves the data for the service with the given name computing it if
+// needed. It returns nil if there is no service with the given name.
+func (d ServicesData) Get(name string) *Data {
+ if data, ok := d[name]; ok {
+ return data
+ }
+ service := expr.Root.Service(name)
+ if service == nil {
+ return nil
+ }
+ d[name] = d.analyze(service)
+ return d[name]
+}
+
+// Method returns the service method data for the method with the given name,
+// nil if there isn't one.
+func (s *Data) Method(name string) *MethodData {
+ for _, m := range s.Methods {
+ if m.Name == name {
+ return m
+ }
+ }
+ return nil
+}
+
+// Scheme returns the scheme data with the given scheme name in the
+// security requirements.
+func Scheme(reqs []*RequirementData, name string) *SchemeData {
+ for _, req := range reqs {
+ for _, sch := range req.Schemes {
+ if sch.SchemeName == name {
+ return sch
+ }
+ }
+ }
+ return nil
+}
+
+// Dup creates a copy of the scheme data.
+func (s *SchemeData) Dup() *SchemeData {
+ return &SchemeData{
+ Type: s.Type,
+ SchemeName: s.SchemeName,
+ Name: s.Name,
+ UsernameField: s.UsernameField,
+ UsernamePointer: s.UsernamePointer,
+ UsernameAttr: s.UsernameAttr,
+ UsernameRequired: s.UsernameRequired,
+ PasswordField: s.PasswordField,
+ PasswordPointer: s.PasswordPointer,
+ PasswordAttr: s.PasswordAttr,
+ PasswordRequired: s.PasswordRequired,
+ CredField: s.CredField,
+ CredPointer: s.CredPointer,
+ CredRequired: s.CredRequired,
+ KeyAttr: s.KeyAttr,
+ Scopes: s.Scopes,
+ Flows: s.Flows,
+ In: s.In,
+ }
+}
+
+// AppendScheme appends a scheme data to schemes only if it doesn't exist.
+func AppendScheme(s []*SchemeData, d *SchemeData) []*SchemeData {
+ found := false
+ for _, se := range s {
+ if se.Name == d.Name {
+ found = true
+ break
+ }
+ }
+ if found {
+ return s
+ }
+ return append(s, d)
+}
+
+// analyze creates the data necessary to render the code of the given service.
+// It records the user types needed by the service definition in userTypes.
+func (d ServicesData) analyze(service *expr.ServiceExpr) *Data {
+ var (
+ scope *codegen.NameScope
+ viewScope *codegen.NameScope
+ pkgName string
+ viewspkg string
+ types []*UserTypeData
+ errTypes []*UserTypeData
+ errorInits []*ErrorInitData
+ projTypes []*ProjectedTypeData
+ viewedRTs []*ViewedResultTypeData
+ seenErrors map[string]struct{}
+ seen map[string]struct{}
+ seenProj map[string]*ProjectedTypeData
+ seenViewed map[string]*ViewedResultTypeData
+ )
+ {
+ scope = codegen.NewNameScope()
+ viewScope = codegen.NewNameScope()
+ pkgName = scope.HashedUnique(service, strings.ToLower(codegen.Goify(service.Name, false)), "svc")
+ viewspkg = pkgName + "views"
+ seen = make(map[string]struct{})
+ seenErrors = make(map[string]struct{})
+ seenProj = make(map[string]*ProjectedTypeData)
+ seenViewed = make(map[string]*ViewedResultTypeData)
+
+ // A function to convert raw object type to user type.
+ makeUserType := func(att *expr.AttributeExpr, name string) {
+ if _, ok := att.Type.(*expr.Object); ok {
+ att.Type = &expr.UserTypeExpr{
+ AttributeExpr: expr.DupAtt(att),
+ TypeName: name,
+ }
+ }
+ if ut, ok := att.Type.(expr.UserType); ok {
+ seen[ut.ID()] = struct{}{}
+ }
+ }
+
+ for _, e := range service.Methods {
+ name := codegen.Goify(e.Name, true)
+ // Create user type for raw object payloads
+ makeUserType(e.Payload, name+"Payload")
+ // Create user type for raw object streaming payloads
+ makeUserType(e.StreamingPayload, name+"StreamingPayload")
+ // Create user type for raw object results
+ makeUserType(e.Result, name+"Result")
+ }
+ recordError := func(er *expr.ErrorExpr) {
+ errTypes = append(errTypes, collectTypes(er.AttributeExpr, scope, seen)...)
+ if er.Type == expr.ErrorResult {
+ if _, ok := seenErrors[er.Name]; ok {
+ return
+ }
+ seenErrors[er.Name] = struct{}{}
+ errorInits = append(errorInits, buildErrorInitData(er, scope))
+ }
+ }
+ for _, er := range service.Errors {
+ recordError(er)
+ }
+
+ // A function to collect inner user types from an attribute expression
+ collectUserTypes := func(att *expr.AttributeExpr) {
+ if ut, ok := att.Type.(expr.UserType); ok {
+ att = ut.Attribute()
+ }
+ types = append(types, collectTypes(att, scope, seen)...)
+ }
+ for _, m := range service.Methods {
+ // collect inner user types
+ collectUserTypes(m.Payload)
+ collectUserTypes(m.StreamingPayload)
+ collectUserTypes(m.Result)
+ if _, ok := m.Result.Type.(*expr.ResultTypeExpr); ok {
+ // collect projected types for the corresponding result type
+ projected := expr.DupAtt(m.Result)
+ projTypes = append(projTypes, collectProjectedTypes(projected, m.Result, viewspkg, scope, viewScope, seenProj)...)
+ }
+ for _, er := range m.Errors {
+ recordError(er)
+ }
+ }
+ }
+
+ for _, t := range expr.Root.Types {
+ if svcs, ok := t.Attribute().Meta["type:generate:force"]; ok {
+ att := &expr.AttributeExpr{Type: t}
+ if len(svcs) > 0 {
+ // Force generate type only in the specified services
+ for _, svc := range svcs {
+ if svc == service.Name {
+ types = append(types, collectTypes(att, scope, seen)...)
+ break
+ }
+ }
+ } else {
+ // Force generate type in all the services
+ types = append(types, collectTypes(att, scope, seen)...)
+ }
+ }
+ }
+
+ var (
+ methods []*MethodData
+ schemes []*SchemeData
+ )
+ {
+ methods = make([]*MethodData, len(service.Methods))
+ for i, e := range service.Methods {
+ m := buildMethodData(e, pkgName, service, scope)
+ if rt, ok := e.Result.Type.(*expr.ResultTypeExpr); ok {
+ if vrt, ok := seenViewed[m.Result]; ok {
+ m.ViewedResult = vrt
+ } else {
+ projected := seenProj[rt.ID()]
+ projAtt := &expr.AttributeExpr{Type: projected.Type}
+ vrt := buildViewedResultType(e.Result, projAtt, viewspkg, scope, viewScope)
+ viewedRTs = append(viewedRTs, vrt)
+ seenViewed[vrt.Name] = vrt
+ m.ViewedResult = vrt
+ }
+ }
+ methods[i] = m
+ for _, r := range m.Requirements {
+ for _, s := range r.Schemes {
+ found := false
+ for _, s2 := range schemes {
+ if s.SchemeName == s2.SchemeName {
+ found = true
+ break
+ }
+ }
+ if !found {
+ schemes = append(schemes, s)
+ }
+ }
+ }
+ }
+ }
+
+ var (
+ desc string
+ )
+ {
+ desc = service.Description
+ if desc == "" {
+ desc = fmt.Sprintf("Service is the %s service interface.", service.Name)
+ }
+ }
+
+ data := &Data{
+ Name: service.Name,
+ Description: desc,
+ VarName: codegen.Goify(service.Name, false),
+ StructName: codegen.Goify(service.Name, true),
+ PkgName: pkgName,
+ ViewsPkg: viewspkg,
+ Methods: methods,
+ Schemes: schemes,
+ UserTypes: types,
+ ErrorTypes: errTypes,
+ ErrorInits: errorInits,
+ ProjectedTypes: projTypes,
+ ViewedResultTypes: viewedRTs,
+ Scope: scope,
+ ViewScope: viewScope,
+ }
+ d[service.Name] = data
+
+ return data
+}
+
+// collectTypes recurses through the attribute to gather all user types and
+// records them in userTypes.
+func collectTypes(at *expr.AttributeExpr, scope *codegen.NameScope, seen map[string]struct{}) (data []*UserTypeData) {
+ if at == nil || at.Type == expr.Empty {
+ return
+ }
+ collect := func(at *expr.AttributeExpr) []*UserTypeData { return collectTypes(at, scope, seen) }
+ switch dt := at.Type.(type) {
+ case expr.UserType:
+ if _, ok := seen[dt.ID()]; ok {
+ return nil
+ }
+ data = append(data, &UserTypeData{
+ Name: dt.Name(),
+ VarName: scope.GoTypeName(at),
+ Description: dt.Attribute().Description,
+ Def: scope.GoTypeDef(dt.Attribute(), false, true),
+ Ref: scope.GoTypeRef(at),
+ Type: dt,
+ })
+ seen[dt.ID()] = struct{}{}
+ data = append(data, collect(dt.Attribute())...)
+ case *expr.Object:
+ for _, nat := range *dt {
+ data = append(data, collect(nat.Attribute)...)
+ }
+ case *expr.Array:
+ data = append(data, collect(dt.ElemType)...)
+ case *expr.Map:
+ data = append(data, collect(dt.KeyType)...)
+ data = append(data, collect(dt.ElemType)...)
+ }
+ return
+}
+
+// buildErrorInitData creates the data needed to generate code around endpoint error return values.
+func buildErrorInitData(er *expr.ErrorExpr, scope *codegen.NameScope) *ErrorInitData {
+ _, temporary := er.AttributeExpr.Meta["goa:error:temporary"]
+ _, timeout := er.AttributeExpr.Meta["goa:error:timeout"]
+ _, fault := er.AttributeExpr.Meta["goa:error:fault"]
+ return &ErrorInitData{
+ Name: fmt.Sprintf("Make%s", codegen.Goify(er.Name, true)),
+ Description: er.Description,
+ ErrName: er.Name,
+ TypeName: scope.GoTypeName(er.AttributeExpr),
+ TypeRef: scope.GoTypeRef(er.AttributeExpr),
+ Temporary: temporary,
+ Timeout: timeout,
+ Fault: fault,
+ }
+}
+
+// buildMethodData creates the data needed to render the given endpoint. It
+// builds the payload, result, error, streaming and security scheme data.
+func buildMethodData(m *expr.MethodExpr, svcPkgName string, service *expr.ServiceExpr, scope *codegen.NameScope) *MethodData {
+ var (
+ vname string
+ desc string
+ payloadName string
+ payloadDef string
+ payloadRef string
+ payloadDesc string
+ payloadEx interface{}
+ spayloadName string
+ spayloadDef string
+ spayloadRef string
+ spayloadDesc string
+ spayloadEx interface{}
+ rname string
+ resultDef string
+ resultRef string
+ resultDesc string
+ resultEx interface{}
+ errors []*ErrorInitData
+ reqs []*RequirementData
+ schemes []string
+ svrStream *StreamData
+ cliStream *StreamData
+ )
+ vname = codegen.Goify(m.Name, true)
+ desc = m.Description
+ if desc == "" {
+ desc = codegen.Goify(m.Name, true) + " implements " + m.Name + "."
+ }
+ if m.Payload.Type != expr.Empty {
+ payloadName = scope.GoTypeName(m.Payload)
+ payloadRef = scope.GoTypeRef(m.Payload)
+ if dt, ok := m.Payload.Type.(expr.UserType); ok {
+ payloadDef = scope.GoTypeDef(dt.Attribute(), false, true)
+ }
+ payloadDesc = m.Payload.Description
+ if payloadDesc == "" {
+ payloadDesc = fmt.Sprintf("%s is the payload type of the %s service %s method.",
+ payloadName, m.Service.Name, m.Name)
+ }
+ payloadEx = m.Payload.Example(expr.Root.API.Random())
+ }
+ if m.StreamingPayload.Type != expr.Empty {
+ spayloadName = scope.GoTypeName(m.StreamingPayload)
+ spayloadRef = scope.GoTypeRef(m.StreamingPayload)
+ if dt, ok := m.StreamingPayload.Type.(expr.UserType); ok {
+ spayloadDef = scope.GoTypeDef(dt.Attribute(), false, true)
+ }
+ spayloadDesc = m.StreamingPayload.Description
+ if spayloadDesc == "" {
+ spayloadDesc = fmt.Sprintf("%s is the streaming payload type of the %s service %s method.",
+ spayloadName, m.Service.Name, m.Name)
+ }
+ spayloadEx = m.StreamingPayload.Example(expr.Root.API.Random())
+ }
+ if m.Result.Type != expr.Empty {
+ rname = scope.GoTypeName(m.Result)
+ resultRef = scope.GoTypeRef(m.Result)
+ if dt, ok := m.Result.Type.(expr.UserType); ok {
+ resultDef = scope.GoTypeDef(dt.Attribute(), false, true)
+ }
+ resultDesc = m.Result.Description
+ if resultDesc == "" {
+ resultDesc = fmt.Sprintf("%s is the result type of the %s service %s method.",
+ rname, m.Service.Name, m.Name)
+ }
+ resultEx = m.Result.Example(expr.Root.API.Random())
+ }
+ if len(m.Errors) > 0 {
+ errors = make([]*ErrorInitData, len(m.Errors))
+ for i, er := range m.Errors {
+ errors[i] = buildErrorInitData(er, scope)
+ }
+ }
+ if m.IsStreaming() {
+ svrStream = &StreamData{
+ Interface: vname + "ServerStream",
+ VarName: m.Name + "ServerStream",
+ EndpointStruct: vname + "EndpointInput",
+ Kind: m.Stream,
+ SendName: "Send",
+ SendDesc: fmt.Sprintf("Send streams instances of %q.", rname),
+ SendTypeName: rname,
+ SendTypeRef: resultRef,
+ MustClose: true,
+ }
+ cliStream = &StreamData{
+ Interface: vname + "ClientStream",
+ VarName: m.Name + "ClientStream",
+ Kind: m.Stream,
+ RecvName: "Recv",
+ RecvDesc: fmt.Sprintf("Recv reads instances of %q from the stream.", rname),
+ RecvTypeName: rname,
+ RecvTypeRef: resultRef,
+ }
+ if m.Stream == expr.ClientStreamKind || m.Stream == expr.BidirectionalStreamKind {
+ switch m.Stream {
+ case expr.ClientStreamKind:
+ if resultRef != "" {
+ svrStream.SendName = "SendAndClose"
+ svrStream.SendDesc = fmt.Sprintf("SendAndClose streams instances of %q and closes the stream.", rname)
+ svrStream.MustClose = false
+ cliStream.RecvName = "CloseAndRecv"
+ cliStream.RecvDesc = fmt.Sprintf("CloseAndRecv stops sending messages to the stream and reads instances of %q from the stream.", rname)
+ } else {
+ cliStream.MustClose = true
+ }
+ case expr.BidirectionalStreamKind:
+ cliStream.MustClose = true
+ }
+ svrStream.RecvName = "Recv"
+ svrStream.RecvDesc = fmt.Sprintf("Recv reads instances of %q from the stream.", spayloadName)
+ svrStream.RecvTypeName = spayloadName
+ svrStream.RecvTypeRef = spayloadRef
+ cliStream.SendName = "Send"
+ cliStream.SendDesc = fmt.Sprintf("Send streams instances of %q.", spayloadName)
+ cliStream.SendTypeName = spayloadName
+ cliStream.SendTypeRef = spayloadRef
+ }
+ }
+ for _, req := range m.Requirements {
+ var rs []*SchemeData
+ for _, s := range req.Schemes {
+ rs = append(rs, buildSchemeData(s, m))
+ found := false
+ for _, es := range schemes {
+ if es == s.Kind.String() {
+ found = true
+ break
+ }
+ }
+ if !found {
+ schemes = append(schemes, s.Kind.String())
+ }
+ }
+ reqs = append(reqs, &RequirementData{Schemes: rs, Scopes: req.Scopes})
+ }
+
+ return &MethodData{
+ Name: m.Name,
+ VarName: vname,
+ Description: desc,
+ Payload: payloadName,
+ PayloadDef: payloadDef,
+ PayloadRef: payloadRef,
+ PayloadDesc: payloadDesc,
+ PayloadEx: payloadEx,
+ StreamingPayload: spayloadName,
+ StreamingPayloadDef: spayloadDef,
+ StreamingPayloadRef: spayloadRef,
+ StreamingPayloadDesc: spayloadDesc,
+ StreamingPayloadEx: spayloadEx,
+ Result: rname,
+ ResultDef: resultDef,
+ ResultRef: resultRef,
+ ResultDesc: resultDesc,
+ ResultEx: resultEx,
+ Errors: errors,
+ Requirements: reqs,
+ Schemes: schemes,
+ ServerStream: svrStream,
+ ClientStream: cliStream,
+ StreamKind: m.Stream,
+ }
+}
+
+// buildSchemeData builds the scheme data for the given scheme and method expr.
+func buildSchemeData(s *expr.SchemeExpr, m *expr.MethodExpr) *SchemeData {
+ if !expr.IsObject(m.Payload.Type) {
+ return nil
+ }
+ switch s.Kind {
+ case expr.BasicAuthKind:
+ userAtt := expr.TaggedAttribute(m.Payload, "security:username")
+ user := codegen.Goify(userAtt, true)
+ passAtt := expr.TaggedAttribute(m.Payload, "security:password")
+ pass := codegen.Goify(passAtt, true)
+ return &SchemeData{
+ Type: s.Kind.String(),
+ SchemeName: s.SchemeName,
+ UsernameAttr: userAtt,
+ UsernameField: user,
+ UsernamePointer: m.Payload.IsPrimitivePointer(userAtt, true),
+ UsernameRequired: m.Payload.IsRequired(userAtt),
+ PasswordAttr: passAtt,
+ PasswordField: pass,
+ PasswordPointer: m.Payload.IsPrimitivePointer(passAtt, true),
+ PasswordRequired: m.Payload.IsRequired(passAtt),
+ }
+ case expr.APIKeyKind:
+ if keyAtt := expr.TaggedAttribute(m.Payload, "security:apikey:"+s.SchemeName); keyAtt != "" {
+ key := codegen.Goify(keyAtt, true)
+ return &SchemeData{
+ Type: s.Kind.String(),
+ Name: s.Name,
+ SchemeName: s.SchemeName,
+ CredField: key,
+ CredPointer: m.Payload.IsPrimitivePointer(keyAtt, true),
+ CredRequired: m.Payload.IsRequired(keyAtt),
+ KeyAttr: keyAtt,
+ In: s.In,
+ }
+ }
+ case expr.JWTKind:
+ if keyAtt := expr.TaggedAttribute(m.Payload, "security:token"); keyAtt != "" {
+ key := codegen.Goify(keyAtt, true)
+ var scopes []string
+ if len(s.Scopes) > 0 {
+ scopes = make([]string, len(s.Scopes))
+ for i, s := range s.Scopes {
+ scopes[i] = s.Name
+ }
+ }
+ return &SchemeData{
+ Type: s.Kind.String(),
+ Name: s.Name,
+ SchemeName: s.SchemeName,
+ CredField: key,
+ CredPointer: m.Payload.IsPrimitivePointer(keyAtt, true),
+ CredRequired: m.Payload.IsRequired(keyAtt),
+ KeyAttr: keyAtt,
+ Scopes: scopes,
+ In: s.In,
+ }
+ }
+ case expr.OAuth2Kind:
+ if keyAtt := expr.TaggedAttribute(m.Payload, "security:accesstoken"); keyAtt != "" {
+ key := codegen.Goify(keyAtt, true)
+ var scopes []string
+ if len(s.Scopes) > 0 {
+ scopes = make([]string, len(s.Scopes))
+ for i, s := range s.Scopes {
+ scopes[i] = s.Name
+ }
+ }
+ return &SchemeData{
+ Type: s.Kind.String(),
+ Name: s.Name,
+ SchemeName: s.SchemeName,
+ CredField: key,
+ CredPointer: m.Payload.IsPrimitivePointer(keyAtt, true),
+ CredRequired: m.Payload.IsRequired(keyAtt),
+ KeyAttr: keyAtt,
+ Scopes: scopes,
+ Flows: s.Flows,
+ In: s.In,
+ }
+ }
+ }
+ return nil
+}
+
+// collectProjectedTypes builds a projected type for every user type found
+// when recursing through the attributes. It stores the projected types in
+// data.
+func collectProjectedTypes(projected, att *expr.AttributeExpr, viewspkg string, scope, viewScope *codegen.NameScope, seen map[string]*ProjectedTypeData) (data []*ProjectedTypeData) {
+ collect := func(projected, att *expr.AttributeExpr) []*ProjectedTypeData {
+ return collectProjectedTypes(projected, att, viewspkg, scope, viewScope, seen)
+ }
+ switch pt := projected.Type.(type) {
+ case expr.UserType:
+ dt := att.Type.(expr.UserType)
+ if pd, ok := seen[dt.ID()]; ok {
+ // a projected type is already created for this user type. We change the
+ // attribute type to this seen projected type. The seen projected type
+ // can be nil if the attribute type has a circular type definition in
+ // which case we don't change the attribute type until the projected type
+ // is created during the recursion.
+ if pd != nil {
+ projected.Type = pd.Type
+ }
+ return
+ }
+ seen[dt.ID()] = nil
+ pt.Rename(pt.Name() + "View")
+ // We recurse before building the projected type so that user types within
+ // a projected type are also converted to their respective projected types.
+ types := collect(pt.Attribute(), dt.Attribute())
+ pd := buildProjectedType(projected, att, viewspkg, scope, viewScope)
+ seen[dt.ID()] = pd
+ data = append(data, pd)
+ data = append(data, types...)
+ case *expr.Array:
+ dt := att.Type.(*expr.Array)
+ data = append(data, collect(pt.ElemType, dt.ElemType)...)
+ case *expr.Map:
+ dt := att.Type.(*expr.Map)
+ data = append(data, collect(pt.KeyType, dt.KeyType)...)
+ data = append(data, collect(pt.ElemType, dt.ElemType)...)
+ case *expr.Object:
+ dt := att.Type.(*expr.Object)
+ for _, n := range *pt {
+ data = append(data, collect(n.Attribute, dt.Attribute(n.Name))...)
+ }
+ }
+ return
+}
+
+// buildProjectedType builds projected type for the given user type.
+//
+// viewspkg is the name of the views package
+//
+func buildProjectedType(projected, att *expr.AttributeExpr, viewspkg string, scope, viewScope *codegen.NameScope) *ProjectedTypeData {
+ var (
+ projections []*InitData
+ typeInits []*InitData
+ validations []*ValidateData
+
+ varname = viewScope.GoTypeName(projected)
+ pt = projected.Type.(expr.UserType)
+ )
+ {
+ if _, isrt := pt.(*expr.ResultTypeExpr); isrt {
+ typeInits = buildTypeInits(projected, att, viewspkg, scope, viewScope)
+ projections = buildProjections(projected, att, viewspkg, scope, viewScope)
+ }
+ validations = buildValidations(projected, viewScope)
+ }
+ return &ProjectedTypeData{
+ UserTypeData: &UserTypeData{
+ Name: varname,
+ Description: fmt.Sprintf("%s is a type that runs validations on a projected type.", varname),
+ VarName: varname,
+ Def: viewScope.GoTypeDef(pt.Attribute(), true, true),
+ Ref: viewScope.GoTypeRef(projected),
+ Type: pt,
+ },
+ Projections: projections,
+ TypeInits: typeInits,
+ Validations: validations,
+ ViewsPkg: viewspkg,
+ }
+}
+
+// buildViewedResultType builds a viewed result type from the given result type
+// and projected type.
+func buildViewedResultType(att, projected *expr.AttributeExpr, viewspkg string, scope, viewScope *codegen.NameScope) *ViewedResultTypeData {
+ // collect result type views
+ var (
+ viewName string
+ views []*ViewData
+
+ rt = att.Type.(*expr.ResultTypeExpr)
+ isarr = expr.IsArray(att.Type)
+ )
+ {
+ if !rt.HasMultipleViews() {
+ viewName = expr.DefaultView
+ }
+ if v, ok := att.Meta["view"]; ok && len(v) > 0 {
+ viewName = v[0]
+ }
+ views = make([]*ViewData, 0, len(rt.Views))
+ for _, view := range rt.Views {
+ views = append(views, &ViewData{Name: view.Name, Description: view.Description})
+ }
+ }
+
+ // build validation data
+ var (
+ validate *ValidateData
+
+ resvar = scope.GoTypeName(att)
+ resref = scope.GoTypeRef(att)
+ )
+ {
+ data := map[string]interface{}{
+ "Projected": scope.GoTypeName(projected),
+ "ArgVar": "result",
+ "Source": "result",
+ "Views": views,
+ "IsViewed": true,
+ }
+ buf := &bytes.Buffer{}
+ if err := validateTypeCodeTmpl.Execute(buf, data); err != nil {
+ panic(err) // bug
+ }
+ name := "Validate" + resvar
+ validate = &ValidateData{
+ Name: name,
+ Description: fmt.Sprintf("%s runs the validations defined on the viewed result type %s.", name, resvar),
+ Ref: resref,
+ Validate: buf.String(),
+ }
+ }
+
+ // build constructor to initialize viewed result type from result type
+ var (
+ init *InitData
+
+ vresref = viewScope.GoFullTypeRef(att, viewspkg)
+ )
+ {
+ data := map[string]interface{}{
+ "ToViewed": true,
+ "ArgVar": "res",
+ "ReturnVar": "vres",
+ "Views": views,
+ "ReturnTypeRef": vresref,
+ "IsCollection": isarr,
+ "TargetType": scope.GoFullTypeName(att, viewspkg),
+ "InitName": "new" + viewScope.GoTypeName(projected),
+ }
+ buf := &bytes.Buffer{}
+ if err := initTypeCodeTmpl.Execute(buf, data); err != nil {
+ panic(err) // bug
+ }
+ name := "NewViewed" + resvar
+ init = &InitData{
+ Name: name,
+ Description: fmt.Sprintf("%s initializes viewed result type %s from result type %s using the given view.", name, resvar, resvar),
+ Args: []*InitArgData{
+ {Name: "res", Ref: scope.GoTypeRef(att)},
+ {Name: "view", Ref: "string"},
+ },
+ ReturnTypeRef: vresref,
+ Code: buf.String(),
+ }
+ }
+
+ // build constructor to initialize result type from viewed result type
+ var resinit *InitData
+ {
+ data := map[string]interface{}{
+ "ToResult": true,
+ "ArgVar": "vres",
+ "ReturnVar": "res",
+ "Views": views,
+ "ReturnTypeRef": resref,
+ "InitName": "new" + scope.GoTypeName(att),
+ }
+ buf := &bytes.Buffer{}
+ if err := initTypeCodeTmpl.Execute(buf, data); err != nil {
+ panic(err) // bug
+ }
+ name := "New" + resvar
+ resinit = &InitData{
+ Name: name,
+ Description: fmt.Sprintf("%s initializes result type %s from viewed result type %s.", name, resvar, resvar),
+ Args: []*InitArgData{{Name: "vres", Ref: scope.GoFullTypeRef(att, viewspkg)}},
+ ReturnTypeRef: resref,
+ Code: buf.String(),
+ }
+ }
+
+ projT := wrapProjected(projected.Type.(expr.UserType))
+ return &ViewedResultTypeData{
+ UserTypeData: &UserTypeData{
+ Name: resvar,
+ Description: fmt.Sprintf("%s is the viewed result type that is projected based on a view.", resvar),
+ VarName: resvar,
+ Def: viewScope.GoTypeDef(projT.Attribute(), false, true),
+ Ref: resref,
+ Type: projT,
+ },
+ FullName: scope.GoFullTypeName(att, viewspkg),
+ FullRef: vresref,
+ ResultInit: resinit,
+ Init: init,
+ Views: views,
+ Validate: validate,
+ IsCollection: isarr,
+ ViewName: viewName,
+ ViewsPkg: viewspkg,
+ }
+}
+
+// wrapProjected builds a viewed result type by wrapping the given projected
+// type in a result type with "projected" and "view" attributes.
+func wrapProjected(projected expr.UserType) expr.UserType {
+ rt := projected.(*expr.ResultTypeExpr)
+ pratt := &expr.NamedAttributeExpr{
+ Name: "projected",
+ Attribute: &expr.AttributeExpr{Type: rt, Description: "Type to project"},
+ }
+ prview := &expr.NamedAttributeExpr{
+ Name: "view",
+ Attribute: &expr.AttributeExpr{Type: expr.String, Description: "View to render"},
+ }
+ return &expr.ResultTypeExpr{
+ UserTypeExpr: &expr.UserTypeExpr{
+ AttributeExpr: &expr.AttributeExpr{
+ Type: &expr.Object{pratt, prview},
+ Validation: &expr.ValidationExpr{Required: []string{"projected", "view"}},
+ },
+ TypeName: rt.TypeName,
+ },
+ Identifier: rt.Identifier,
+ Views: rt.Views,
+ }
+}
+
+// buildTypeInits builds the data to generate the constructor code to
+// initialize a result type from a projected type.
+func buildTypeInits(projected, att *expr.AttributeExpr, viewspkg string, scope, viewScope *codegen.NameScope) []*InitData {
+ prt := projected.Type.(*expr.ResultTypeExpr)
+ pobj := expr.AsObject(projected.Type)
+ parr := expr.AsArray(projected.Type)
+ if parr != nil {
+ // result type collection
+ pobj = expr.AsObject(parr.ElemType.Type)
+ }
+
+ // For every view defined in the result type, build a constructor function
+ // to create the result type from a projected type based on the view.
+ var init []*InitData
+ {
+ init = make([]*InitData, 0, len(prt.Views))
+ for _, view := range prt.Views {
+ var (
+ typ expr.DataType
+
+ obj = &expr.Object{}
+ )
+ {
+ walkViewAttrs(pobj, view, func(name string, att, _ *expr.AttributeExpr) {
+ obj.Set(name, att)
+ })
+ typ = obj
+ if parr != nil {
+ typ = &expr.Array{ElemType: &expr.AttributeExpr{
+ Type: &expr.ResultTypeExpr{
+ UserTypeExpr: &expr.UserTypeExpr{
+ AttributeExpr: &expr.AttributeExpr{Type: obj},
+ TypeName: scope.GoTypeName(parr.ElemType),
+ },
+ },
+ }}
+ }
+ }
+ src := &expr.AttributeExpr{
+ Type: &expr.ResultTypeExpr{
+ UserTypeExpr: &expr.UserTypeExpr{
+ AttributeExpr: &expr.AttributeExpr{Type: typ},
+ TypeName: scope.GoTypeName(projected),
+ },
+ Views: prt.Views,
+ Identifier: prt.Identifier,
+ },
+ }
+
+ var (
+ name string
+ code string
+ helpers []*codegen.TransformFunctionData
+
+ srcCA = ProjectedTypeContext(src, viewspkg, viewScope)
+ tgtCA = TypeContext(att, "", scope)
+ resvar = scope.GoTypeName(att)
+ )
+ {
+ name = "new" + resvar
+ if view.Name != expr.DefaultView {
+ name += codegen.Goify(view.Name, true)
+ }
+ code, helpers = buildConstructorCode(srcCA, tgtCA, "vres", "res", view.Name)
+ }
+
+ init = append(init, &InitData{
+ Name: name,
+ Description: fmt.Sprintf("%s converts projected type %s to service type %s.", name, resvar, resvar),
+ Args: []*InitArgData{{Name: "vres", Ref: viewScope.GoFullTypeRef(projected, viewspkg)}},
+ ReturnTypeRef: scope.GoTypeRef(att),
+ Code: code,
+ Helpers: helpers,
+ })
+ }
+ }
+ return init
+}
+
+// buildProjections builds the data to generate the constructor code to
+// project a result type to a projected type based on a view.
+func buildProjections(projected, att *expr.AttributeExpr, viewspkg string, scope, viewScope *codegen.NameScope) []*InitData {
+ var (
+ projections []*InitData
+
+ rt = att.Type.(*expr.ResultTypeExpr)
+ )
+
+ projections = make([]*InitData, 0, len(rt.Views))
+ for _, view := range rt.Views {
+ var (
+ typ expr.DataType
+
+ obj = &expr.Object{}
+ )
+ {
+ pobj := expr.AsObject(projected.Type)
+ parr := expr.AsArray(projected.Type)
+ if parr != nil {
+ // result type collection
+ pobj = expr.AsObject(parr.ElemType.Type)
+ }
+ walkViewAttrs(pobj, view, func(name string, att, _ *expr.AttributeExpr) {
+ obj.Set(name, att)
+ })
+ typ = obj
+ if parr != nil {
+ typ = &expr.Array{ElemType: &expr.AttributeExpr{
+ Type: &expr.ResultTypeExpr{
+ UserTypeExpr: &expr.UserTypeExpr{
+ AttributeExpr: &expr.AttributeExpr{Type: obj},
+ TypeName: parr.ElemType.Type.Name(),
+ },
+ },
+ }}
+ }
+ }
+ tgt := &expr.AttributeExpr{
+ Type: &expr.ResultTypeExpr{
+ UserTypeExpr: &expr.UserTypeExpr{
+ AttributeExpr: &expr.AttributeExpr{Type: typ},
+ TypeName: projected.Type.Name(),
+ },
+ Views: rt.Views,
+ Identifier: rt.Identifier,
+ },
+ }
+
+ var (
+ name string
+ code string
+ helpers []*codegen.TransformFunctionData
+
+ srcCA = TypeContext(att, "", scope)
+ tgtCA = ProjectedTypeContext(tgt, viewspkg, viewScope)
+ tname = scope.GoTypeName(projected)
+ )
+ {
+ name = "new" + tname
+ if view.Name != expr.DefaultView {
+ name += codegen.Goify(view.Name, true)
+ }
+ code, helpers = buildConstructorCode(srcCA, tgtCA, "res", "vres", view.Name)
+ }
+
+ projections = append(projections, &InitData{
+ Name: name,
+ Description: fmt.Sprintf("%s projects result type %s into projected type %s using the %q view.", name, scope.GoTypeName(att), tname, view.Name),
+ Args: []*InitArgData{{Name: "res", Ref: scope.GoTypeRef(att)}},
+ ReturnTypeRef: viewScope.GoFullTypeRef(projected, viewspkg),
+ Code: code,
+ Helpers: helpers,
+ })
+ }
+ return projections
+}
+
+// buildValidations builds the data required to generate validations for the
+// projected types.
+func buildValidations(projected *expr.AttributeExpr, scope *codegen.NameScope) []*ValidateData {
+ var (
+ validations []*ValidateData
+
+ ut = projected.Type.(expr.UserType)
+ tname = scope.GoTypeName(projected)
+ )
+ if rt, isrt := ut.(*expr.ResultTypeExpr); isrt {
+ // for result types we create a validation function containing view
+ // specific validation logic for each view
+ arr := expr.AsArray(projected.Type)
+ for _, view := range rt.Views {
+ data := map[string]interface{}{
+ "Projected": tname,
+ "ArgVar": "result",
+ "Source": "result",
+ "IsCollection": arr != nil,
+ }
+ var (
+ name string
+ vn string
+ )
+ {
+ name = "Validate" + tname
+ if view.Name != "default" {
+ vn = codegen.Goify(view.Name, true)
+ name += vn
+ }
+ }
+
+ if arr != nil {
+ // dealing with an array type
+ data["Source"] = "item"
+ data["ValidateVar"] = "Validate" + scope.GoTypeName(arr.ElemType) + vn
+ } else {
+ var (
+ ca *codegen.ContextualAttribute
+ fields []map[string]interface{}
+
+ o = &expr.Object{}
+ )
+ {
+ walkViewAttrs(expr.AsObject(projected.Type), view, func(name string, attr, vatt *expr.AttributeExpr) {
+ if rt, ok := attr.Type.(*expr.ResultTypeExpr); ok {
+ // use explicitly specified view (if any) for the attribute,
+ // otherwise use default
+ vw := ""
+ if v, ok := vatt.Meta["view"]; ok && len(v) > 0 && v[0] != expr.DefaultView {
+ vw = v[0]
+ }
+ fields = append(fields, map[string]interface{}{
+ "Name": name,
+ "ValidateVar": "Validate" + scope.GoTypeName(attr) + codegen.Goify(vw, true),
+ "IsRequired": rt.Attribute().IsRequired(name),
+ })
+ } else {
+ o.Set(name, attr)
+ }
+ })
+ ca = ProjectedTypeContext(&expr.AttributeExpr{Type: o, Validation: rt.Validation}, "", scope)
+ }
+ data["Validate"] = codegen.RecursiveValidationCode(ca, "result")
+ data["Fields"] = fields
+ }
+
+ buf := &bytes.Buffer{}
+ if err := validateTypeCodeTmpl.Execute(buf, data); err != nil {
+ panic(err) // bug
+ }
+
+ validations = append(validations, &ValidateData{
+ Name: name,
+ Description: fmt.Sprintf("%s runs the validations defined on %s using the %q view.", name, tname, view.Name),
+ Ref: scope.GoTypeRef(projected),
+ Validate: buf.String(),
+ })
+ }
+ } else {
+ // for a user type or a result type with single view, we generate only one validation
+ // function containing the validation logic
+ name := "Validate" + tname
+ ca := ProjectedTypeContext(ut.Attribute(), "", scope)
+ validations = append(validations, &ValidateData{
+ Name: name,
+ Description: fmt.Sprintf("%s runs the validations defined on %s.", name, tname),
+ Ref: scope.GoTypeRef(projected),
+ Validate: codegen.RecursiveValidationCode(ca, "result"),
+ })
+ }
+ return validations
+}
+
+// buildConstructorCode builds the transformation code to create a projected
+// type from a service type and vice versa.
+//
+// source and target contains the projected/service contextual attributes
+//
+// sourceVar and targetVar contains the variable name that holds the source and
+// target data structures in the transformation code.
+//
+// view is used to generate the constructor function name.
+//
+func buildConstructorCode(source, target *codegen.ContextualAttribute, sourceVar, targetVar, view string) (string, []*codegen.TransformFunctionData) {
+ var (
+ helpers []*codegen.TransformFunctionData
+ buf bytes.Buffer
+ )
+ src := source.Attribute.Expr()
+ tgt := target.Attribute.Expr()
+ rt := src.Type.(*expr.ResultTypeExpr)
+ arr := expr.AsArray(tgt.Type)
+
+ data := map[string]interface{}{
+ "ArgVar": sourceVar,
+ "ReturnVar": targetVar,
+ "IsCollection": arr != nil,
+ "TargetType": target.Attribute.Name(),
+ }
+
+ if arr != nil {
+ // result type collection
+ init := "new" + target.Attribute.Scope().GoTypeName(arr.ElemType)
+ if view != "" && view != expr.DefaultView {
+ init += codegen.Goify(view, true)
+ }
+ data["InitName"] = init
+ if err := initTypeCodeTmpl.Execute(&buf, data); err != nil {
+ panic(err) // bug
+ }
+ return buf.String(), helpers
+ }
+
+ // service type to projected type (or vice versa)
+ targetRTs := &expr.Object{}
+ tatt := expr.DupAtt(tgt)
+ tobj := expr.AsObject(tatt.Type)
+ for _, nat := range *tobj {
+ if _, ok := nat.Attribute.Type.(*expr.ResultTypeExpr); ok {
+ targetRTs.Set(nat.Name, nat.Attribute)
+ tobj.Delete(nat.Name)
+ }
+ }
+ data["Source"] = sourceVar
+ data["Target"] = targetVar
+
+ var (
+ code string
+ err error
+ )
+ {
+
+ // build code for target with no result types
+ if code, helpers, err = codegen.GoTransform(source, target.Dup(tatt, true), sourceVar, targetVar, "transform"); err != nil {
+ panic(err) // bug
+ }
+ }
+ data["Code"] = code
+
+ if view != "" {
+ data["InitName"] = target.Dup(src, true).Attribute.Name()
+ }
+ fields := make([]map[string]interface{}, 0, len(*targetRTs))
+ // iterate through the result types found in the target and add the
+ // code to initialize them
+ for _, nat := range *targetRTs {
+ finit := "new" + target.Attribute.Scope().GoTypeName(nat.Attribute)
+ if view != "" {
+ v := ""
+ if vatt := rt.View(view).AttributeExpr.Find(nat.Name); vatt != nil {
+ if attv, ok := vatt.Meta["view"]; ok && len(attv) > 0 && attv[0] != expr.DefaultView {
+ // view is explicitly set for the result type on the attribute
+ v = attv[0]
+ }
+ }
+ finit += codegen.Goify(v, true)
+ }
+ fields = append(fields, map[string]interface{}{
+ "VarName": codegen.Goify(nat.Name, true),
+ "FieldInit": finit,
+ })
+ }
+ data["Fields"] = fields
+
+ if err := initTypeCodeTmpl.Execute(&buf, data); err != nil {
+ panic(err) // bug
+ }
+ return buf.String(), helpers
+}
+
+// walkViewAttrs iterates through the attributes in att that are found in the
+// given view and executes the walker function.
+func walkViewAttrs(obj *expr.Object, view *expr.ViewExpr, walker func(name string, attr, vatt *expr.AttributeExpr)) {
+ for _, nat := range *expr.AsObject(view.Type) {
+ if attr := obj.Attribute(nat.Name); attr != nil {
+ walker(nat.Name, attr, nat.Attribute)
+ }
+ }
+}
+
+const (
+ initTypeCodeT = `{{- if or .ToResult .ToViewed -}}
+ var {{ .ReturnVar }} {{ .ReturnTypeRef }}
+ switch {{ if .ToResult }}{{ .ArgVar }}.View{{ else }}view{{ end }} {
+ {{- range .Views }}
+ case {{ printf "%q" .Name }}{{ if eq .Name "default" }}, ""{{ end }}:
+ {{- if $.ToViewed }}
+ p := {{ $.InitName }}{{ if ne .Name "default" }}{{ goify .Name true }}{{ end }}({{ $.ArgVar }})
+ {{ $.ReturnVar }} = {{ if not $.IsCollection }}&{{ end }}{{ $.TargetType }}{ p, {{ printf "%q" .Name }} }
+ {{- else }}
+ {{ $.ReturnVar }} = {{ $.InitName }}{{ if ne .Name "default" }}{{ goify .Name true }}{{ end }}({{ $.ArgVar }}.Projected)
+ {{- end }}
+ {{- end }}
+ }
+{{- else if .IsCollection -}}
+ {{ .ReturnVar }} := make({{ .TargetType }}, len({{ .ArgVar }}))
+ for i, n := range {{ .ArgVar }} {
+ {{ .ReturnVar }}[i] = {{ .InitName }}(n)
+ }
+{{- else -}}
+ {{ .Code }}
+ {{- range .Fields }}
+ if {{ $.Source }}.{{ .VarName }} != nil {
+ {{ $.Target }}.{{ .VarName }} = {{ .FieldInit }}({{ $.Source }}.{{ .VarName }})
+ }
+ {{- end }}
+{{- end }}
+return {{ .ReturnVar }}`
+
+ validateTypeT = `{{- if .IsViewed -}}
+switch {{ .ArgVar }}.View {
+ {{- range .Views }}
+case {{ printf "%q" .Name }}{{ if eq .Name "default" }}, ""{{ end }}:
+ err = Validate{{ $.Projected }}{{ if ne .Name "default" }}{{ goify .Name true }}{{ end }}({{ $.ArgVar }}.Projected)
+ {{- end }}
+default:
+ err = goa.InvalidEnumValueError("view", {{ .Source }}.View, []interface{}{ {{ range .Views }}{{ printf "%q" .Name }}, {{ end }} })
+}
+{{- else -}}
+ {{- if .IsCollection -}}
+for _, {{ $.Source }} := range {{ $.ArgVar }} {
+ if err2 := {{ .ValidateVar }}({{ $.Source }}); err2 != nil {
+ err = goa.MergeErrors(err, err2)
+ }
+}
+ {{- else -}}
+ {{ .Validate }}
+ {{- range .Fields -}}
+ {{- if .IsRequired -}}
+if {{ $.Source }}.{{ goify .Name true }} == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError({{ printf "%q" .Name }}, {{ printf "%q" $.Source }}))
+}
+ {{- end }}
+if {{ $.Source }}.{{ goify .Name true }} != nil {
+ if err2 := {{ .ValidateVar }}({{ $.Source }}.{{ goify .Name true }}); err2 != nil {
+ err = goa.MergeErrors(err, err2)
+ }
+}
+ {{- end -}}
+ {{- end -}}
+{{- end -}}
+`
+)
diff --git a/vendor/goa.design/goa/codegen/service/views.go b/vendor/goa.design/goa/codegen/service/views.go
new file mode 100644
index 000000000..a5a04919c
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/service/views.go
@@ -0,0 +1,73 @@
+package service
+
+import (
+ "path/filepath"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// ViewsFile returns the views file for the given service to render result
+// types (if any) using the defined views.
+func ViewsFile(genpkg string, service *expr.ServiceExpr) *codegen.File {
+ svc := Services.Get(service.Name)
+ if len(svc.ProjectedTypes) == 0 {
+ return nil
+ }
+ path := filepath.Join(codegen.Gendir, codegen.SnakeCase(service.Name), "views", "view.go")
+ var (
+ sections []*codegen.SectionTemplate
+ )
+ {
+ header := codegen.Header(service.Name+" views", "views",
+ []*codegen.ImportSpec{
+ {Path: "goa.design/goa", Name: "goa"},
+ {Path: "unicode/utf8"},
+ })
+ sections = []*codegen.SectionTemplate{header}
+
+ // type definitions
+ for _, t := range svc.ViewedResultTypes {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "viewed-result-type",
+ Source: userTypeT,
+ Data: t.UserTypeData,
+ })
+ }
+ for _, t := range svc.ProjectedTypes {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "projected-type",
+ Source: userTypeT,
+ Data: t.UserTypeData,
+ })
+ }
+
+ // validations
+ for _, t := range svc.ViewedResultTypes {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "validate-viewed-result-type",
+ Source: validateT,
+ Data: t.Validate,
+ })
+ }
+ for _, t := range svc.ProjectedTypes {
+ for _, v := range t.Validations {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "validate-projected-type",
+ Source: validateT,
+ Data: v,
+ })
+ }
+ }
+ }
+
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+// input: ValidateData
+const validateT = `{{ comment .Description }}
+func {{ .Name }}(result {{ .Ref }}) (err error) {
+ {{ .Validate }}
+ return
+}
+`
diff --git a/vendor/goa.design/goa/codegen/testing.go b/vendor/goa.design/goa/codegen/testing.go
new file mode 100644
index 000000000..1ba59b3af
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/testing.go
@@ -0,0 +1,168 @@
+package codegen
+
+import (
+ "bytes"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "strings"
+ "testing"
+
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+
+ "github.com/sergi/go-diff/diffmatchpatch"
+)
+
+// RunDSL returns the DSL root resulting from running the given DSL.
+func RunDSL(t *testing.T, dsl func()) *expr.RootExpr {
+ eval.Reset()
+ expr.Root = new(expr.RootExpr)
+ expr.Root.GeneratedTypes = &expr.GeneratedRoot{}
+ eval.Register(expr.Root)
+ eval.Register(expr.Root.GeneratedTypes)
+ expr.Root.API = expr.NewAPIExpr("test api", func() {})
+ expr.Root.API.Servers = []*expr.ServerExpr{expr.Root.API.DefaultServer()}
+ if !eval.Execute(dsl, nil) {
+ t.Fatal(eval.Context.Error())
+ }
+ if err := eval.RunDSL(); err != nil {
+ t.Fatal(err)
+ }
+ return expr.Root
+}
+
+// RunDSLWithFunc returns the DSL root resulting from running the given DSL.
+// It executes a function to add any top-level types to the design Root before
+// running the DSL.
+func RunDSLWithFunc(t *testing.T, dsl func(), fn func()) *expr.RootExpr {
+ eval.Reset()
+ expr.Root = new(expr.RootExpr)
+ expr.Root.GeneratedTypes = &expr.GeneratedRoot{}
+ eval.Register(expr.Root)
+ eval.Register(expr.Root.GeneratedTypes)
+ expr.Root.API = expr.NewAPIExpr("test api", func() {})
+ expr.Root.API.Servers = []*expr.ServerExpr{expr.Root.API.DefaultServer()}
+ fn()
+ if !eval.Execute(dsl, nil) {
+ t.Fatal(eval.Context.Error())
+ }
+ if err := eval.RunDSL(); err != nil {
+ t.Fatal(err)
+ }
+ return expr.Root
+}
+
+// SectionCode generates and formats the code for the given section.
+func SectionCode(t *testing.T, section *SectionTemplate) string {
+ return sectionCodeWithPrefix(t, section, "package foo\n")
+}
+
+// SectionsCode generates and formats the code for the given sections.
+func SectionsCode(t *testing.T, sections []*SectionTemplate) string {
+ codes := make([]string, len(sections))
+ for i, section := range sections {
+ codes[i] = sectionCodeWithPrefix(t, section, "package foo\n")
+ }
+ return strings.Join(codes, "\n")
+}
+
+// SectionCodeFromImportsAndMethods generates and formats the code for given import and method definition sections.
+func SectionCodeFromImportsAndMethods(t *testing.T, importSection *SectionTemplate, methodSection *SectionTemplate) string {
+ var code bytes.Buffer
+ if err := importSection.Write(&code); err != nil {
+ t.Fatal(err)
+ }
+
+ return sectionCodeWithPrefix(t, methodSection, code.String())
+}
+
+func sectionCodeWithPrefix(t *testing.T, section *SectionTemplate, prefix string) string {
+ var code bytes.Buffer
+ if err := section.Write(&code); err != nil {
+ t.Fatal(err)
+ }
+
+ codestr := code.String()
+
+ if len(prefix) > 0 {
+ codestr = fmt.Sprintf("%s\n%s", prefix, codestr)
+ }
+
+ return FormatTestCode(t, codestr)
+}
+
+// FormatTestCode formats the given Go code. The code must correspond to the
+// content of a valid Go source file (i.e. start with "package")
+func FormatTestCode(t *testing.T, code string) string {
+ tmp := CreateTempFile(t, code)
+ defer os.Remove(tmp)
+ if err := finalizeGoSource(tmp); err != nil {
+ t.Fatal(err)
+ }
+ content, err := ioutil.ReadFile(tmp)
+ if err != nil {
+ t.Fatal(err)
+ }
+ return strings.Join(strings.Split(string(content), "\n")[2:], "\n")
+}
+
+// Diff returns a diff between s1 and s2. It uses the diff tool if installed
+// otherwise degrades to using the dmp package.
+func Diff(t *testing.T, s1, s2 string) string {
+ _, err := exec.LookPath("diff")
+ supportsDiff := (err == nil)
+ if !supportsDiff {
+ dmp := diffmatchpatch.New()
+ diffs := dmp.DiffMain(s1, s2, false)
+ return dmp.DiffPrettyText(diffs)
+ }
+ left := CreateTempFile(t, s1)
+ right := CreateTempFile(t, s2)
+ defer os.Remove(left)
+ defer os.Remove(right)
+ cmd := exec.Command("diff", left, right)
+ diffb, _ := cmd.CombinedOutput()
+ return strings.Replace(string(diffb), "\t", " ␉ ", -1)
+}
+
+// NewUseDefaultContext returns a contextual attribute which uses non-pointers
+// for attributes with default values. It is used only in tests.
+func NewUseDefaultContext(att Attributor) *ContextualAttribute {
+ return &ContextualAttribute{
+ Attribute: att,
+ Required: true,
+ Pointer: false,
+ UseDefault: true,
+ }
+}
+
+// NewPointerContext returns a contextual attribute which uses pointers for all
+// attributes.
+func NewPointerContext(att Attributor) *ContextualAttribute {
+ return &ContextualAttribute{
+ Attribute: att,
+ Required: false,
+ Pointer: true,
+ UseDefault: false,
+ }
+}
+
+// CreateTempFile creates a temporary file and writes the given content.
+// It is used only for testing.
+func CreateTempFile(t *testing.T, content string) string {
+ f, err := ioutil.TempFile("", "")
+ if err != nil {
+ t.Fatal(err)
+ }
+ _, err = f.WriteString(content)
+ if err != nil {
+ os.Remove(f.Name())
+ t.Fatal(err)
+ }
+ if err := f.Close(); err != nil {
+ t.Fatal(err)
+ }
+ return f.Name()
+}
diff --git a/vendor/goa.design/goa/codegen/transformer.go b/vendor/goa.design/goa/codegen/transformer.go
new file mode 100644
index 000000000..1acf0c5fc
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/transformer.go
@@ -0,0 +1,258 @@
+package codegen
+
+import (
+ "fmt"
+
+ "goa.design/goa/expr"
+)
+
+type (
+ // Transformer produces code that initializes data structure defined by
+ // target from an instance of the data structure described by source. The
+ // data structures can be objects, arrays or maps. The algorithm matches
+ // object fields by name and ignores object fields in target that don't
+ // have a match in source.
+ Transformer interface {
+ // Transform returns the code that initializes data structure defined by
+ // target attribute from an instance of the data structure defined by
+ // source. It leverages mapped attributes so that attribute names may use
+ // the "name:elem" syntax to define the name of the design attribute and
+ // the name of the corresponding generated field. It returns an error
+ // if target is not compatible with source (different type, fields of
+ // different type etc).
+ Transform(source, target *ContextualAttribute, ta *TransformAttrs) (code string, err error)
+ // TransformObject returns the code to initialize a target data structure
+ // defined by object type from an instance of source data structure defined
+ // by an object type. The algorithm matches object fields by name and
+ // ignores object fields in target that don't have a match in source.
+ // It returns an error if source and target are different types or have
+ // fields of different types.
+ TransformObject(source, target *ContextualAttribute, ta *TransformAttrs) (code string, err error)
+ // TransformArray returns the code to initialize a target array from a
+ // source array. It returns an error if source and target are not arrays
+ // and have fields of different types in the array element.
+ TransformArray(source, target *ContextualAttribute, ta *TransformAttrs) (code string, err error)
+ // TransformMap returns the code to initialize a target map from a
+ // source map. It returns an error if source and target are not maps
+ // and have fields of different types in the map key and element.
+ TransformMap(source, target *ContextualAttribute, ta *TransformAttrs) (code string, err error)
+ // MakeCompatible checks whether target is compatible with the source
+ // (same type, fields of different type, etc) and returns an error if
+ // target cannot be made compatible to the source. If no error, it returns
+ // the compatible source and target attributes with the updated transform
+ // attributes to make them compatible.
+ MakeCompatible(source, target *ContextualAttribute, ta *TransformAttrs, suffix string) (src, tgt *ContextualAttribute, newTA *TransformAttrs, err error)
+ Converter
+ }
+
+ // Referencer refers to a type.
+ Referencer interface {
+ // Name returns the type name.
+ Name() string
+ // Ref returns the reference to the type.
+ Ref() string
+ }
+
+ // Definer generates code that defines a type.
+ Definer interface {
+ // Def returns the code defining a type. Pointer and useDefault parameters
+ // are used to determine if the type fields must be a pointer.
+ Def(pointer, useDefault bool) string
+ }
+
+ // Attributor is the interface implemented by code generators to generate
+ // code for an attribute type.
+ Attributor interface {
+ Scoper
+ Referencer
+ Definer
+ // Field produces a valid field name for the attribute type.
+ Field(name string, firstUpper bool) string
+ // Expr returns the underlying attribute expression.
+ Expr() *expr.AttributeExpr
+ // Dup creates a copy of the attributor by setting the underlying
+ // attribute expression.
+ Dup(*expr.AttributeExpr) Attributor
+ }
+
+ // Converter is the interface implemented by code generators to generate
+ // code to convert source attribute type to a target attribute type.
+ Converter interface {
+ // ConvertType produces code to initialize target attribute type from a
+ // source attribute type held by variable in sourceVar. It is not a
+ // recursive function.
+ ConvertType(source, target Attributor, sourceVar string) (code string)
+ }
+
+ // ContextualAttribute determines how an attribute behaves based on certain
+ // properties during code generation.
+ ContextualAttribute struct {
+ // Attribute is the attribute expression for which the code is generated.
+ Attribute Attributor
+ // NonPointer if true indicates that the attribute type is not generated
+ // as a pointer irrespective of whether the attribue is required or has
+ // a default value.
+ NonPointer bool
+ // Pointer if true indicates that the attribute type is generated as a
+ // pointer even if the attribute is required or has a default value.
+ // Array and map types are always non-pointers. Object types are always
+ // pointers.
+ Pointer bool
+ // UseDefault if true indicates that attribute type must be a non-pointer
+ // if it has a default value except object type which is always a pointer.
+ UseDefault bool
+ // Required if true indicates that the attribute is required.
+ Required bool
+ }
+
+ // TransformAttrs are the attributes that help in the transformation.
+ TransformAttrs struct {
+ // SourceVar and TargetVar are the source and target variable names used
+ // in the transformation code.
+ SourceVar, TargetVar string
+ // NewVar is used to determine the assignment operator to initialize
+ // TargetVar.
+ NewVar bool
+ }
+
+ // TransformFunctionData describes a helper function used to transform
+ // user types. These are necessary to prevent potential infinite
+ // recursion when a type attribute is defined recursively. For example:
+ //
+ // var Recursive = Type("Recursive", func() {
+ // Attribute("r", "Recursive")
+ // }
+ //
+ // Transforming this type requires generating an intermediary function:
+ //
+ // func recursiveToRecursive(r *Recursive) *service.Recursive {
+ // var t service.Recursive
+ // if r.R != nil {
+ // t.R = recursiveToRecursive(r.R)
+ // }
+ // }
+ //
+ TransformFunctionData struct {
+ Name string
+ ParamTypeRef string
+ ResultTypeRef string
+ Code string
+ }
+)
+
+// NewGoContextAttr returns a default Go contextual attribute that produces Go
+// code.
+func NewGoContextAttr(att *expr.AttributeExpr, pkg string, scope *NameScope) *ContextualAttribute {
+ return &ContextualAttribute{Attribute: NewGoAttribute(att, pkg, scope)}
+}
+
+// IsPointer checks if the attribute type is a pointer. It returns false
+// if attribute type is an array, map, byte array, or an interface. If Pointer
+// property is true, IsPointer returns true. If NonPointer property is true,
+// IsPointer returns false. If both Pointer and NonPointer are false, the
+// following table shows how the attribute properties affect the return value
+//
+// UseDefault | Required | IsPointer
+// T | T | T
+// F | F | T
+// T | F | F if default value exists, else T
+// F | T | T
+//
+func (c *ContextualAttribute) IsPointer() bool {
+ if dt := c.Attribute.Expr().Type.Kind(); dt == expr.BytesKind || dt == expr.AnyKind {
+ return false
+ }
+ if c.NonPointer {
+ return false
+ }
+ if c.Pointer {
+ return true
+ }
+ return !c.Required && c.DefaultValue() == nil
+}
+
+// DefaultValue returns the default value of the attribute type if UseDefault
+// is true. It returns nil otherwise.
+func (c *ContextualAttribute) DefaultValue() interface{} {
+ if c.UseDefault {
+ return c.Attribute.Expr().DefaultValue
+ }
+ return nil
+}
+
+// Def returns the attribute type definition.
+func (c *ContextualAttribute) Def() string {
+ return c.Attribute.Def(c.Pointer, c.UseDefault)
+}
+
+// Dup creates a shallow copy of the contextual attribute with the given
+// attributor and its requiredness.
+func (c *ContextualAttribute) Dup(attr *expr.AttributeExpr, required bool) *ContextualAttribute {
+ return &ContextualAttribute{
+ Attribute: c.Attribute.Dup(attr),
+ Required: required,
+ NonPointer: c.NonPointer,
+ Pointer: c.Pointer,
+ UseDefault: c.UseDefault,
+ }
+}
+
+// IsCompatible returns an error if a and b are not both objects, both arrays,
+// both maps or both the same primitive type. actx and bctx are used to build
+// the error message if any.
+func IsCompatible(a, b expr.DataType, actx, bctx string) error {
+ switch {
+ case expr.IsObject(a):
+ if !expr.IsObject(b) {
+ return fmt.Errorf("%s is an object but %s type is %s", actx, bctx, b.Name())
+ }
+ case expr.IsArray(a):
+ if !expr.IsArray(b) {
+ return fmt.Errorf("%s is an array but %s type is %s", actx, bctx, b.Name())
+ }
+ case expr.IsMap(a):
+ if !expr.IsMap(b) {
+ return fmt.Errorf("%s is a hash but %s type is %s", actx, bctx, b.Name())
+ }
+ default:
+ if a.Kind() != b.Kind() {
+ return fmt.Errorf("%s is a %s but %s type is %s", actx, a.Name(), bctx, b.Name())
+ }
+ }
+ return nil
+}
+
+// AppendHelpers takes care of only appending helper functions from newH that
+// are not already in oldH.
+func AppendHelpers(oldH, newH []*TransformFunctionData) []*TransformFunctionData {
+ for _, h := range newH {
+ found := false
+ for _, h2 := range oldH {
+ if h.Name == h2.Name {
+ found = true
+ break
+ }
+ }
+ if !found {
+ oldH = append(oldH, h)
+ }
+ }
+ return oldH
+}
+
+// HelperName returns the transformation function name to initialize a target
+// user type from an instance of a source user type.
+func HelperName(source, target Attributor, prefix string) string {
+ var (
+ sname string
+ tname string
+ )
+ {
+ sname = Goify(source.Name(), true)
+ tname = Goify(target.Name(), true)
+ if prefix == "" {
+ prefix = "transform"
+ }
+ }
+ return Goify(prefix+sname+"To"+tname, false)
+}
diff --git a/vendor/goa.design/goa/codegen/types.go b/vendor/goa.design/goa/codegen/types.go
new file mode 100644
index 000000000..ccbc05fcc
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/types.go
@@ -0,0 +1,66 @@
+package codegen
+
+import (
+ "fmt"
+ "sort"
+ "strings"
+
+ "goa.design/goa/expr"
+)
+
+// GoNativeTypeName returns the Go built-in type corresponding to the given
+// primitive type. GoNativeType panics if t is not a primitive type.
+func GoNativeTypeName(t expr.DataType) string {
+ switch t.Kind() {
+ case expr.BooleanKind:
+ return "bool"
+ case expr.IntKind:
+ return "int"
+ case expr.Int32Kind:
+ return "int32"
+ case expr.Int64Kind:
+ return "int64"
+ case expr.UIntKind:
+ return "uint"
+ case expr.UInt32Kind:
+ return "uint32"
+ case expr.UInt64Kind:
+ return "uint64"
+ case expr.Float32Kind:
+ return "float32"
+ case expr.Float64Kind:
+ return "float64"
+ case expr.StringKind:
+ return "string"
+ case expr.BytesKind:
+ return "[]byte"
+ case expr.AnyKind:
+ return "interface{}"
+ default:
+ panic(fmt.Sprintf("cannot compute native Go type for %T", t)) // bug
+ }
+}
+
+// AttributeTags computes the struct field tags from its metadata if any.
+func AttributeTags(parent, att *expr.AttributeExpr) string {
+ var elems []string
+ keys := make([]string, len(att.Meta))
+ i := 0
+ for k := range att.Meta {
+ keys[i] = k
+ i++
+ }
+ sort.Strings(keys)
+ for _, key := range keys {
+ val := att.Meta[key]
+ if strings.HasPrefix(key, "struct:tag:") {
+ name := key[11:]
+ value := strings.Join(val, ",")
+ elems = append(elems, fmt.Sprintf("%s:\"%s\"", name, value))
+ }
+ }
+ if len(elems) > 0 {
+ return " `" + strings.Join(elems, " ") + "`"
+ }
+ return ""
+}
diff --git a/vendor/goa.design/goa/codegen/validation.go b/vendor/goa.design/goa/codegen/validation.go
new file mode 100644
index 000000000..c10ed5552
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/validation.go
@@ -0,0 +1,457 @@
+package codegen
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "strings"
+ "text/template"
+
+ "goa.design/goa/expr"
+)
+
+var (
+ enumValT *template.Template
+ formatValT *template.Template
+ patternValT *template.Template
+ minMaxValT *template.Template
+ lengthValT *template.Template
+ requiredValT *template.Template
+ arrayValT *template.Template
+ mapValT *template.Template
+ userValT *template.Template
+)
+
+func init() {
+ fm := template.FuncMap{
+ "slice": toSlice,
+ "oneof": oneof,
+ "constant": constant,
+ "goifyAtt": GoifyAtt,
+ "add": func(a, b int) int { return a + b },
+ }
+ enumValT = template.Must(template.New("enum").Funcs(fm).Parse(enumValTmpl))
+ formatValT = template.Must(template.New("format").Funcs(fm).Parse(formatValTmpl))
+ patternValT = template.Must(template.New("pattern").Funcs(fm).Parse(patternValTmpl))
+ minMaxValT = template.Must(template.New("minMax").Funcs(fm).Parse(minMaxValTmpl))
+ lengthValT = template.Must(template.New("length").Funcs(fm).Parse(lengthValTmpl))
+ requiredValT = template.Must(template.New("req").Funcs(fm).Parse(requiredValTmpl))
+ arrayValT = template.Must(template.New("array").Funcs(fm).Parse(arrayValTmpl))
+ mapValT = template.Must(template.New("map").Funcs(fm).Parse(mapValTmpl))
+ userValT = template.Must(template.New("user").Funcs(fm).Parse(userValTmpl))
+}
+
+// ValidationCode produces Go code that runs the validations defined in the
+// given attribute definition if any against the content of the variable named
+// target. The generated code assumes that there is a pre-existing "err"
+// variable of type error. It initializes that variable in case a validation
+// fails.
+//
+// context is used to produce helpful messages in case of error.
+//
+func ValidationCode(ca *ContextualAttribute, target, context string) string {
+ att := ca.Attribute.Expr()
+ validation := att.Validation
+ if validation == nil {
+ return ""
+ }
+ var (
+ kind = att.Type.Kind()
+ isNativePointer = kind == expr.BytesKind || kind == expr.AnyKind
+ isPointer = ca.IsPointer()
+ tval = target
+ )
+ if isPointer && expr.IsPrimitive(att.Type) && !isNativePointer {
+ tval = "*" + tval
+ }
+ data := map[string]interface{}{
+ "attribute": att,
+ "isPointer": isPointer,
+ "context": context,
+ "target": target,
+ "targetVal": tval,
+ "string": kind == expr.StringKind,
+ "array": expr.IsArray(att.Type),
+ "map": expr.IsMap(att.Type),
+ }
+ runTemplate := func(tmpl *template.Template, data interface{}) string {
+ var buf bytes.Buffer
+ if err := tmpl.Execute(&buf, data); err != nil {
+ panic(err) // bug
+ }
+ return buf.String()
+ }
+ var res []string
+ if values := validation.Values; values != nil {
+ data["values"] = values
+ if val := runTemplate(enumValT, data); val != "" {
+ res = append(res, val)
+ }
+ }
+ if format := validation.Format; format != "" {
+ data["format"] = string(format)
+ if val := runTemplate(formatValT, data); val != "" {
+ res = append(res, val)
+ }
+ }
+ if pattern := validation.Pattern; pattern != "" {
+ data["pattern"] = pattern
+ if val := runTemplate(patternValT, data); val != "" {
+ res = append(res, val)
+ }
+ }
+ if min := validation.Minimum; min != nil {
+ data["min"] = *min
+ data["isMin"] = true
+ delete(data, "max")
+ if val := runTemplate(minMaxValT, data); val != "" {
+ res = append(res, val)
+ }
+ }
+ if max := validation.Maximum; max != nil {
+ data["max"] = *max
+ data["isMin"] = false
+ delete(data, "min")
+ if val := runTemplate(minMaxValT, data); val != "" {
+ res = append(res, val)
+ }
+ }
+ if minLength := validation.MinLength; minLength != nil {
+ data["minLength"] = minLength
+ data["isMinLength"] = true
+ delete(data, "maxLength")
+ if val := runTemplate(lengthValT, data); val != "" {
+ res = append(res, val)
+ }
+ }
+ if maxLength := validation.MaxLength; maxLength != nil {
+ data["maxLength"] = maxLength
+ data["isMinLength"] = false
+ delete(data, "minLength")
+ if val := runTemplate(lengthValT, data); val != "" {
+ res = append(res, val)
+ }
+ }
+ if req := validation.Required; len(req) > 0 {
+ obj := expr.AsObject(att.Type)
+ for _, r := range req {
+ reqAtt := obj.Attribute(r)
+ if reqAtt == nil {
+ continue
+ }
+ if !ca.Pointer && expr.IsPrimitive(reqAtt.Type) &&
+ reqAtt.Type.Kind() != expr.BytesKind &&
+ reqAtt.Type.Kind() != expr.AnyKind {
+
+ continue
+ }
+ data["req"] = r
+ data["reqAtt"] = reqAtt
+ res = append(res, runTemplate(requiredValT, data))
+ }
+ }
+ return strings.Join(res, "\n")
+}
+
+// RecursiveValidationCode produces Go code that runs the validations defined in
+// the given attribute and its children recursively against the value held by
+// the variable named target.
+func RecursiveValidationCode(a *ContextualAttribute, target string) string {
+ seen := make(map[string]*bytes.Buffer)
+ return recurseValidationCode(a, target, target, seen).String()
+}
+
+func recurseValidationCode(ca *ContextualAttribute, target, context string, seen map[string]*bytes.Buffer) *bytes.Buffer {
+ var (
+ buf = new(bytes.Buffer)
+ first = true
+ att = ca.Attribute.Expr()
+ )
+
+ // Break infinite recursions
+ if ut, ok := att.Type.(expr.UserType); ok {
+ if buf, ok := seen[ut.ID()]; ok {
+ return buf
+ }
+ seen[ut.ID()] = buf
+ }
+
+ validation := ValidationCode(ca, target, context)
+ if validation != "" {
+ buf.WriteString(validation)
+ first = false
+ }
+
+ runUserValT := func(name, target string) string {
+ var buf bytes.Buffer
+ data := map[string]interface{}{
+ "name": Goify(name, true),
+ "target": target,
+ }
+ if err := userValT.Execute(&buf, data); err != nil {
+ panic(err) // bug
+ }
+ return fmt.Sprintf("if %s != nil {\n\t%s\n}", target, buf.String())
+ }
+
+ if o := expr.AsObject(att.Type); o != nil {
+ for _, nat := range *o {
+ validation := recurseAttribute(ca, nat, target, context, seen)
+ if validation != "" {
+ if !first {
+ buf.WriteByte('\n')
+ } else {
+ first = false
+ }
+ buf.WriteString(validation)
+ }
+ }
+ } else if a := expr.AsArray(att.Type); a != nil {
+ elem := ca.Dup(a.ElemType, true)
+ elem.Pointer = false
+ val := recurseValidationCode(elem, "e", context+"[*]", seen).String()
+ if val != "" {
+ switch a.ElemType.Type.(type) {
+ case expr.UserType:
+ // For user and result types, call the Validate method
+ val = runUserValT(elem.Attribute.Name(), "e")
+ }
+ data := map[string]interface{}{
+ "target": target,
+ "validation": val,
+ }
+ if !first {
+ buf.WriteByte('\n')
+ } else {
+ first = false
+ }
+ if err := arrayValT.Execute(buf, data); err != nil {
+ panic(err) // bug
+ }
+ }
+ } else if m := expr.AsMap(att.Type); m != nil {
+ key := ca.Dup(m.KeyType, true)
+ key.Pointer = false
+ keyVal := recurseValidationCode(key, "k", context+".key", seen).String()
+ elem := ca.Dup(m.ElemType, true)
+ elem.Pointer = false
+ valueVal := recurseValidationCode(elem, "v", context+"[key]", seen).String()
+ if keyVal != "" || valueVal != "" {
+ if keyVal != "" {
+ if _, ok := m.KeyType.Type.(expr.UserType); ok {
+ keyVal = runUserValT(key.Attribute.Name(), "k")
+ } else {
+ keyVal = "\n" + keyVal
+ }
+ }
+ if valueVal != "" {
+ if _, ok := m.ElemType.Type.(expr.UserType); ok {
+ valueVal = runUserValT(elem.Attribute.Name(), "v")
+ } else {
+ valueVal = "\n" + valueVal
+ }
+ }
+ data := map[string]interface{}{
+ "target": target,
+ "keyValidation": keyVal,
+ "valueValidation": valueVal,
+ }
+ if !first {
+ buf.WriteByte('\n')
+ } else {
+ first = false
+ }
+ if err := mapValT.Execute(buf, data); err != nil {
+ panic(err) // bug
+ }
+ }
+ }
+ return buf
+}
+
+func recurseAttribute(ca *ContextualAttribute, nat *expr.NamedAttributeExpr, target, context string, seen map[string]*bytes.Buffer) string {
+ var (
+ validation string
+
+ att = ca.Attribute.Expr()
+ )
+ ca = ca.Dup(nat.Attribute, att.IsRequired(nat.Name))
+ if ut, ok := nat.Attribute.Type.(expr.UserType); ok {
+ // We need to check empirically whether there are validations to be
+ // generated, we can't just generate and check whether something was
+ // generated to avoid infinite recursions.
+ hasValidations := false
+ done := errors.New("done")
+ Walk(ut.Attribute(), func(a *expr.AttributeExpr) error {
+ if a.Validation != nil {
+ if ca.Pointer {
+ hasValidations = true
+ return done
+ }
+ // For public data structures there is a case
+ // where there is validation but no actual
+ // validation code: if the validation is a
+ // required validation that applies to
+ // attributes that cannot be nil i.e. primitive
+ // types.
+ if !a.Validation.HasRequiredOnly() {
+ hasValidations = true
+ return done
+ }
+ obj := expr.AsObject(a.Type)
+ for _, name := range a.Validation.Required {
+ if att := obj.Attribute(name); att != nil && !expr.IsPrimitive(att.Type) {
+ hasValidations = true
+ return done
+ }
+ }
+ }
+ return nil
+ })
+ if hasValidations {
+ var buf bytes.Buffer
+ tgt := fmt.Sprintf("%s.%s", target, ca.Attribute.Field(nat.Name, true))
+ if expr.IsArray(nat.Attribute.Type) {
+ buf.Write(recurseValidationCode(ca, tgt, context, seen).Bytes())
+ } else {
+ if err := userValT.Execute(&buf, map[string]interface{}{"name": Goify(ut.Name(), true), "target": tgt}); err != nil {
+ panic(err) // bug
+ }
+ }
+ validation = buf.String()
+ }
+ } else {
+ validation = recurseValidationCode(
+ ca,
+ fmt.Sprintf("%s.%s", target, ca.Attribute.Field(nat.Name, true)),
+ fmt.Sprintf("%s.%s", context, nat.Name),
+ seen,
+ ).String()
+ }
+ if validation != "" {
+ if expr.IsObject(nat.Attribute.Type) {
+ validation = fmt.Sprintf("if %s.%s != nil {\n%s\n}",
+ target, ca.Attribute.Field(nat.Name, true), validation)
+ }
+ }
+ return validation
+}
+
+// toSlice returns Go code that represents the given slice.
+func toSlice(val []interface{}) string {
+ elems := make([]string, len(val))
+ for i, v := range val {
+ elems[i] = fmt.Sprintf("%#v", v)
+ }
+ return fmt.Sprintf("[]interface{}{%s}", strings.Join(elems, ", "))
+}
+
+// oneof produces code that compares target with each element of vals and ORs
+// the result, e.g. "target == 1 || target == 2".
+func oneof(target string, vals []interface{}) string {
+ elems := make([]string, len(vals))
+ for i, v := range vals {
+ elems[i] = fmt.Sprintf("%s == %#v", target, v)
+ }
+ return strings.Join(elems, " || ")
+}
+
+// constant returns the Go constant name of the format with the given value.
+func constant(formatName string) string {
+ switch formatName {
+ case "date":
+ return "goa.FormatDate"
+ case "date-time":
+ return "goa.FormatDateTime"
+ case "uuid":
+ return "goa.FormatUUID"
+ case "email":
+ return "goa.FormatEmail"
+ case "hostname":
+ return "goa.FormatHostname"
+ case "ipv4":
+ return "goa.FormatIPv4"
+ case "ipv6":
+ return "goa.FormatIPv6"
+ case "ip":
+ return "goa.FormatIP"
+ case "uri":
+ return "goa.FormatURI"
+ case "mac":
+ return "goa.FormatMAC"
+ case "cidr":
+ return "goa.FormatCIDR"
+ case "regexp":
+ return "goa.FormatRegexp"
+ case "json":
+ return "goa.FormatJSON"
+ case "rfc1123":
+ return "goa.FormatRFC1123"
+ }
+ panic("unknown format") // bug
+}
+
+const (
+ arrayValTmpl = `for _, e := range {{ .target }} {
+{{ .validation }}
+}`
+
+ mapValTmpl = `for {{if .keyValidation }}k{{ else }}_{{ end }}, {{ if .valueValidation }}v{{ else }}_{{ end }} := range {{ .target }} {
+{{- .keyValidation }}
+{{- .valueValidation }}
+}`
+
+ userValTmpl = `if err2 := Validate{{ .name }}({{ .target }}); err2 != nil {
+ err = goa.MergeErrors(err, err2)
+}`
+
+ enumValTmpl = `{{ if .isPointer -}}
+if {{ .target }} != nil {
+{{ end -}}
+if !({{ oneof .targetVal .values }}) {
+ err = goa.MergeErrors(err, goa.InvalidEnumValueError({{ printf "%q" .context }}, {{ .targetVal }}, {{ slice .values }}))
+{{ if .isPointer -}}
+}
+{{ end -}}
+}`
+
+ patternValTmpl = `{{ if .isPointer -}}
+if {{ .target }} != nil {
+{{ end -}}
+ err = goa.MergeErrors(err, goa.ValidatePattern({{ printf "%q" .context }}, {{ .targetVal }}, {{ printf "%q" .pattern }}))
+{{- if .isPointer }}
+}
+{{- end }}`
+
+ formatValTmpl = `{{ if .isPointer -}}
+if {{ .target }} != nil {
+{{ end -}}
+ err = goa.MergeErrors(err, goa.ValidateFormat({{ printf "%q" .context }}, {{ .targetVal}}, {{ constant .format }}))
+{{ if .isPointer -}}
+}
+{{- end }}`
+
+ minMaxValTmpl = `{{ if .isPointer -}}
+if {{ .target }} != nil {
+{{ end -}}
+ if {{ .targetVal }} {{ if .isMin }}<{{ else }}>{{ end }} {{ if .isMin }}{{ .min }}{{ else }}{{ .max }}{{ end }} {
+ err = goa.MergeErrors(err, goa.InvalidRangeError({{ printf "%q" .context }}, {{ .targetVal }}, {{ if .isMin }}{{ .min }}, true{{ else }}{{ .max }}, false{{ end }}))
+{{ if .isPointer -}}
+}
+{{ end -}}
+}`
+
+ lengthValTmpl = `{{ $target := or (and (or (or .array .map) .nonzero) .target) .targetVal -}}
+{{ if and .isPointer .string -}}
+if {{ .target }} != nil {
+{{ end -}}
+if {{ if .string }}utf8.RuneCountInString({{ $target }}){{ else }}len({{ $target }}){{ end }} {{ if .isMinLength }}<{{ else }}>{{ end }} {{ if .isMinLength }}{{ .minLength }}{{ else }}{{ .maxLength }}{{ end }} {
+ err = goa.MergeErrors(err, goa.InvalidLengthError({{ printf "%q" .context }}, {{ $target }}, {{ if .string }}utf8.RuneCountInString({{ $target }}){{ else }}len({{ $target }}){{ end }}, {{ if .isMinLength }}{{ .minLength }}, true{{ else }}{{ .maxLength }}, false{{ end }}))
+}{{- if and .isPointer .string }}
+}
+{{- end }}`
+
+ requiredValTmpl = `if {{ $.target }}.{{ goifyAtt $.reqAtt .req true }} == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("{{ .req }}", {{ printf "%q" $.context }}))
+}`
+)
diff --git a/vendor/goa.design/goa/codegen/walk.go b/vendor/goa.design/goa/codegen/walk.go
new file mode 100644
index 000000000..60f9a4290
--- /dev/null
+++ b/vendor/goa.design/goa/codegen/walk.go
@@ -0,0 +1,73 @@
+package codegen
+
+import "goa.design/goa/expr"
+
+// MappedAttributeWalker is the type of functions given to WalkMappedAttr. name
+// is the name of the attribute, elem the name of the corresponding transport
+// element (e.g. HTTP header). required is true if the attribute is required.
+type MappedAttributeWalker func(name, elem string, required bool, a *expr.AttributeExpr) error
+
+// Walk traverses the data structure recursively and calls the given function
+// once on each attribute starting with a.
+func Walk(a *expr.AttributeExpr, walker func(*expr.AttributeExpr) error) error {
+ return walk(a, walker, make(map[string]bool))
+}
+
+// WalkType traverses the data structure recursively and calls the given function
+// once on each attribute starting with the user type attribute.
+func WalkType(u expr.UserType, walker func(*expr.AttributeExpr) error) error {
+ return walk(u.Attribute(), walker, map[string]bool{u.ID(): true})
+}
+
+// WalkMappedAttr iterates over the mapped attributes. It calls the given
+// function giving each attribute as it iterates. WalkMappedAttr stops if there
+// is no more attribute to iterate over or if the iterator function returns an
+// error in which case it returns the error.
+func WalkMappedAttr(ma *expr.MappedAttributeExpr, it MappedAttributeWalker) error {
+ o := expr.AsObject(ma.Type)
+ for _, nat := range *o {
+ if err := it(nat.Name, ma.ElemName(nat.Name), ma.IsRequired(nat.Name), nat.Attribute); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// Recursive implementation of the Walk methods. Takes care of avoiding infinite
+// recursions by keeping track of types that have already been walked.
+func walk(at *expr.AttributeExpr, walker func(*expr.AttributeExpr) error, seen map[string]bool) error {
+ if err := walker(at); err != nil {
+ return err
+ }
+ walkUt := func(ut expr.UserType) error {
+ if _, ok := seen[ut.ID()]; ok {
+ return nil
+ }
+ seen[ut.ID()] = true
+ return walk(ut.Attribute(), walker, seen)
+ }
+ switch actual := at.Type.(type) {
+ case expr.Primitive:
+ return nil
+ case *expr.Array:
+ return walk(actual.ElemType, walker, seen)
+ case *expr.Map:
+ if err := walk(actual.KeyType, walker, seen); err != nil {
+ return err
+ }
+ return walk(actual.ElemType, walker, seen)
+ case *expr.Object:
+ for _, cat := range *actual {
+ if err := walk(cat.Attribute, walker, seen); err != nil {
+ return err
+ }
+ }
+ case *expr.UserTypeExpr:
+ return walkUt(actual)
+ case *expr.ResultTypeExpr:
+ return walkUt(actual.UserTypeExpr)
+ default:
+ panic("unknown attribute type") // bug
+ }
+ return nil
+}
diff --git a/vendor/goa.design/goa/dsl/api.go b/vendor/goa.design/goa/dsl/api.go
new file mode 100644
index 000000000..2cf1ed8e6
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/api.go
@@ -0,0 +1,273 @@
+package dsl
+
+import (
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+// API defines a network service API. It provides the API name, description and other global
+// properties. There may only be one API declaration in a given design package.
+//
+// API is a top level DSL. API takes two arguments: the name of the API and the
+// defining DSL.
+//
+// The API properties are leveraged by the OpenAPI specification. The server
+// expressions are also used by the server and the client tool code generators.
+//
+// Example:
+//
+// var _ = API("adder", func() {
+// Title("title") // Title used in documentation
+// Description("description") // Description used in documentation
+// Version("2.0") // Version of API
+// TermsOfService("terms") // Terms of use
+// Contact(func() { // Contact info
+// Name("contact name")
+// Email("contact email")
+// URL("contact URL")
+// })
+// License(func() { // License
+// Name("license name")
+// URL("license URL")
+// })
+// Docs(func() { // Documentation links
+// Description("doc description")
+// URL("doc URL")
+// })
+// }
+//
+func API(name string, fn func()) *expr.APIExpr {
+ if name == "" {
+ eval.ReportError("API first argument cannot be empty")
+ return nil
+ }
+ if _, ok := eval.Current().(eval.TopExpr); !ok {
+ eval.IncompatibleDSL()
+ return nil
+ }
+ expr.Root.API = expr.NewAPIExpr(name, fn)
+ return expr.Root.API
+}
+
+// Title sets the API title. It is used by the generated OpenAPI specification.
+//
+// Title must appear in an API expression.
+//
+// Title accepts a single string argument.
+//
+// Example:
+//
+// var _ = API("divider", func() {
+// Title("divider API")
+// })
+//
+func Title(val string) {
+ if s, ok := eval.Current().(*expr.APIExpr); ok {
+ s.Title = val
+ return
+ }
+ eval.IncompatibleDSL()
+}
+
+// Version specifies the API version. One design describes one version.
+//
+// Version must appear in an API expression.
+//
+// Version accepts a single string argument.
+//
+// Example:
+//
+// var _ = API("divider", func() {
+// Version("1.0")
+// })
+//
+func Version(ver string) {
+ if s, ok := eval.Current().(*expr.APIExpr); ok {
+ s.Version = ver
+ return
+ }
+ eval.IncompatibleDSL()
+}
+
+// Contact sets the API contact information.
+//
+// Contact must appear in an API expression.
+//
+// Contact takes a single argument which is the defining DSL.
+//
+// Example:
+//
+// var _ = API("divider", func() {
+// Contact(func() {
+// Name("support")
+// Email("support@goa.design")
+// URL("https://goa.design")
+// })
+// })
+//
+func Contact(fn func()) {
+ contact := new(expr.ContactExpr)
+ if !eval.Execute(fn, contact) {
+ return
+ }
+ if a, ok := eval.Current().(*expr.APIExpr); ok {
+ a.Contact = contact
+ return
+ }
+ eval.IncompatibleDSL()
+}
+
+// License sets the API license information.
+//
+// License must appear in a API expression.
+//
+// License takes a single argument which is the defining DSL.
+//
+// Example:
+//
+// var _ = API("divider", func() {
+// License(func() {
+// Name("MIT")
+// URL("https://github.com/goadesign/goa/blob/master/LICENSE")
+// })
+// })
+//
+func License(fn func()) {
+ license := new(expr.LicenseExpr)
+ if !eval.Execute(fn, license) {
+ return
+ }
+ if a, ok := eval.Current().(*expr.APIExpr); ok {
+ a.License = license
+ return
+ }
+ eval.IncompatibleDSL()
+}
+
+// Docs provides external documentation URLs. It is used by the generated
+// OpenAPI specification.
+//
+// Docs must appear in an API, Service, Method or Attribute expr.
+//
+// Docs takes a single argument which is the defining DSL.
+//
+// Example:
+//
+// var _ = API("cellar", func() {
+// Docs(func() {
+// Description("Additional documentation")
+// URL("https://goa.design")
+// })
+// })
+//
+func Docs(fn func()) {
+ docs := new(expr.DocsExpr)
+ if !eval.Execute(fn, docs) {
+ return
+ }
+ switch e := eval.Current().(type) {
+ case *expr.APIExpr:
+ e.Docs = docs
+ case *expr.ServiceExpr:
+ e.Docs = docs
+ case *expr.MethodExpr:
+ e.Docs = docs
+ case *expr.AttributeExpr:
+ e.Docs = docs
+ case *expr.HTTPFileServerExpr:
+ e.Docs = docs
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// TermsOfService describes the API terms of services or links to them.
+//
+// TermsOfService must appear in an API expression.
+//
+// TermsOfService takes a single argument which is the TOS text or URL.
+//
+// Example:
+//
+// var _ = API("github", func() {
+// TermsOfService("https://help.github.com/articles/github-terms-of-API/"
+// })
+//
+func TermsOfService(terms string) {
+ if s, ok := eval.Current().(*expr.APIExpr); ok {
+ s.TermsOfService = terms
+ return
+ }
+ eval.IncompatibleDSL()
+}
+
+// Name sets the contact or license name.
+//
+// Name must appear in a Contact or License expression.
+//
+// Name takes a single argument which is the contact or license name.
+//
+// Example:
+//
+// var _ = API("divider", func() {
+// License(func() {
+// Name("MIT")
+// URL("https://github.com/goadesign/goa/blob/master/LICENSE")
+// })
+// })
+//
+func Name(name string) {
+ switch def := eval.Current().(type) {
+ case *expr.ContactExpr:
+ def.Name = name
+ case *expr.LicenseExpr:
+ def.Name = name
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// Email sets the contact email.
+//
+// Email must appear in a Contact expression.
+//
+// Email takes a single argument which is the email address.
+//
+// Example:
+//
+// var _ = API("divider", func() {
+// Contact(func() {
+// Email("support@goa.design")
+// })
+// })
+//
+func Email(email string) {
+ if c, ok := eval.Current().(*expr.ContactExpr); ok {
+ c.Email = email
+ }
+}
+
+// URL sets the contact, license or external documentation URL.
+//
+// URL must appear in Contact, License or Docs.
+//
+// URL accepts a single argument which is the URL.
+//
+// Example:
+//
+// Docs(func() {
+// URL("https://goa.design")
+// })
+//
+func URL(url string) {
+ switch def := eval.Current().(type) {
+ case *expr.ContactExpr:
+ def.URL = url
+ case *expr.LicenseExpr:
+ def.URL = url
+ case *expr.DocsExpr:
+ def.URL = url
+ default:
+ eval.IncompatibleDSL()
+ }
+}
diff --git a/vendor/goa.design/goa/dsl/attribute.go b/vendor/goa.design/goa/dsl/attribute.go
new file mode 100644
index 000000000..2d03c05a3
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/attribute.go
@@ -0,0 +1,358 @@
+package dsl
+
+import (
+ "fmt"
+
+ "goa.design/goa/expr"
+ "goa.design/goa/eval"
+)
+
+// Attribute describes a field of an object.
+//
+// An attribute has a name, a type and optionally a default value, an example
+// value and validation rules.
+//
+// The type of an attribute can be one of:
+//
+// * The primitive types Boolean, Float32, Float64, Int, Int32, Int64, UInt,
+// UInt32, UInt64, String or Bytes.
+//
+// * A user type defined via the Type function.
+//
+// * An array defined using the ArrayOf function.
+//
+// * A map defined using the MapOf function.
+//
+// * An object defined inline using Attribute to define the type fields
+// recursively.
+//
+// * The special type Any to indicate that the attribute may take any of the
+// types listed above.
+//
+// Attribute must appear in ResultType, Type, Attribute or Attributes.
+//
+// Attribute accepts one to four arguments, the valid usages of the function
+// are:
+//
+// Attribute(name) // Attribute of type String with no description, no
+// // validation, default or example value
+//
+// Attribute(name, fn) // Attribute of type object with inline field
+// // definitions, description, validations, default
+// // and/or example value
+//
+// Attribute(name, type) // Attribute with no description, no validation,
+// // no default or example value
+//
+// Attribute(name, type, fn) // Attribute with description, validations,
+// // default and/or example value
+//
+// Attribute(name, type, description) // Attribute with no validation,
+// // default or example value
+//
+// Attribute(name, type, description, fn) // Attribute with description,
+// // validations, default and/or
+// // example value
+//
+// Where name is a string indicating the name of the attribute, type specifies
+// the attribute type (see above for the possible values), description a string
+// providing a human description of the attribute and fn the defining DSL if
+// any.
+//
+// When defining the type inline using Attribute recursively the function takes
+// the second form (name and DSL defining the type). The description can be
+// provided using the Description function in this case.
+//
+// Examples:
+//
+// Attribute("name")
+//
+// Attribute("driver", Person) // Use type defined with Type function
+//
+// Attribute("driver", "Person") // May also use the type name
+//
+// Attribute("name", String, func() {
+// Pattern("^foo") // Adds a validation rule
+// })
+//
+// Attribute("driver", Person, func() {
+// Required("name") // Add required field to list of
+// }) // fields already required in Person
+//
+// Attribute("name", String, func() {
+// Default("bob") // Sets a default value
+// })
+//
+// Attribute("name", String, "name of driver") // Sets a description
+//
+// Attribute("age", Int32, "description", func() {
+// Minimum(2) // Sets both a description and
+// // validations
+// })
+//
+// The definition below defines an attribute inline. The resulting type
+// is an object with three attributes "name", "age" and "child". The "child"
+// attribute is itself defined inline and has one child attribute "name".
+//
+// Attribute("driver", func() { // Define type inline
+// Description("Composite attribute") // Set description
+//
+// Attribute("name", String) // Child attribute
+// Attribute("age", Int32, func() { // Another child attribute
+// Description("Age of driver")
+// Default(42)
+// Minimum(2)
+// })
+// Attribute("child", func() { // Defines a child attribute
+// Attribute("name", String) // Grand-child attribute
+// Required("name")
+// })
+//
+// Required("name", "age") // List required attributes
+// })
+//
+func Attribute(name string, args ...interface{}) {
+ var parent *expr.AttributeExpr
+ {
+ switch def := eval.Current().(type) {
+ case *expr.AttributeExpr:
+ parent = def
+ case expr.CompositeExpr:
+ parent = def.Attribute()
+ default:
+ eval.IncompatibleDSL()
+ return
+ }
+ if parent == nil {
+ eval.ReportError("invalid syntax, attribute %#v has no parent", name)
+ return
+ }
+ if parent.Type == nil {
+ parent.Type = &expr.Object{}
+ }
+ if _, ok := parent.Type.(*expr.Object); !ok {
+ eval.ReportError("can't define child attribute %#v on attribute of type %s", name, parent.Type.Name())
+ return
+ }
+ }
+
+ var attr *expr.AttributeExpr
+ {
+ for _, ref := range parent.References {
+ if att := expr.AsObject(ref).Attribute(name); att != nil {
+ attr = expr.DupAtt(att)
+ break
+ }
+ }
+
+ dataType, description, fn := parseAttributeArgs(attr, args...)
+ if attr != nil {
+ if description != "" {
+ attr.Description = description
+ }
+ if dataType != nil {
+ attr.Type = dataType
+ }
+ } else {
+ attr = &expr.AttributeExpr{
+ Type: dataType,
+ Description: description,
+ }
+ }
+ attr.References = parent.References
+ attr.Bases = parent.Bases
+ if fn != nil {
+ eval.Execute(fn, attr)
+ }
+ if attr.Type == nil {
+ // DSL did not contain an "Attribute" declaration
+ attr.Type = expr.String
+ }
+ }
+
+ parent.Type.(*expr.Object).Set(name, attr)
+}
+
+// Field is syntactic sugar to define an attribute with the "rpc:tag" meta
+// set with the value of the first argument.
+//
+// Field must appear wherever Attribute can.
+//
+// Field takes the same arguments as Attribute with the addition of the tag
+// value as first argument.
+//
+// Example:
+//
+// Field(1, "ID", String, func() {
+// Pattern("[0-9]+")
+// })
+//
+func Field(tag interface{}, name string, args ...interface{}) {
+ fn := func() { Meta("rpc:tag", fmt.Sprintf("%v", tag)) }
+ if d, ok := args[len(args)-1].(func()); ok {
+ old := fn
+ fn = func() { d(); old() }
+ args = args[:len(args)-1]
+ }
+ Attribute(name, append(args, fn)...)
+}
+
+// Default sets the default value for an attribute.
+func Default(def interface{}) {
+ a, ok := eval.Current().(*expr.AttributeExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ if a.Type != nil && !a.Type.IsCompatible(def) {
+ eval.ReportError("default value %#v is incompatible with attribute of type %s",
+ def, expr.QualifiedTypeName(a.Type))
+ return
+ }
+ a.SetDefault(def)
+}
+
+// Example provides an example value for a type, a parameter, a header or any
+// attribute. Example supports two syntaxes: one syntax accepts two arguments
+// where the first argument is a summary describing the example and the second a
+// value provided directly or via a DSL which may also specify a long
+// description. The other syntax accepts a single argument and is equivalent to
+// using the first syntax where the summary is the string "default".
+//
+// If no example is explicitly provided in an attribute expression then a random
+// example is generated unless the "swagger:example" meta is set to "false".
+// See Meta.
+//
+// Example must appear in a Attributes or Attribute expression DSL.
+//
+// Example takes one or two arguments: an optional summary and the example value
+// or defining DSL.
+//
+// Examples:
+//
+// Params(func() {
+// Param("ZipCode:zip-code", String, "Zip code filter", func() {
+// Example("Santa Barbara", "93111")
+// Example("93117") // same as Example("default", "93117")
+// })
+// })
+//
+// Attributes(func() {
+// Attribute("ID", Int64, "ID is the unique bottle identifier")
+// Example("The first bottle", func() {
+// Description("This bottle has an ID set to 1")
+// Value(Val{"ID": 1})
+// })
+// Example("Another bottle", func() {
+// Description("This bottle has an ID set to 5")
+// Value(Val{"ID": 5})
+// })
+// })
+//
+func Example(args ...interface{}) {
+ if len(args) == 0 {
+ eval.ReportError("not enough arguments")
+ return
+ }
+ if len(args) > 2 {
+ eval.ReportError("too many arguments")
+ return
+ }
+ var (
+ summary string
+ arg interface{}
+ )
+ if len(args) == 1 {
+ summary = "default"
+ arg = args[0]
+ } else {
+ var ok bool
+ summary, ok = args[0].(string)
+ if !ok {
+ eval.InvalidArgError("summary (string)", summary)
+ return
+ }
+ arg = args[1]
+ }
+ if a, ok := eval.Current().(*expr.AttributeExpr); ok {
+ ex := &expr.ExampleExpr{Summary: summary}
+ if dsl, ok := arg.(func()); ok {
+ eval.Execute(dsl, ex)
+ } else {
+ ex.Value = arg
+ }
+ if ex.Value == nil {
+ eval.ReportError("example value is missing")
+ return
+ }
+ if a.Type != nil && !a.Type.IsCompatible(ex.Value) {
+ eval.ReportError("example value %#v is incompatible with attribute of type %s",
+ ex.Value, a.Type.Name())
+ return
+ }
+ a.UserExamples = append(a.UserExamples, ex)
+ }
+}
+
+func parseAttributeArgs(baseAttr *expr.AttributeExpr, args ...interface{}) (expr.DataType, string, func()) {
+ var (
+ dataType expr.DataType
+ description string
+ fn func()
+ ok bool
+ )
+
+ parseDataType := func(expected string, index int) {
+ if name, ok2 := args[index].(string); ok2 {
+ // Lookup type by name
+ if dataType = expr.Root.UserType(name); dataType == nil {
+ eval.InvalidArgError(expected, args[index])
+ }
+ return
+ }
+ if dataType, ok = args[index].(expr.DataType); !ok {
+ eval.InvalidArgError(expected, args[index])
+ }
+ }
+ parseDescription := func(expected string, index int) {
+ if description, ok = args[index].(string); !ok {
+ eval.InvalidArgError(expected, args[index])
+ }
+ }
+ parseDSL := func(index int, success, failure func()) {
+ if fn, ok = args[index].(func()); ok {
+ success()
+ return
+ }
+ failure()
+ }
+
+ success := func() {}
+
+ switch len(args) {
+ case 0:
+ if baseAttr != nil {
+ dataType = baseAttr.Type
+ } else {
+ dataType = expr.String
+ }
+ case 1:
+ success = func() {
+ if baseAttr != nil {
+ dataType = baseAttr.Type
+ }
+ }
+ parseDSL(0, success, func() { parseDataType("type, type name or func()", 0) })
+ case 2:
+ parseDataType("type or type name", 0)
+ parseDSL(1, success, func() { parseDescription("string or func()", 1) })
+ case 3:
+ parseDataType("type or type name", 0)
+ parseDescription("string", 1)
+ parseDSL(2, success, func() { eval.InvalidArgError("func()", args[2]) })
+ default:
+ eval.ReportError("too many arguments in call to Attribute")
+ }
+
+ return dataType, description, fn
+}
diff --git a/vendor/goa.design/goa/dsl/convert.go b/vendor/goa.design/goa/dsl/convert.go
new file mode 100644
index 000000000..8633eb354
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/convert.go
@@ -0,0 +1,168 @@
+package dsl
+
+import (
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+// ConvertTo specifies an external type that instances of the generated struct
+// are converted into. The generated struct is equipped with a method that makes
+// it possible to instantiate the external type. The default algorithm used to
+// match the external type fields to the design attributes is as follows:
+//
+// 1. Look for an attribute with the same name as the field
+// 2. Look for an attribute with the same name as the field but with the
+// first letter being lowercase
+// 3. Look for an attribute with a name corresponding to the snake_case
+// version of the field name
+//
+// This algorithm does not apply if the attribute is equipped with the
+// "struct.field.external" meta. In this case the matching is done by
+// looking up the field with a name corresponding to the value of the meta.
+// If the value of the meta is "-" the attribute isn't matched and no
+// conversion code is generated for it. In all other cases it is an error if no
+// match is found or if the matching field type does not correspond to the
+// attribute type.
+//
+// The following limitations apply on the external Go struct field types
+// recursively:
+//
+// * struct fields must use pointers
+// * pointers on slices or on maps are not supported
+//
+// ConvertTo must appear in Type or ResultType.
+//
+// ConvertTo accepts one argument: an instance of the external type.
+//
+// Example:
+//
+// Service design:
+//
+// var Bottle = Type("bottle", func() {
+// Description("A bottle")
+// ConvertTo(models.Bottle{})
+// // The "rating" attribute is matched to the external
+// // typ "Rating" field.
+// Attribute("rating", Int)
+// Attribute("name", String, func() {
+// // The "name" attribute is matched to the external
+// // type "MyName" field.
+// Meta("struct.field.external", "MyName")
+// })
+// Attribute("vineyard", String, func() {
+// // The "vineyard" attribute is not converted.
+// Meta("struct.field.external", "-")
+// })
+// })
+//
+// External (i.e. non design) package:
+//
+// package model
+//
+// type Bottle struct {
+// Rating int
+// // Mapped field
+// MyName string
+// // Additional fields are OK
+// Description string
+// }
+//
+func ConvertTo(obj interface{}) {
+ var ut expr.UserType
+ switch actual := eval.Current().(type) {
+ case *expr.AttributeExpr:
+ for _, t := range expr.Root.Types {
+ if t.Attribute() == actual {
+ ut = t
+ }
+ }
+ case *expr.ResultTypeExpr:
+ ut = actual
+ default:
+ eval.IncompatibleDSL()
+ return
+ }
+ expr.Root.Conversions =
+ append(expr.Root.Conversions, &expr.TypeMap{User: ut, External: obj})
+}
+
+// CreateFrom specifies an external type that instances of the generated struct
+// can be initialized from. The generated struct is equipped with a method that
+// initializes its fields from an instance of the external type. The default
+// algorithm used to match the external type fields to the design attributes is
+// as follows:
+//
+// 1. Look for an attribute with the same name as the field
+// 2. Look for an attribute with the same name as the field but with the
+// first letter being lowercase
+// 3. Look for an attribute with a name corresponding to the snake_case
+// version of the field name
+//
+// This algorithm does not apply if the attribute is equipped with the
+// "struct.field.external" meta. In this case the matching is done by
+// looking up the field with a name corresponding to the value of the meta.
+// If the value of the meta is "-" the attribute isn't matched and no
+// conversion code is generated for it. In all other cases it is an error if no
+// match is found or if the matching field type does not correspond to the
+// attribute type.
+//
+// The following limitations apply on the external Go struct field types
+// recursively:
+//
+// * struct fields must use pointers
+// * pointers on slices or on maps are not supported
+//
+// CreateFrom must appear in Type or ResultType.
+//
+// CreateFrom accepts one argument: an instance of the external type.
+//
+// Example:
+//
+// Service design:
+//
+// var Bottle = Type("bottle", func() {
+// Description("A bottle")
+// CreateFrom(models.Bottle{})
+// Attribute("rating", Int)
+// Attribute("name", String, func() {
+// // The "name" attribute is matched to the external
+// // type "MyName" field.
+// Meta("struct.field.external", "MyName")
+// })
+// Attribute("vineyard", String, func() {
+// // The "vineyard" attribute is not initialized by the
+// // generated constructor method.
+// Meta("struct.field.external", "-")
+// })
+// })
+//
+// External (i.e. non design) package:
+//
+// package model
+//
+// type Bottle struct {
+// Rating int
+// // Mapped field
+// MyName string
+// // Additional fields are OK
+// Description string
+// }
+//
+func CreateFrom(obj interface{}) {
+ var ut expr.UserType
+ switch actual := eval.Current().(type) {
+ case *expr.AttributeExpr:
+ for _, t := range expr.Root.Types {
+ if t.Attribute() == actual {
+ ut = t
+ }
+ }
+ case *expr.ResultTypeExpr:
+ ut = actual
+ default:
+ eval.IncompatibleDSL()
+ return
+ }
+ expr.Root.Creations =
+ append(expr.Root.Creations, &expr.TypeMap{User: ut, External: obj})
+}
diff --git a/vendor/goa.design/goa/dsl/description.go b/vendor/goa.design/goa/dsl/description.go
new file mode 100644
index 000000000..d1b41f43a
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/description.go
@@ -0,0 +1,50 @@
+package dsl
+
+import "goa.design/goa/expr"
+import "goa.design/goa/eval"
+
+// Description sets the expression description.
+//
+// Description may appear in API, Docs, Type or Attribute.
+// Description may also appear in Response and FileServer.
+//
+// Description accepts one argument: the description string.
+//
+// Example:
+//
+// API("adder", func() {
+// Description("Adder API")
+// })
+//
+func Description(d string) {
+ switch e := eval.Current().(type) {
+ case *expr.APIExpr:
+ e.Description = d
+ case *expr.ServerExpr:
+ e.Description = d
+ case *expr.HostExpr:
+ e.Description = d
+ case *expr.ServiceExpr:
+ e.Description = d
+ case *expr.ResultTypeExpr:
+ e.Description = d
+ case *expr.AttributeExpr:
+ e.Description = d
+ case *expr.DocsExpr:
+ e.Description = d
+ case *expr.MethodExpr:
+ e.Description = d
+ case *expr.ExampleExpr:
+ e.Description = d
+ case *expr.SchemeExpr:
+ e.Description = d
+ case *expr.HTTPResponseExpr:
+ e.Description = d
+ case *expr.HTTPFileServerExpr:
+ e.Description = d
+ case *expr.GRPCResponseExpr:
+ e.Description = d
+ default:
+ eval.IncompatibleDSL()
+ }
+}
diff --git a/vendor/goa.design/goa/dsl/doc.go b/vendor/goa.design/goa/dsl/doc.go
new file mode 100644
index 000000000..e1cd4734b
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/doc.go
@@ -0,0 +1,151 @@
+/*
+Package dsl implements the goa DSL used to define HTTP APIs.
+
+The HTTP DSL adds a "HTTP" function to the DSL constructs that require HTTP
+specific information. These include the API, Service, Method and Error DSLs.
+
+For example:
+
+ var _ = API("name", func() {
+ Description("Optional description")
+ // HTTP specific properties
+ HTTP(func() {
+ // Base path for all the API requests.
+ Path("/path")
+ })
+ })
+
+The HTTP function defines the mapping of the data type attributes used
+in the generic DSL to HTTP parameters (for requests), headers and body fields.
+
+For example:
+
+ var _ = Service("name", func() {
+ Method("name", func() {
+ Payload(PayloadType) // has attributes rq1, rq2, rq3 and rq4
+ Result(ResultType) // has attributes rp1 and rp2
+ Error("name", ErrorType) // has attributes er1 and er2
+
+ HTTP(func() {
+ GET("/{rq1}") // rq1 read from path parameter
+ Param("rq2") // rq2 read from query string
+ Header("rq3") // rq3 read from header
+ Body(func() {
+ Attribute("rq4") // rq4 read from body field, default
+ })
+ Response(StatusOK, func() {
+ Header("rp1") // rp1 written to header
+ Body(func() {
+ Attribute("rp2") // rp2 written to body field, default
+ })
+ })
+ Response(StatusBadRequest, func() {
+ Header("er1") // er1 written to header
+ Body(func() {
+ Attribute("er2") // er2 written to body field, default
+ })
+ })
+ })
+ })
+ })
+
+By default the payload, result and error type attributes define the request and
+response body fields respectively. Any attribute that is not explicitly mapped
+is used to define the request or response body. The default response status code
+is 200 OK for response types other than Empty and 204 NoContent for the Empty
+response type. The default response status code for errors is 400.
+
+The example above can thus be simplified to:
+
+ var _ = Service("name", func() {
+ Method("name", func() {
+ Payload(PayloadType) // has attributes rq1, rq2, rq3 and rq4
+ Result(ResultType) // has attributes rp1 and rp2
+ Error("name", ErrorType) // has attributes er1 and er2
+
+ HTTP(func() {
+ GET("/{rq1}") // rq1 read from path parameter
+ Param("rq2") // rq2 read from query string
+ Header("rq3") // rq3 read from header
+ Response(StatusOK, func() {
+ Header("rp1") // rp1 written to header
+ })
+ Response("name", StatusBadRequest, func() {
+ Header("er1") // er1 written to header
+ })
+ })
+ })
+ })
+
+The GRPC DSL adds a "GRPC" function to the DSL constructs that require gRPC
+specific information. These include the API, Service, Method, and Error DSLs.
+
+For example:
+
+ var _ = API("name", func() {
+ Description("Optional description")
+ // gRPC specific properties
+ GRPC(func() {
+ })
+ })
+
+The GRPC function defines the mapping of the data type attributes used in the
+generic DSL to gRPC messages and metadata.
+
+For example:
+
+ var PayloadType = Type("Payload", func() {
+ TypeName("Payload") // mapped to gRPC message with name "Payload"
+ Field(1, "rq1", String) // mapped to field in "Payload" message
+ // with name "rq1" and tag number 1
+ Field(2, "rq2", String) // mapped to field in "Payload" message
+ // with name "rq2" and tag number 2
+ Attribute("rq3", Int)
+ Attribute("rq4", Int)
+ })
+
+ var ResultType = ResultType("application/vnd.result", func() {
+ TypeName("Result") // mapped to gRPC message with name "Result"
+ Attributes(func() {
+ Attribute("rp1", Int)
+ Field(1, "rp2", String) // mapped to field in "Result" message
+ // with name "rp2" and tag number 1
+ })
+ })
+
+ var _ = Service("name", func() {
+ Method("name", func() {
+ Payload(PayloadType)
+ Result(ResultType) // has attributes rp1 and rp2
+ Error("name")
+
+ GRPC(func() {
+ Metadata(func() { // rq3 and rq4 present in gRPC request metadata
+//			Attribute("rq3")
+ Attribute("rq4")
+ })
+ Message(func() {
+ Attribute("rq1") // rq1 and rq2 present in gRPC request message
+ })
+ Response(CodeOK, func() {
+ Metadata(func() {
+ Attribute("rp1") // rp1 present in gRPC response metadata
+ })
+ Message(func() {
+ Attribute("rp2") // rp2 present in gRPC response message
+ })
+ })
+ Response("name", CodeInternal) // responds with error message
+ // defined by error "name"
+ })
+ })
+ })
+
+By default the payload and result type attributes define the request and
+response message fields respectively with the exception of security attributes
+in payload which are mapped to request metadata unless specified explicitly.
+The default response status code is CodeOK for success response and
+CodeUnknown for error responses. See google.golang.org/grpc/codes package for
+more information on the status codes.
+*/
+package dsl
diff --git a/vendor/goa.design/goa/dsl/error.go b/vendor/goa.design/goa/dsl/error.go
new file mode 100644
index 000000000..2da09d042
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/error.go
@@ -0,0 +1,132 @@
+package dsl
+
+import (
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+// Error describes a method error return value. The description includes a
+// unique name (in the scope of the method), an optional type, description and
+// DSL that further describes the type. If no type is specified then the
+// built-in ErrorResult type is used. The DSL syntax is identical to the
+// Attribute DSL.
+//
+// Error must appear in the Service (to define error responses that apply to all
+// the service methods) or Method expressions.
+//
+// See Attribute for details on the Error arguments.
+//
+// Example:
+//
+// var _ = Service("divider", func() {
+// Error("invalid_arguments") // Uses type ErrorResult
+//
+// // Method which uses the default type for its response.
+// Method("divide", func() {
+// Payload(DivideRequest)
+// Error("div_by_zero", DivByZero, "Division by zero")
+// })
+// })
+//
+func Error(name string, args ...interface{}) {
+ if len(args) == 0 {
+ args = []interface{}{expr.ErrorResult}
+ }
+ dt, desc, fn := parseAttributeArgs(nil, args...)
+ att := &expr.AttributeExpr{
+ Description: desc,
+ Type: dt,
+ }
+ if fn != nil {
+ eval.Execute(fn, att)
+ }
+ if att.Type == nil {
+ att.Type = expr.ErrorResult
+ }
+ erro := &expr.ErrorExpr{AttributeExpr: att, Name: name}
+ switch actual := eval.Current().(type) {
+ case *expr.ServiceExpr:
+ actual.Errors = append(actual.Errors, erro)
+ case *expr.MethodExpr:
+ actual.Errors = append(actual.Errors, erro)
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// Temporary qualifies an error type as describing temporary (i.e. retryable)
+// errors.
+//
+// Temporary must appear in an Error expression.
+//
+// Temporary takes no argument.
+//
+// Example:
+//
+// var _ = Service("divider", func() {
+// Error("request_timeout", func() {
+// Temporary()
+// })
+// })
+func Temporary() {
+ attr, ok := eval.Current().(*expr.AttributeExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ if attr.Meta == nil {
+ attr.Meta = make(expr.MetaExpr)
+ }
+ attr.Meta["goa:error:temporary"] = nil
+}
+
+// Timeout qualifies an error type as describing errors due to timeouts.
+//
+// Timeout must appear in an Error expression.
+//
+// Timeout takes no argument.
+//
+// Example:
+//
+// var _ = Service("divider", func() {
+// Error("request_timeout", func() {
+// Timeout()
+// })
+// })
+func Timeout() {
+ attr, ok := eval.Current().(*expr.AttributeExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ if attr.Meta == nil {
+ attr.Meta = make(expr.MetaExpr)
+ }
+ attr.Meta["goa:error:timeout"] = nil
+}
+
+// Fault qualifies an error type as describing errors due to a server-side
+// fault.
+//
+// Fault must appear in an Error expression.
+//
+// Fault takes no argument.
+//
+// Example:
+//
+// var _ = Service("divider", func() {
+// Error("internal_error", func() {
+// Fault()
+// })
+// })
+func Fault() {
+ attr, ok := eval.Current().(*expr.AttributeExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ if attr.Meta == nil {
+ attr.Meta = make(expr.MetaExpr)
+ }
+ attr.Meta["goa:error:fault"] = nil
+}
diff --git a/vendor/goa.design/goa/dsl/grpc.go b/vendor/goa.design/goa/dsl/grpc.go
new file mode 100644
index 000000000..1dc306862
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/grpc.go
@@ -0,0 +1,314 @@
+package dsl
+
+import (
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+const (
+ // CodeOK represents the gRPC response code "OK".
+ CodeOK = 0
+ // CodeCanceled represents the gRPC response code "Canceled".
+ CodeCanceled = 1
+ // CodeUnknown represents the gRPC response code "Unknown".
+ CodeUnknown = 2
+ // CodeInvalidArgument represents the gRPC response code "InvalidArgument".
+ CodeInvalidArgument = 3
+ // CodeDeadlineExceeded represents the gRPC response code "DeadlineExceeded".
+ CodeDeadlineExceeded = 4
+ // CodeNotFound represents the gRPC response code "NotFound".
+ CodeNotFound = 5
+ // CodeAlreadyExists represents the gRPC response code "AlreadyExists".
+ CodeAlreadyExists = 6
+ // CodePermissionDenied represents the gRPC response code "PermissionDenied".
+ CodePermissionDenied = 7
+ // CodeResourceExhausted represents the gRPC response code "ResourceExhausted".
+ CodeResourceExhausted = 8
+ // CodeFailedPrecondition represents the gRPC response code "FailedPrecondition".
+ CodeFailedPrecondition = 9
+ // CodeAborted represents the gRPC response code "Aborted".
+ CodeAborted = 10
+ // CodeOutOfRange represents the gRPC response code "OutOfRange".
+ CodeOutOfRange = 11
+ // CodeUnimplemented represents the gRPC response code "Unimplemented".
+ CodeUnimplemented = 12
+ // CodeInternal represents the gRPC response code "Internal".
+ CodeInternal = 13
+ // CodeUnavailable represents the gRPC response code "Unavailable".
+ CodeUnavailable = 14
+ // CodeDataLoss represents the gRPC response code "DataLoss".
+ CodeDataLoss = 15
+ // CodeUnauthenticated represents the gRPC response code "Unauthenticated".
+ CodeUnauthenticated = 16
+)
+
+// GRPC defines gRPC transport specific properties on an API, a service, or a
+// single method. The function maps the request and response types to gRPC
+// properties such as request and response messages.
+//
+// As a special case GRPC may be used to define the response generated for
+// invalid requests and internal errors (errors returned by the service methods
+// that don't match any of the error responses defined in the design). This is
+// the only use of GRPC allowed in the API expression.
+//
+// The functions that appear in GRPC such as Message or Response may take
+// advantage of the request or response types (depending on whether they appear
+// when describing the gRPC request or response). The properties of the message
+// attributes inherit the properties of the attributes with the same names that
+// appear in the request or response types. The functions may also define new
+// attributes or override the existing request or response type attributes.
+//
+// GRPC must appear in a API, Service or a Method expression.
+//
+// GRPC accepts a single argument which is the defining DSL function.
+//
+// Example:
+//
+// var CreatePayload = Type("CreatePayload", func() {
+// Field(1, "name", String, "Name of account")
+// TokenField(2, "token", String, "JWT token for authentication")
+// })
+//
+// var CreateResult = ResultType("application/vnd.create", func() {
+// Attributes(func() {
+// Field(1, "name", String, "Name of the created resource")
+// Field(2, "href", String, "Href of the created resource")
+// })
+// })
+//
+// Method("create", func() {
+// Payload(CreatePayload)
+// Result(CreateResult)
+// Error("unauthenticated")
+//
+// GRPC(func() { // gRPC endpoint to define gRPC service
+// Message(func() { // gRPC request message
+// Attribute("token")
+// })
+// Response(CodeOK) // gRPC success response
+// Response("unauthenticated", CodeUnauthenticated) // grpc error
+// })
+// })
+//
+func GRPC(fn func()) {
+ switch actual := eval.Current().(type) {
+ case *expr.ServiceExpr:
+ res := expr.Root.API.GRPC.ServiceFor(actual)
+ res.DSLFunc = fn
+ case *expr.MethodExpr:
+ res := expr.Root.API.GRPC.ServiceFor(actual.Service)
+ act := res.EndpointFor(actual.Name, actual)
+ act.DSLFunc = fn
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// Message describes a gRPC request or response message.
+//
+// Message must appear in a gRPC method expression to define the
+// attributes that must appear in a request message or in a gRPC response
+// expression to define the attributes that must appear in a response message.
+// If Message is absent then the request message is built using the method
+// payload expression and the response message is built using the method
+// result expression.
+//
+// Message accepts one argument of function type which lists the attributes
+// that must be present in the message. For example, the Message function can
+// be defined on the gRPC method expression listing the security attributes
+// to appear in the request message instead of sending them in the gRPC
+// metadata by default. The attributes listed in the function inherit the
+// properties (description, type, meta, validations etc.) of the request or
+// response type attributes with identical names.
+//
+// Example:
+//
+// var CreatePayload = Type("CreatePayload", func() {
+// Field(1, "name", String, "Name of account")
+// TokenField(2, "token", String, "JWT token for authentication")
+// })
+//
+// var CreateResult = ResultType("application/vnd.create", func() {
+// Attributes(func() {
+// Field(1, "name", String, "Name of the created resource")
+// Field(2, "href", String, "Href of the created resource")
+// })
+// })
+//
+// Method("create", func() {
+// Payload(CreatePayload)
+// Result(CreateResult)
+// GRPC(func() {
+// Message(func() {
+// Attribute("token") // "token" sent in the request message
+// // along with "name"
+// })
+// Response(func() {
+// Code(CodeOK)
+// Message(func() {
+// Attribute("name") // "name" sent in the response
+// // message along with "href"
+// Required("name") // "name" is set to required
+// })
+// })
+// })
+// })
+//
+// If the method payload/result type is a primitive, array, or a map the
+// request/response message by default contains one attribute with name
+// "field", "rpc:tag" set to 1, and the type set to the type of the
+// method payload/result. The function argument can also be used to set
+// the message field name to something other than "field".
+//
+// Example:
+//
+// Method("add", func() {
+// Payload(Operands)
+// Result(Int) // method Result is a primitive
+// GRPC(func() {
+// Response(CodeOK, func() {
+// Message(func() {
+// Attribute("sum") // Response message has one field with
+// // name "sum" instead of the default
+// // "field"
+// })
+// })
+// })
+// })
+//
+func Message(fn func()) {
+ var setter func(*expr.AttributeExpr)
+ {
+ switch e := eval.Current().(type) {
+ case *expr.GRPCEndpointExpr:
+ setter = func(att *expr.AttributeExpr) {
+ e.Request = att
+ }
+ case *expr.GRPCErrorExpr:
+ setter = func(att *expr.AttributeExpr) {
+ if e.Response == nil {
+ e.Response = &expr.GRPCResponseExpr{}
+ }
+ e.Response.Message = att
+ }
+ case *expr.GRPCResponseExpr:
+ setter = func(att *expr.AttributeExpr) {
+ e.Message = att
+ }
+ default:
+ eval.IncompatibleDSL()
+ return
+ }
+ }
+ attr := &expr.AttributeExpr{}
+ if eval.Execute(fn, attr) {
+ setter(attr)
+ }
+}
+
+// Metadata defines gRPC request metadata.
+//
+// Metadata must appear in a gRPC endpoint expression to describe gRPC request
+// metadata.
+//
+// Security attributes in the method payload are automatically added to the
+// request metadata unless specified explicitly in request message using
+// Message function. All other attributes in method payload are added to the
+// request message unless specified explicitly using Metadata (in which case
+// will be added to the metadata).
+//
+// Metadata takes one argument of function type which lists the attributes
+// that must be set in the request metadata instead of the message.
+// If Metadata is set in the gRPC endpoint expression, it inherits the
+// attribute properties (description, type, meta, validations etc.) from the
+// method payload.
+//
+// Example:
+//
+// var CreatePayload = Type("CreatePayload", func() {
+// Field(1, "name", String, "Name of account")
+// TokenField(2, "token", String, "JWT token for authentication")
+// })
+//
+// var CreateResult = ResultType("application/vnd.create", func() {
+// Attributes(func() {
+// Field(1, "name", String, "Name of the created resource")
+// Field(2, "href", String, "Href of the created resource")
+// })
+// })
+//
+// Method("create", func() {
+// Payload(CreatePayload)
+// Result(CreateResult)
+// GRPC(func() {
+// Metadata(func() {
+// Attribute("name") // "name" sent in the request metadata
+// // along with "token"
+// })
+// Response(func() {
+// Code(CodeOK)
+// })
+// })
+// })
+//
+func Metadata(fn func()) {
+ switch e := eval.Current().(type) {
+ case *expr.GRPCEndpointExpr:
+ attr := &expr.AttributeExpr{}
+ if eval.Execute(fn, attr) {
+ e.Metadata = expr.NewMappedAttributeExpr(attr)
+ }
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// Trailers defines gRPC trailers in response metadata.
+//
+// Trailers must appear in a gRPC response expression to describe gRPC trailers
+// in response metadata.
+//
+// Trailers takes one argument of function type which lists the attributes
+// that must be set in the trailer response metadata instead of the message.
+// If Trailers is set in the gRPC response expression, it inherits the
+// attribute properties (description, type, meta, validations etc.) from the
+// method result.
+//
+// Example:
+//
+// var CreatePayload = Type("CreatePayload", func() {
+// Field(1, "name", String, "Name of account")
+// TokenField(2, "token", String, "JWT token for authentication")
+// })
+//
+// var CreateResult = ResultType("application/vnd.create", func() {
+// Attributes(func() {
+// Field(1, "name", String, "Name of the created resource")
+// Field(2, "href", String, "Href of the created resource")
+// })
+// })
+//
+// Method("create", func() {
+// Payload(CreatePayload)
+// Result(CreateResult)
+// GRPC(func() {
+// Response(func() {
+// Code(CodeOK)
+// Trailers(func() {
+// Attribute("name") // "name" sent in the trailer metadata
+// })
+// })
+// })
+// })
+//
+func Trailers(fn func()) {
+ switch e := eval.Current().(type) {
+ case *expr.GRPCResponseExpr:
+ attr := &expr.AttributeExpr{}
+ if eval.Execute(fn, attr) {
+ e.Trailers = expr.NewMappedAttributeExpr(attr)
+ }
+ default:
+ eval.IncompatibleDSL()
+ }
+}
diff --git a/vendor/goa.design/goa/dsl/headers.go b/vendor/goa.design/goa/dsl/headers.go
new file mode 100644
index 000000000..e8eb1a434
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/headers.go
@@ -0,0 +1,100 @@
+package dsl
+
+import (
+ "reflect"
+
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+// Headers describes HTTP request/response or gRPC response headers.
+// When used in a HTTP expression, it groups a set of Header expressions and
+// makes it possible to list required headers using the Required function.
+// When used in a GRPC response expression, it defines the headers to be sent
+// in the response metadata.
+//
+// To define HTTP headers, Headers must appear in an Service HTTP expression
+// to define request headers common to all the service methods. Headers may
+// also appear in a method, response or error HTTP expression to define the
+// HTTP endpoint request and response headers.
+//
+// To define gRPC response header metadata, Headers must appear in a GRPC
+// response expression.
+//
+// Headers accepts one argument: Either a function listing the headers (both
+// HTTP and gRPC) or a user type which must be an object and whose attributes
+// define the headers (HTTP only).
+//
+// Example:
+//
+// // HTTP headers
+//
+// var _ = Service("cellar", func() {
+// HTTP(func() {
+// Headers(func() {
+// Header("version:Api-Version", String, "API version", func() {
+// Enum("1.0", "2.0")
+// })
+// Required("version")
+// })
+// })
+// })
+//
+// // gRPC response header metadata
+//
+// var CreateResult = ResultType("application/vnd.create", func() {
+// Attributes(func() {
+// Field(1, "name", String, "Name of the created resource")
+// Field(2, "href", String, "Href of the created resource")
+// })
+// })
+//
+// Method("create", func() {
+// Payload(CreatePayload)
+// Result(CreateResult)
+// GRPC(func() {
+// Response(func() {
+// Code(CodeOK)
+// Headers(func() {
+// Attribute("name") // "name" sent in the header metadata
+// })
+// })
+// })
+// })
+//
+func Headers(args interface{}) {
+ switch e := eval.Current().(type) {
+ case *expr.GRPCResponseExpr:
+ if fn, ok := args.(func()); ok {
+ attr := &expr.AttributeExpr{}
+ if eval.Execute(fn, attr) {
+ e.Headers = expr.NewMappedAttributeExpr(attr)
+ }
+ return
+ }
+ if _, ok := args.(expr.UserType); ok {
+ eval.InvalidArgError("function", args)
+ return
+ }
+ default:
+ h := headers(eval.Current())
+ if h == nil {
+ eval.IncompatibleDSL()
+ return
+ }
+ if fn, ok := args.(func()); ok {
+ eval.Execute(fn, h)
+ return
+ }
+ t, ok := args.(expr.UserType)
+ if !ok {
+ eval.InvalidArgError("function or type", args)
+ return
+ }
+ o := expr.AsObject(t)
+ if o == nil {
+ eval.ReportError("type must be an object but got %s", reflect.TypeOf(args).Name())
+ }
+ h.Merge(expr.NewMappedAttributeExpr(&expr.AttributeExpr{Type: o}))
+ }
+}
diff --git a/vendor/goa.design/goa/dsl/http.go b/vendor/goa.design/goa/dsl/http.go
new file mode 100644
index 000000000..0d7ddf8ba
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/http.go
@@ -0,0 +1,846 @@
+package dsl
+
+import (
+ "fmt"
+ "strings"
+
+ "reflect"
+
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+const (
+ StatusContinue = expr.StatusContinue
+ StatusSwitchingProtocols = expr.StatusSwitchingProtocols
+ StatusProcessing = expr.StatusProcessing
+
+ StatusOK = expr.StatusOK
+ StatusCreated = expr.StatusCreated
+ StatusAccepted = expr.StatusAccepted
+ StatusNonAuthoritativeInfo = expr.StatusNonAuthoritativeInfo
+ StatusNoContent = expr.StatusNoContent
+ StatusResetContent = expr.StatusResetContent
+ StatusPartialContent = expr.StatusPartialContent
+ StatusMultiStatus = expr.StatusMultiStatus
+ StatusAlreadyReported = expr.StatusAlreadyReported
+ StatusIMUsed = expr.StatusIMUsed
+
+ StatusMultipleChoices = expr.StatusMultipleChoices
+ StatusMovedPermanently = expr.StatusMovedPermanently
+ StatusFound = expr.StatusFound
+ StatusSeeOther = expr.StatusSeeOther
+ StatusNotModified = expr.StatusNotModified
+ StatusUseProxy = expr.StatusUseProxy
+
+ StatusTemporaryRedirect = expr.StatusTemporaryRedirect
+ StatusPermanentRedirect = expr.StatusPermanentRedirect
+
+ StatusBadRequest = expr.StatusBadRequest
+ StatusUnauthorized = expr.StatusUnauthorized
+ StatusPaymentRequired = expr.StatusPaymentRequired
+ StatusForbidden = expr.StatusForbidden
+ StatusNotFound = expr.StatusNotFound
+ StatusMethodNotAllowed = expr.StatusMethodNotAllowed
+ StatusNotAcceptable = expr.StatusNotAcceptable
+ StatusProxyAuthRequired = expr.StatusProxyAuthRequired
+ StatusRequestTimeout = expr.StatusRequestTimeout
+ StatusConflict = expr.StatusConflict
+ StatusGone = expr.StatusGone
+ StatusLengthRequired = expr.StatusLengthRequired
+ StatusPreconditionFailed = expr.StatusPreconditionFailed
+ StatusRequestEntityTooLarge = expr.StatusRequestEntityTooLarge
+ StatusRequestURITooLong = expr.StatusRequestURITooLong
+ StatusUnsupportedResultType = expr.StatusUnsupportedResultType
+ StatusRequestedRangeNotSatisfiable = expr.StatusRequestedRangeNotSatisfiable
+ StatusExpectationFailed = expr.StatusExpectationFailed
+ StatusTeapot = expr.StatusTeapot
+ StatusUnprocessableEntity = expr.StatusUnprocessableEntity
+ StatusLocked = expr.StatusLocked
+ StatusFailedDependency = expr.StatusFailedDependency
+ StatusUpgradeRequired = expr.StatusUpgradeRequired
+ StatusPreconditionRequired = expr.StatusPreconditionRequired
+ StatusTooManyRequests = expr.StatusTooManyRequests
+ StatusRequestHeaderFieldsTooLarge = expr.StatusRequestHeaderFieldsTooLarge
+ StatusUnavailableForLegalReasons = expr.StatusUnavailableForLegalReasons
+
+ StatusInternalServerError = expr.StatusInternalServerError
+ StatusNotImplemented = expr.StatusNotImplemented
+ StatusBadGateway = expr.StatusBadGateway
+ StatusServiceUnavailable = expr.StatusServiceUnavailable
+ StatusGatewayTimeout = expr.StatusGatewayTimeout
+ StatusHTTPVersionNotSupported = expr.StatusHTTPVersionNotSupported
+ StatusVariantAlsoNegotiates = expr.StatusVariantAlsoNegotiates
+ StatusInsufficientStorage = expr.StatusInsufficientStorage
+ StatusLoopDetected = expr.StatusLoopDetected
+ StatusNotExtended = expr.StatusNotExtended
+ StatusNetworkAuthenticationRequired = expr.StatusNetworkAuthenticationRequired
+)
+
+// HTTP defines the HTTP transport specific properties of an API, a service or a
+// single method. The function maps the method payload and result types to HTTP
+// properties such as parameters (via path wildcards or query strings), request
+// or response headers, request or response bodies as well as response status
+// code. HTTP also defines HTTP specific properties such as the method endpoint
+// URLs and HTTP methods.
+//
+// The functions that appear in HTTP such as Header, Param or Body may take
+// advantage of the method payload or result types (depending on whether they
+// appear when describing the HTTP request or response). The properties of the
+// header, parameter or body attributes inherit the properties of the attributes
+// with the same names that appear in the method payload or result types.
+//
+// HTTP must appear in API, a Service or an Method expression.
+//
+// HTTP accepts an optional argument which is the defining DSL function.
+//
+// Example:
+//
+// var _ = API("calc", func() {
+// HTTP(func() {
+// Path("/api") // Prefix to HTTP path of all requests.
+// })
+// })
+//
+// Example:
+//
+// var _ = Service("calculator", func() {
+// Error("unauthorized")
+//
+// HTTP(func() {
+// Path("/calc") // Prefix to all request paths
+// Error("unauthorized", StatusUnauthorized) // Define "unauthorized"
+// // error HTTP response status code.
+// Parent("account") // Parent service, used to prefix request
+// // paths.
+// CanonicalMethod("show") // Method whose path is used to prefix
+// // the paths of child service.
+// })
+//
+// Method("div", func() {
+// Description("Divide two operands.")
+// Payload(Operands)
+// Error("div_by_zero")
+//
+// HTTP(func() {
+// GET("/div/{left}/{right}") // Define HTTP route. The "left"
+// // and "right" parameter properties
+// // are inherited from the
+// // corresponding Operands attributes.
+// Param("integer:int") // Load "integer" attribute of
+// // Operands from "int" query string.
+// Header("requestID:X-RequestId") // Load "requestID" attribute
+// // of Operands from
+// // X-RequestId header
+// Response(StatusOK) // Use status 200 on success
+// Error("div_by_zero", BadRequest) // Use status code 400 for
+// // "div_by_zero" responses
+// })
+// })
+// })
+//
+func HTTP(fns ...func()) {
+ if len(fns) > 1 {
+ eval.InvalidArgError("zero or one function", fmt.Sprintf("%d functions", len(fns)))
+ return
+ }
+ fn := func() {}
+ if len(fns) == 1 {
+ fn = fns[0]
+ }
+ switch actual := eval.Current().(type) {
+ case *expr.APIExpr:
+ eval.Execute(fn, expr.Root)
+ case *expr.ServiceExpr:
+ res := expr.Root.API.HTTP.ServiceFor(actual)
+ res.DSLFunc = fn
+ case *expr.MethodExpr:
+ res := expr.Root.API.HTTP.ServiceFor(actual.Service)
+ act := res.EndpointFor(actual.Name, actual)
+ act.DSLFunc = fn
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// Consumes adds a MIME type to the list of MIME types the APIs supports when
+// accepting requests. While the DSL supports any MIME type, the code generator
+// only knows to generate the code for "application/json", "application/xml" and
+// "application/gob". The service code must provide the decoders for other MIME
+// types.
+//
+// Consumes must appear in the HTTP expression of API.
+//
+// Consumes accepts one or more strings corresponding to the MIME types.
+//
+// Example:
+//
+// API("cellar", func() {
+// // ...
+// HTTP(func() {
+// Consumes("application/json", "application/xml")
+// // ...
+// })
+// })
+//
+func Consumes(args ...string) {
+ switch e := eval.Current().(type) {
+ case *expr.RootExpr:
+ e.API.HTTP.Consumes = append(e.API.HTTP.Consumes, args...)
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// Produces adds a MIME type to the list of MIME types the APIs supports when
+// writing responses. While the DSL supports any MIME type, the code generator
+// only knows to generate the code for "application/json", "application/xml" and
+// "application/gob". The service code must provide the encoders for other MIME
+// types.
+//
+// Produces must appear in the HTTP expression of API.
+//
+// Produces accepts one or more strings corresponding to the MIME types.
+//
+// Example:
+//
+// API("cellar", func() {
+// // ...
+// HTTP(func() {
+// Produces("application/json", "application/xml")
+// // ...
+// })
+// })
+//
+func Produces(args ...string) {
+ switch e := eval.Current().(type) {
+ case *expr.RootExpr:
+ e.API.HTTP.Produces = append(e.API.HTTP.Produces, args...)
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// Path defines an API or service base path, i.e. a common path prefix to all
+// the API or service methods. The path may define wildcards (see GET for a
+// description of the wildcard syntax). The corresponding parameters must be
+// described using Params. Multiple base paths may be defined for services.
+func Path(val string) {
+ switch def := eval.Current().(type) {
+ case *expr.RootExpr:
+ if expr.Root.API.HTTP.Path != "" {
+ eval.ReportError(`only one base path may be specified for an API, got base paths %q and %q`, expr.Root.API.HTTP.Path, val)
+ }
+ expr.Root.API.HTTP.Path = val
+ case *expr.HTTPServiceExpr:
+ if !strings.HasPrefix(val, "//") {
+ rp := expr.Root.API.HTTP.Path
+ awcs := expr.ExtractHTTPWildcards(rp)
+ wcs := expr.ExtractHTTPWildcards(val)
+ for _, awc := range awcs {
+ for _, wc := range wcs {
+ if awc == wc {
+ eval.ReportError(`duplicate wildcard "%s" in API and service base paths`, wc)
+ }
+ }
+ }
+ }
+ def.Paths = append(def.Paths, val)
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// GET defines a route using the GET HTTP method. The route may use wildcards to
+// define path parameters. Wildcards start with '{' or with '{*' and end with
+// '}'. They must appear after a '/'.
+//
+// A wildcard that starts with '{' matches a section of the path (the value in
+// between two slashes).
+//
+// A wildcard that starts with '{*' matches the rest of the path. Such wildcards
+// must terminate the path.
+//
+// GET must appear in a method HTTP function.
+//
+// GET accepts one argument which is the request path.
+//
+// Example:
+//
+// var _ = Service("Manager", func() {
+// Method("GetAccount", func() {
+// Payload(GetAccount)
+// Result(Account)
+// HTTP(func() {
+// GET("/{accountID}/details")
+// GET("/{*accountPath}")
+// })
+// })
+// })
+func GET(path string) *expr.RouteExpr {
+ return route("GET", path)
+}
+
+// HEAD creates a route using the HEAD HTTP method. See GET.
+func HEAD(path string) *expr.RouteExpr {
+ return route("HEAD", path)
+}
+
+// POST creates a route using the POST HTTP method. See GET.
+func POST(path string) *expr.RouteExpr {
+ return route("POST", path)
+}
+
+// PUT creates a route using the PUT HTTP method. See GET.
+func PUT(path string) *expr.RouteExpr {
+ return route("PUT", path)
+}
+
+// DELETE creates a route using the DELETE HTTP method. See GET.
+func DELETE(path string) *expr.RouteExpr {
+ return route("DELETE", path)
+}
+
+// OPTIONS creates a route using the OPTIONS HTTP method. See GET.
+func OPTIONS(path string) *expr.RouteExpr {
+ return route("OPTIONS", path)
+}
+
+// TRACE creates a route using the TRACE HTTP method. See GET.
+func TRACE(path string) *expr.RouteExpr {
+ return route("TRACE", path)
+}
+
+// CONNECT creates a route using the CONNECT HTTP method. See GET.
+func CONNECT(path string) *expr.RouteExpr {
+ return route("CONNECT", path)
+}
+
+// PATCH creates a route using the PATCH HTTP method. See GET.
+func PATCH(path string) *expr.RouteExpr {
+ return route("PATCH", path)
+}
+
+func route(method, path string) *expr.RouteExpr {
+ r := &expr.RouteExpr{Method: method, Path: path}
+ a, ok := eval.Current().(*expr.HTTPEndpointExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return r
+ }
+ r.Endpoint = a
+ a.Routes = append(a.Routes, r)
+ return r
+}
+
+// Header describes a single HTTP header. The properties (description, type,
+// validation etc.) of a header are inherited from the request or response type
+// attribute with the same name by default.
+//
+// Header must appear in the API HTTP expression (to define request headers
+// common to all the API endpoints), a specific method HTTP expression (to
+// define request headers), a Result expression (to define the response
+// headers) or an Error expression (to define the error response headers). Header
+// may also appear in a Headers expression.
+//
+// Header accepts the same arguments as the Attribute function. The header name
+// may define a mapping between the attribute name and the HTTP header name when
+// they differ. The mapping syntax is "name of attribute:name of header".
+//
+// Example:
+//
+// var _ = Service("account", func() {
+// Method("create", func() {
+// Payload(CreatePayload)
+// Result(Account)
+// HTTP(func() {
+// Header("auth:Authorization", String, "Auth token", func() {
+// Pattern("^Bearer [^ ]+$")
+// })
+// Response(StatusCreated, func() {
+// Header("href") // Inherits description, type, validations
+// // etc. from Account href attribute
+// })
+// })
+// })
+// })
+//
+func Header(name string, args ...interface{}) {
+ h := headers(eval.Current())
+ if h == nil {
+ eval.IncompatibleDSL()
+ return
+ }
+ if name == "" {
+ eval.ReportError("header name cannot be empty")
+ }
+ eval.Execute(func() { Attribute(name, args...) }, h.AttributeExpr)
+ h.Remap()
+}
+
+// Params groups a set of Param expressions. It makes it possible to list
+// required parameters using the Required function.
+//
+// Params must appear in an API or Service HTTP expression to define the API or
+// service base path and query string parameters. Params may also appear in an
+// method HTTP expression to define the HTTP endpoint path and query string
+// parameters.
+//
+// Params accepts one argument: Either a function listing the parameters or a
+// user type which must be an object and whose attributes define the parameters.
+//
+// Example:
+//
+// var _ = API("cellar", func() {
+// HTTP(func() {
+// Params(func() {
+// Param("version", String, "API version", func() {
+// Enum("1.0", "2.0")
+// })
+// Required("version")
+// })
+// })
+// })
+//
+func Params(args interface{}) {
+ p := params(eval.Current())
+ if p == nil {
+ eval.IncompatibleDSL()
+ return
+ }
+ if fn, ok := args.(func()); ok {
+ eval.Execute(fn, p)
+ return
+ }
+ t, ok := args.(expr.UserType)
+ if !ok {
+ eval.InvalidArgError("function or type", args)
+ return
+ }
+ o := expr.AsObject(t)
+ if o == nil {
+ eval.ReportError("type must be an object but got %s", reflect.TypeOf(args).Name())
+ }
+ p.Merge(expr.NewMappedAttributeExpr(&expr.AttributeExpr{Type: o}))
+}
+
+// Param describes a single HTTP request path or query string parameter.
+//
+// Param must appear in the API HTTP expression (to define request parameters
+// common to all the API endpoints), a service HTTP expression to define common
+// parameters to all the service methods or a specific method HTTP
+// expression. Param may also appear in a Params expression.
+//
+// Param accepts the same arguments as the Function Attribute.
+//
+// The name may be of the form "name of attribute:name of parameter" to define a
+// mapping between the attribute and parameter names when they differ.
+//
+// Example:
+//
+// var ShowPayload = Type("ShowPayload", func() {
+// Attribute("id", UInt64, "Account ID")
+// Attribute("version", String, "Version", func() {
+// Enum("1.0", "2.0")
+// })
+// })
+//
+// var _ = Service("account", func() {
+// HTTP(func() {
+// Path("/{parentID}")
+// Param("parentID", UInt64, "ID of parent account")
+// })
+// Method("show", func() { // default response type.
+// Payload(ShowPayload)
+// Result(AccountResult)
+// HTTP(func() {
+// GET("/{id}") // HTTP request uses ShowPayload "id"
+// // attribute to define "id" parameter.
+// Params(func() { // Params makes it possible to group
+// // Param expressions.
+// Param("version:v") // "version" of ShowPayload to define
+// // path and query string parameters.
+// // Query string "v" maps to attribute
+// // "version" of ShowPayload.
+// Param("csrf", String) // HTTP only parameter not defined in
+// // ShowPayload
+// Required("csrf") // Params makes it possible to list the
+// // required parameters.
+// })
+// })
+// })
+// })
+//
+func Param(name string, args ...interface{}) {
+ p := params(eval.Current())
+ if p == nil {
+ eval.IncompatibleDSL()
+ return
+ }
+ if name == "" {
+ eval.ReportError("parameter name cannot be empty")
+ }
+ eval.Execute(func() { Attribute(name, args...) }, p.AttributeExpr)
+ p.Remap()
+}
+
+// MapParams describes the query string parameters in a HTTP request.
+//
+// MapParams must appear in a Method HTTP expression to map the query string
+// parameters with the Method's Payload.
+//
+// MapParams accepts one optional argument which specifies the Payload
+// attribute to which the query string parameters must be mapped. This Payload
+// attribute must be a map. If no argument is specified, the query string
+// parameters are mapped with the entire Payload (the Payload must be a map).
+//
+// Example:
+//
+// var _ = Service("account", func() {
+// Method("index", func() {
+// Payload(MapOf(String, Int))
+// HTTP(func() {
+// GET("/")
+// MapParams()
+// })
+// })
+// })
+//
+// var _ = Service("account", func() {
+// Method("show", func() {
+// Payload(func() {
+// Attribute("p", MapOf(String, String))
+// Attribute("id", String)
+// })
+// HTTP(func() {
+// GET("/{id}")
+// MapParams("p")
+// })
+// })
+// })
+//
+func MapParams(args ...interface{}) {
+ if len(args) > 1 {
+ eval.ReportError("too many arguments")
+ }
+ e, ok := eval.Current().(*expr.HTTPEndpointExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ var mapName string
+ if len(args) > 0 {
+ mapName, ok = args[0].(string)
+ if !ok {
+ eval.ReportError("argument must be a string")
+ }
+ }
+ // An empty name records that the query string parameters map to the
+ // entire payload rather than to a named attribute.
+ e.MapQueryParams = &mapName
+}
+
+// MultipartRequest indicates that HTTP requests made to the method use
+// MIME multipart encoding as defined in RFC 2046.
+//
+// MultipartRequest must appear in a HTTP endpoint expression.
+//
+// goa generates a custom encoder that writes the payload for requests made to
+// HTTP endpoints that use MultipartRequest. The generated encoder accepts a
+// user provided function that does the actual mapping of the payload to the
+// multipart content. The user provided function accepts a multipart writer
+// and a reference to the payload and is responsible for encoding the payload.
+// goa also generates a custom decoder that reads back the multipart content
+// into the payload struct. The generated decoder also accepts a user provided
+// function that takes a multipart reader and a reference to the payload struct
+// as parameter. The user provided decoder is responsible for decoding the
+// multipart content into the payload. The example command generates a default
+// implementation for the user decoder and encoder.
+//
+func MultipartRequest() {
+ e, ok := eval.Current().(*expr.HTTPEndpointExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ e.MultipartRequest = true
+}
+
+// Body describes a HTTP request or response body.
+//
+// Body must appear in a Method HTTP expression to define the request body or in
+// an Error or Result HTTP expression to define the response body. If Body is
+// absent then the body is built using the HTTP endpoint request or response
+// type attributes not used to describe parameters (request only) or headers.
+//
+// Body accepts one argument which describes the shape of the body, it can be:
+//
+// - The name of an attribute of the request or response type. In this case the
+// attribute type describes the shape of the body.
+//
+// - A function listing the body attributes. The attributes inherit the
+// properties (description, type, validations etc.) of the request or
+// response type attributes with identical names.
+//
+// Assuming the type:
+//
+// var CreatePayload = Type("CreatePayload", func() {
+// Attribute("name", String, "Name of account")
+// })
+//
+// The following:
+//
+// Method("create", func() {
+// Payload(CreatePayload)
+// })
+//
+// is equivalent to:
+//
+// Method("create", func() {
+// Payload(CreatePayload)
+// HTTP(func() {
+// Body(func() {
+// Attribute("name")
+// })
+// })
+// })
+//
+func Body(args ...interface{}) {
+ if len(args) == 0 {
+ eval.ReportError("not enough arguments, use Body(name), Body(type), Body(func()) or Body(type, func())")
+ return
+ }
+
+ var (
+ ref *expr.AttributeExpr
+ setter func(*expr.AttributeExpr)
+ kind string
+ )
+
+ // Figure out reference type and setter function
+ switch e := eval.Current().(type) {
+ case *expr.HTTPEndpointExpr:
+ // Request body: the reference is the method payload.
+ ref = e.MethodExpr.Payload
+ setter = func(att *expr.AttributeExpr) {
+ e.Body = att
+ }
+ kind = "Request"
+ case *expr.HTTPErrorExpr:
+ // Error response body: the reference is the error attribute.
+ ref = e.ErrorExpr.AttributeExpr
+ setter = func(att *expr.AttributeExpr) {
+ if e.Response == nil {
+ e.Response = &expr.HTTPResponseExpr{}
+ }
+ e.Response.Body = att
+ }
+ kind = "Error"
+ if e.Name != "" {
+ kind += " " + e.Name
+ }
+ case *expr.HTTPResponseExpr:
+ // Response body: the reference is the method result.
+ ref = e.Parent.(*expr.HTTPEndpointExpr).MethodExpr.Result
+ setter = func(att *expr.AttributeExpr) {
+ e.Body = att
+ }
+ kind = "Response"
+ default:
+ eval.IncompatibleDSL()
+ return
+ }
+
+ // Now initialize target attribute and DSL if any
+ var (
+ attr *expr.AttributeExpr
+ fn func()
+ )
+ switch a := args[0].(type) {
+ case string:
+ // Body("name"): the named attribute of the reference type
+ // describes the body.
+ if !expr.IsObject(ref.Type) {
+ eval.ReportError("%s type must be an object with an attribute with name %#v, got %T", kind, a, ref.Type)
+ return
+ }
+ attr = ref.Find(a)
+ if attr == nil {
+ eval.ReportError("%s type does not have an attribute named %#v", kind, a)
+ return
+ }
+ attr = expr.DupAtt(attr)
+ // Remember the name of the original attribute.
+ if attr.Meta == nil {
+ attr.Meta = expr.MetaExpr{"origin:attribute": []string{a}}
+ } else {
+ attr.Meta["origin:attribute"] = []string{a}
+ }
+ case expr.UserType:
+ // Body(type[, func()]): the given user type describes the body.
+ attr = &expr.AttributeExpr{Type: a}
+ if len(args) > 1 {
+ var ok bool
+ fn, ok = args[1].(func())
+ if !ok {
+ eval.ReportError("second argument must be a function")
+ }
+ }
+ case func():
+ // Body(func()): run the DSL against the reference attribute.
+ fn = a
+ if ref == nil {
+ eval.ReportError("Body is set but Payload is not defined")
+ return
+ }
+ attr = ref
+ default:
+ eval.InvalidArgError("attribute name, user type or DSL", a)
+ return
+ }
+
+ // Set body attribute
+ if fn != nil {
+ eval.Execute(fn, attr)
+ }
+ // Tag the attribute as describing a body.
+ if attr.Meta == nil {
+ attr.Meta = expr.MetaExpr{}
+ }
+ attr.Meta["http:body"] = []string{}
+ setter(attr)
+}
+
+// Parent sets the name of the parent service. The parent service canonical
+// method path is used as prefix for all the service HTTP endpoint paths.
+//
+// Parent must appear in a Service HTTP expression.
+func Parent(name string) {
+ r, ok := eval.Current().(*expr.HTTPServiceExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ r.ParentName = name
+}
+
+// CanonicalMethod sets the name of the service canonical method. The canonical
+// method endpoint path is used to prefix the paths to any child service
+// endpoint. The default value is "show".
+//
+// CanonicalMethod must appear in a Service HTTP expression.
+func CanonicalMethod(name string) {
+ r, ok := eval.Current().(*expr.HTTPServiceExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ r.CanonicalEndpointName = name
+}
+
+// Tag identifies a method result type field and a value. The algorithm that
+// encodes the result into the HTTP response iterates through the responses and
+// uses the first response that has a matching tag (that is for which the result
+// field with the tag name matches the tag value). There must be one and only
+// one response with no Tag expression, this response is used when no other tag
+// matches.
+//
+// Tag must appear in Response.
+//
+// Tag accepts two arguments: the name of the field and the (string) value.
+//
+// Example:
+//
+// Method("create", func() {
+// Result(CreateResult)
+// HTTP(func() {
+// Response(StatusCreated, func() {
+// Tag("outcome", "created") // Assumes CreateResult has attribute
+// // "outcome" which may be "created"
+// // or "accepted"
+// })
+//
+// Response(StatusAccepted, func() {
+// Tag("outcome", "accepted")
+// })
+//
+// Response(StatusOK) // Default response if "outcome" is
+// // neither "created" nor "accepted"
+// })
+// })
+//
+func Tag(name, value string) {
+ res, ok := eval.Current().(*expr.HTTPResponseExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ // Record the result attribute name and the value that selects this
+ // response.
+ res.Tag = [2]string{name, value}
+}
+
+// ContentType sets the value of the Content-Type response header.
+//
+// ContentType may appear in a ResultType or a Response expression.
+// ContentType accepts one argument: the mime type as defined by RFC 6838.
+//
+// var _ = ResultType("application/vnd.myapp.mytype", func() {
+// ContentType("application/json")
+// })
+//
+// var _ = Method("add", func() {
+// HTTP(func() {
+// Response(OK, func() {
+// ContentType("application/json")
+// })
+// })
+// })
+//
+func ContentType(typ string) {
+ // The content type may be set on a result type or on a single response.
+ switch actual := eval.Current().(type) {
+ case *expr.ResultTypeExpr:
+ actual.ContentType = typ
+ case *expr.HTTPResponseExpr:
+ actual.ContentType = typ
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// headers returns the mapped attribute containing the headers for the given
+// expression if it's either the root, a service, an endpoint or a response -
+// nil otherwise. The mapped attribute is initialized if needed.
+func headers(exp eval.Expression) *expr.MappedAttributeExpr {
+ switch e := exp.(type) {
+ case *expr.RootExpr:
+ if e.API.HTTP.Headers == nil {
+ e.API.HTTP.Headers = expr.NewEmptyMappedAttributeExpr()
+ }
+ return e.API.HTTP.Headers
+ case *expr.HTTPServiceExpr:
+ if e.Headers == nil {
+ e.Headers = expr.NewEmptyMappedAttributeExpr()
+ }
+ return e.Headers
+ case *expr.HTTPEndpointExpr:
+ if e.Headers == nil {
+ e.Headers = expr.NewEmptyMappedAttributeExpr()
+ }
+ return e.Headers
+ case *expr.HTTPResponseExpr:
+ if e.Headers == nil {
+ e.Headers = expr.NewEmptyMappedAttributeExpr()
+ }
+ return e.Headers
+ default:
+ return nil
+ }
+}
+
+// params returns the mapped attribute containing the path and query params for
+// the given expression if it's either the root, a service or an endpoint - nil
+// otherwise. The mapped attribute is initialized if needed.
+func params(exp eval.Expression) *expr.MappedAttributeExpr {
+ switch e := exp.(type) {
+ case *expr.RootExpr:
+ if e.API.HTTP.Params == nil {
+ e.API.HTTP.Params = expr.NewEmptyMappedAttributeExpr()
+ }
+ return e.API.HTTP.Params
+ case *expr.HTTPServiceExpr:
+ if e.Params == nil {
+ e.Params = expr.NewEmptyMappedAttributeExpr()
+ }
+ return e.Params
+ case *expr.HTTPEndpointExpr:
+ if e.Params == nil {
+ e.Params = expr.NewEmptyMappedAttributeExpr()
+ }
+ return e.Params
+ default:
+ return nil
+ }
+}
diff --git a/vendor/goa.design/goa/dsl/http_file_server.go b/vendor/goa.design/goa/dsl/http_file_server.go
new file mode 100644
index 000000000..e44fdd719
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/http_file_server.go
@@ -0,0 +1,67 @@
+package dsl
+
+import (
+ "strings"
+
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+// Files defines an endpoint that serves static assets via HTTP. The logic for
+// what to do when the filename points to a file vs. a directory is the same as
+// the standard http package ServeFile function. The path may end with a
+// wildcard that matches the rest of the URL (e.g. *filepath). If it does the
+// matching path is appended to filename to form the full file path, so:
+//
+// Files("/index.html", "/www/data/index.html")
+//
+// returns the content of the file "/www/data/index.html" when requests are sent
+// to "/index.html" and:
+//
+// Files("/assets/*filepath", "/www/data/assets")
+//
+// returns the content of the file "/www/data/assets/x/y/z" when requests are
+// sent to "/assets/x/y/z".
+//
+// Files must appear in Service.
+//
+// Files accepts 2 arguments and an optional DSL. The first argument is the
+// request path which may use a wildcard starting with *. The second argument is
+// the path on disk to the files being served. The file path may be absolute or
+// relative to the current path of the process. The DSL allows setting a
+// description and documentation.
+//
+// Example:
+//
+// var _ = Service("bottle", func() {
+// Files("/index.html", "/www/data/index.html", func() {
+// Description("Serve home page")
+// Docs(func() {
+// Description("Additional documentation")
+// URL("https://goa.design")
+// })
+// })
+// })
+//
+func Files(path, filename string, fns ...func()) {
+ if len(fns) > 1 {
+ eval.ReportError("too many arguments given to Files")
+ return
+ }
+ // Make sure request path starts with a "/" so codegen can rely on it.
+ if !strings.HasPrefix(path, "/") {
+ path = "/" + path
+ }
+ // NOTE(review): misuse outside Service is silently ignored - no
+ // eval.IncompatibleDSL() is reported when the type assertion fails.
+ if s, ok := eval.Current().(*expr.ServiceExpr); ok {
+ r := expr.Root.API.HTTP.ServiceFor(s)
+ server := &expr.HTTPFileServerExpr{
+ Service: r,
+ RequestPaths: []string{path},
+ FilePath: filename,
+ }
+ if len(fns) > 0 {
+ eval.Execute(fns[0], server)
+ }
+ r.FileServers = append(r.FileServers, server)
+ }
+}
diff --git a/vendor/goa.design/goa/dsl/meta.go b/vendor/goa.design/goa/dsl/meta.go
new file mode 100644
index 000000000..76c1af2cd
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/meta.go
@@ -0,0 +1,161 @@
+package dsl
+
+import (
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+// Meta defines a set of key/value pairs that can be assigned to an object. Each
+// value consists of a slice of strings so that multiple invocation of the Meta
+// function on the same target using the same key builds up the slice.
+//
+// Meta may appear in attributes, result types, endpoints, responses, services
+// and API definitions.
+//
+// While keys can have any value the following names have special meanings:
+//
+// - "type:generate:force" forces the code generation for the type it is defined
+// on. By default goa only generates types that are used explicitly by the
+// service methods. The value is a slice of strings that lists the names of the
+// services for which to generate the struct. The struct is generated for all
+// services if left empty.
+//
+// package design
+//
+// var _ = Service("service1", func() { ... })
+// var _ = Service("service2", func() { ... })
+//
+// var Unused = Type("Unused", func() {
+// Attribute("name", String)
+// Meta("type:generate:force", "service1", "service2")
+// })
+//
+// - "struct:error:name" identifies the attribute of a result type used to
+// select the returned error when multiple errors are defined on the same
+// method. The value of the field corresponding to the attribute with the
+// struct:error:name metadata is matched against the names of the method errors
+// as defined in the design. This makes it possible to define distinct transport
+// mappings for the various errors (for example to return different HTTP status
+// codes). There must be one and exactly one attribute with the
+// struct:error:name metadata defined on result types used to define error
+// results.
+//
+// var CustomErrorType = ResultType("application/vnd.goa.error", func() {
+// Attribute("message", String, "Error returned.", func() {
+// Meta("struct:error:name")
+// })
+// Attribute("occurred_at", DateTime, "Time error occurred.")
+// })
+//
+// var _ = Service("MyService", func() {
+// Error("internal_error", CustomErrorType)
+// Error("bad_request", CustomErrorType)
+// })
+//
+// - "struct:field:name" overrides the Go struct field name generated by default
+// by goa. Applicable to attributes only.
+//
+// var MyType = Type("MyType", func() {
+// Attribute("ssn", String, "User SSN", func() {
+// Meta("struct:field:name", "SSN")
+// })
+// })
+//
+// - "struct:tag:xxx" sets a generated Go struct field tag and overrides tags
+// that goa would otherwise set. If the metadata value is a slice then the
+// strings are joined with the space character as separator. Applicable to
+// attributes only.
+//
+// var MyType = Type("MyType", func() {
+// Attribute("ssn", String, "User SSN", func() {
+// Meta("struct:tag:json", "SSN,omitempty")
+// Meta("struct:tag:xml", "SSN,omitempty")
+// })
+// })
+//
+// - "swagger:generate" specifies whether Swagger specification should be
+// generated. Defaults to true. Applicable to services, methods and file
+// servers.
+//
+// var _ = Service("MyService", func() {
+// Meta("swagger:generate", "false")
+// })
+//
+// - "swagger:summary" sets the Swagger operation summary field. Applicable to
+// methods.
+//
+// var _ = Service("MyService", func() {
+// Method("MyMethod", func() {
+// Meta("swagger:summary", "Summary of MyMethod")
+// })
+// })
+//
+// - "swagger:example" specifies whether to generate random example. Defaults to
+// true. Applicable to API (applies to all attributes) or individual attributes.
+//
+// var _ = API("MyAPI", func() {
+// Meta("swagger:example", "false")
+// })
+//
+// - "swagger:tag:xxx" sets the Swagger object field tag xxx. Applicable to
+// services and methods.
+//
+// var _ = Service("MyService", func() {
+// Method("MyMethod", func() {
+// Meta("swagger:tag:Backend")
+// Meta("swagger:tag:Backend:desc", "Description of Backend")
+// Meta("swagger:tag:Backend:url", "http://example.com")
+// Meta("swagger:tag:Backend:url:desc", "See more docs here")
+// })
+// })
+//
+// - "swagger:extension:xxx" sets the Swagger extensions xxx. The value can be
+// any valid JSON. Applicable to API (Swagger info and tag objects), Service
+// (Swagger paths object), Method (Swagger path-item object), Route (Swagger
+// operation object), Param (Swagger parameter object), Response (Swagger
+// response object) and Security (Swagger security-scheme object). See
+// https://github.com/OAI/OpenAPI-Specification/blob/master/guidelines/EXTENSIONS.md.
+//
+// var _ = API("MyAPI", func() {
+// Meta("swagger:extension:x-api", `{"foo":"bar"}`)
+// })
+//
+func Meta(name string, value ...string) {
+ // appendMeta accumulates values: repeated calls with the same key on the
+ // same target build up the slice.
+ appendMeta := func(meta expr.MetaExpr, name string, value ...string) expr.MetaExpr {
+ if meta == nil {
+ meta = make(map[string][]string)
+ }
+ meta[name] = append(meta[name], value...)
+ return meta
+ }
+
+ switch e := eval.Current().(type) {
+ case expr.CompositeExpr:
+ att := e.Attribute()
+ att.Meta = appendMeta(att.Meta, name, value...)
+ case *expr.RootExpr:
+ e.Meta = appendMeta(e.Meta, name, value...)
+ case *expr.APIExpr:
+ e.Meta = appendMeta(e.Meta, name, value...)
+ case *expr.AttributeExpr:
+ e.Meta = appendMeta(e.Meta, name, value...)
+ case *expr.ResultTypeExpr:
+ e.Meta = appendMeta(e.Meta, name, value...)
+ case *expr.MethodExpr:
+ e.Meta = appendMeta(e.Meta, name, value...)
+ case *expr.ServiceExpr:
+ e.Meta = appendMeta(e.Meta, name, value...)
+ case *expr.HTTPServiceExpr:
+ e.Meta = appendMeta(e.Meta, name, value...)
+ case *expr.HTTPEndpointExpr:
+ e.Meta = appendMeta(e.Meta, name, value...)
+ case *expr.RouteExpr:
+ e.Meta = appendMeta(e.Meta, name, value...)
+ case *expr.HTTPFileServerExpr:
+ e.Meta = appendMeta(e.Meta, name, value...)
+ case *expr.HTTPResponseExpr:
+ e.Meta = appendMeta(e.Meta, name, value...)
+ default:
+ eval.IncompatibleDSL()
+ }
+}
diff --git a/vendor/goa.design/goa/dsl/method.go b/vendor/goa.design/goa/dsl/method.go
new file mode 100644
index 000000000..614d2e952
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/method.go
@@ -0,0 +1,35 @@
+package dsl
+
+import (
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+// Method defines a single service method.
+//
+// Method must appear in a Service expression.
+//
+// Method takes two arguments: the name of the method and the defining DSL.
+//
+// Example:
+//
+// Method("add", func() {
+// Description("The add method returns the sum of A and B")
+// Docs(func() {
+// Description("Add docs")
+// URL("http://adder.goa.design/docs/endpoints/add")
+// })
+// Payload(Operands)
+// Result(Sum)
+// Error(ErrInvalidOperands)
+// })
+//
+func Method(name string, fn func()) {
+ s, ok := eval.Current().(*expr.ServiceExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ // The DSL function is stored on the expression for later evaluation.
+ ep := &expr.MethodExpr{Name: name, Service: s, DSLFunc: fn}
+ s.Methods = append(s.Methods, ep)
+}
diff --git a/vendor/goa.design/goa/dsl/payload.go b/vendor/goa.design/goa/dsl/payload.go
new file mode 100644
index 000000000..748e641af
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/payload.go
@@ -0,0 +1,185 @@
+package dsl
+
+import (
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+// Payload defines the data type of an method input. Payload also makes the
+// input required.
+//
+// Payload must appear in a Method expression.
+//
+// Payload takes one to three arguments. The first argument is either a type or
+// a DSL function. If the first argument is a type then an optional description
+// may be passed as second argument. Finally a DSL may be passed as last
+// argument that further specializes the type by providing additional
+// validations (e.g. list of required attributes)
+//
+// The valid usage for Payload are thus:
+//
+// Payload(Type)
+//
+// Payload(func())
+//
+// Payload(Type, "description")
+//
+// Payload(Type, func())
+//
+// Payload(Type, "description", func())
+//
+// Examples:
+//
+// Method("upper", func() {
+// // Use primitive type.
+// Payload(String)
+// })
+//
+// Method("upper", func() {
+// // Use primitive type and description
+// Payload(String, "string to convert to uppercase")
+// })
+//
+// Method("upper", func() {
+// // Use primitive type, description and validations
+// Payload(String, "string to convert to uppercase", func() {
+// Pattern("^[a-z]")
+// })
+// })
+//
+// Method("add", func() {
+// // Define payload data structure inline
+// Payload(func() {
+// Description("Left and right operands to add")
+// Attribute("left", Int32, "Left operand")
+// Attribute("right", Int32, "Right operand")
+// Required("left", "right")
+// })
+// })
+//
+// Method("add", func() {
+// // Define payload type by reference to user type
+// Payload(Operands)
+// })
+//
+// Method("divide", func() {
+// // Specify additional required attributes on user type.
+// Payload(Operands, func() {
+// Required("left", "right")
+// })
+// })
+//
+func Payload(val interface{}, args ...interface{}) {
+ if len(args) > 2 {
+ eval.ReportError("too many arguments")
+ }
+ e, ok := eval.Current().(*expr.MethodExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ // methodDSL validates the arguments and builds the payload attribute.
+ e.Payload = methodDSL("Payload", val, args...)
+}
+
+// StreamingPayload defines a method that accepts a stream of instances of the
+// given type.
+//
+// StreamingPayload must appear in a Method expression.
+//
+// The arguments to a StreamingPayload DSL is same as the Payload DSL.
+//
+// Examples:
+//
+// // Method payload is the JWT token and the method streaming payload is a
+// // stream of strings.
+// Method("upper", func() {
+// Payload(func() {
+// Token("token", String, func() {
+// Description("JWT used for authentication")
+// })
+// })
+// StreamingPayload(String)
+// })
+//
+// // Method streaming payload is a stream of string with validation set
+// // on each
+// Method("upper", func() {
+// StreamingPayload(String, "string to convert to uppercase", func() {
+// Pattern("^[a-z]")
+// })
+// })
+//
+// // Method payload is a stream of objects defined inline
+// Method("add", func() {
+// StreamingPayload(func() {
+// Description("Left and right operands to add")
+// Attribute("left", Int32, "Left operand")
+// Attribute("right", Int32, "Right operand")
+// Required("left", "right")
+// })
+// })
+//
+// // Method payload is a stream of user type
+// Method("add", func() {
+// StreamingPayload(Operands)
+// })
+//
+func StreamingPayload(val interface{}, args ...interface{}) {
+ if len(args) > 2 {
+ eval.ReportError("too many arguments")
+ }
+ e, ok := eval.Current().(*expr.MethodExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ e.StreamingPayload = methodDSL("StreamingPayload", val, args...)
+ // Adding a client stream to a method that already streams results makes
+ // the method bidirectional, otherwise it streams from the client only.
+ if e.Stream == expr.ServerStreamKind {
+ e.Stream = expr.BidirectionalStreamKind
+ } else {
+ e.Stream = expr.ClientStreamKind
+ }
+}
+
+// methodDSL builds the attribute describing a method payload, streaming
+// payload or result. suffix is the name of the calling DSL function used in
+// error messages, p is the type or DSL function given as first argument and
+// args holds the remaining arguments (optional description and/or DSL
+// function). methodDSL returns nil after reporting an error when p is neither
+// a type nor a function.
+func methodDSL(suffix string, p interface{}, args ...interface{}) *expr.AttributeExpr {
+ var (
+ att *expr.AttributeExpr
+ fn func()
+ )
+ switch actual := p.(type) {
+ case func():
+ fn = actual
+ att = &expr.AttributeExpr{Type: &expr.Object{}}
+ case expr.UserType:
+ if len(args) == 0 {
+ // Do not duplicate type if it is not customized
+ return &expr.AttributeExpr{Type: actual}
+ }
+ att = &expr.AttributeExpr{Type: expr.Dup(actual)}
+ case expr.DataType:
+ att = &expr.AttributeExpr{Type: actual}
+ default:
+ eval.ReportError("invalid %s argument, must be a type or a function", suffix)
+ return nil
+ }
+ if len(args) >= 1 {
+ if f, ok := args[len(args)-1].(func()); ok {
+ if fn != nil {
+ eval.ReportError("invalid arguments in %s call, must be (type), (func), (type, func), (type, desc) or (type, desc, func)", suffix)
+ }
+ fn = f
+ }
+ if d, ok := args[0].(string); ok {
+ att.Description = d
+ }
+ }
+ if fn != nil {
+ eval.Execute(fn, att)
+ if obj, ok := att.Type.(*expr.Object); ok {
+ if len(*obj) == 0 {
+ // The DSL did not define any attribute: use Empty.
+ att.Type = expr.Empty
+ }
+ }
+ }
+ return att
+}
diff --git a/vendor/goa.design/goa/dsl/response.go b/vendor/goa.design/goa/dsl/response.go
new file mode 100644
index 000000000..f43ecd0f2
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/response.go
@@ -0,0 +1,268 @@
+package dsl
+
+import (
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+// Response describes a HTTP or a gRPC response. Response describes both success
+// and error responses. When describing an error response the first argument is
+// the name of the error.
+//
+// While a service method may only define a single result type, Response may
+// appear multiple times to define multiple success HTTP responses. In this case
+// the Tag expression makes it possible to identify a result type attribute and
+// a corresponding string value used to select the proper success response (each
+// success response is associated with a different tag value). gRPC responses
+// may only define one success response.
+//
+// Response may appear in an API or service expression to define error responses
+// common to all the API or service methods. Response may also appear in a
+// method expression to define both success and error responses specific to the
+// method.
+//
+// Response accepts one to three arguments. Success response accepts a status
+// code as first argument. If the first argument is a status code then a
+// function may be given as the second argument. This function may provide a
+// description and describes how to map the result type attributes to transport
+// specific constructs (e.g. HTTP headers and body, gRPC metadata and message).
+//
+// The valid invocations for successful response are thus:
+//
+// * Response(status)
+//
+// * Response(func)
+//
+// * Response(status, func)
+//
+// Error responses additionally accept the name of the error as first argument.
+//
+// * Response(error_name, status)
+//
+// * Response(error_name, func)
+//
+// * Response(error_name, status, func)
+//
+// By default (i.e. if Response only defines a status code) then:
+//
+// - success HTTP responses use code 200 (OK) and error HTTP responses use code 400 (BadRequest)
+// - success gRPC responses use code 0 (OK) and error gRPC responses use code 2 (Unknown)
+// - The result type attributes are all mapped to the HTTP response body or gRPC response message.
+//
+// Example:
+//
+// Method("create", func() {
+// Payload(CreatePayload)
+// Result(CreateResult)
+// Error("an_error")
+//
+// HTTP(func() {
+// Response(StatusAccepted, func() { // HTTP status code set using argument
+// Description("Response used for async creations")
+// Tag("outcome", "accepted") // Tag identifies a result type attribute and corresponding
+// // value for this response to be selected.
+// Header("taskHref") // map "taskHref" attribute to header, all others to body
+// })
+//
+// Response(StatusCreated, func () {
+// Tag("outcome", "created") // CreateResult type to describe body
+// })
+//
+// Response(func() {
+// Description("Response used when item already exists")
+// Code(StatusNoContent) // HTTP status code set using Code
+// Body(Empty) // Override method result type
+// })
+//
+// Response("an_error", StatusConflict) // Override default of 400
+// })
+//
+// GRPC(func() {
+// Response(CodeOK, func() {
+// Metadata("taskHref") // map "taskHref" attribute to metadata, all others to message
+// })
+//
+// Response("an_error", CodeInternal, func() {
+// Description("Error returned for internal errors")
+// })
+// })
+// })
+//
+func Response(val interface{}, args ...interface{}) {
+ // A string first argument names an error: the response is an error
+ // response. Otherwise it describes a success response.
+ name, ok := val.(string)
+ switch t := eval.Current().(type) {
+ case *expr.HTTPExpr:
+ if !ok {
+ eval.InvalidArgError("name of error", val)
+ return
+ }
+ if e := httpError(name, t, args...); e != nil {
+ t.Errors = append(t.Errors, e)
+ }
+ case *expr.GRPCExpr:
+ if !ok {
+ eval.InvalidArgError("name of error", val)
+ return
+ }
+ if e := grpcError(name, t, args...); e != nil {
+ t.Errors = append(t.Errors, e)
+ }
+ case *expr.HTTPServiceExpr:
+ if !ok {
+ eval.InvalidArgError("name of error", val)
+ return
+ }
+ if e := httpError(name, t, args...); e != nil {
+ t.HTTPErrors = append(t.HTTPErrors, e)
+ }
+ case *expr.HTTPEndpointExpr:
+ if ok {
+ // error response
+ if e := httpError(name, t, args...); e != nil {
+ t.HTTPErrors = append(t.HTTPErrors, e)
+ }
+ return
+ }
+ // success response
+ code, fn := parseResponseArgs(val, args...)
+ if code == 0 {
+ code = expr.StatusOK
+ }
+ resp := &expr.HTTPResponseExpr{
+ StatusCode: code,
+ Parent: t,
+ }
+ if fn != nil {
+ eval.Execute(fn, resp)
+ }
+ t.Responses = append(t.Responses, resp)
+ case *expr.GRPCServiceExpr:
+ if !ok {
+ eval.InvalidArgError("name of error", val)
+ return
+ }
+ if e := grpcError(name, t, args...); e != nil {
+ t.GRPCErrors = append(t.GRPCErrors, e)
+ }
+ case *expr.GRPCEndpointExpr:
+ if ok {
+ // error response
+ if e := grpcError(name, t, args...); e != nil {
+ t.GRPCErrors = append(t.GRPCErrors, e)
+ }
+ return
+ }
+ // success response - gRPC endpoints define a single one.
+ code, fn := parseResponseArgs(val, args...)
+ resp := &expr.GRPCResponseExpr{
+ StatusCode: code,
+ Parent: t,
+ }
+ if fn != nil {
+ eval.Execute(fn, resp)
+ }
+ t.Response = resp
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// Code sets the Response status code.
+//
+// Code must appear in a Response expression.
+//
+// Code accepts one argument: the HTTP or gRPC status code.
+func Code(code int) {
+ // The same DSL serves HTTP and gRPC response expressions.
+ switch t := eval.Current().(type) {
+ case *expr.HTTPResponseExpr:
+ t.StatusCode = code
+ case *expr.GRPCResponseExpr:
+ t.StatusCode = code
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// grpcError builds the gRPC error response expression for the error named n
+// defined on parent expression p. args holds the status code and/or DSL
+// function given to Response. grpcError returns nil after reporting an error
+// when no arguments were given.
+func grpcError(n string, p eval.Expression, args ...interface{}) *expr.GRPCErrorExpr {
+ if len(args) == 0 {
+ eval.ReportError("not enough arguments, use Response(name, status), Response(name, status, func()) or Response(name, func())")
+ return nil
+ }
+ var (
+ code int
+ fn func()
+ val interface{}
+ )
+ val = args[0]
+ args = args[1:]
+ code, fn = parseResponseArgs(val, args...)
+ if code == 0 {
+ // Default gRPC error code.
+ code = CodeUnknown
+ }
+ resp := &expr.GRPCResponseExpr{
+ StatusCode: code,
+ Parent: p,
+ }
+ if fn != nil {
+ eval.Execute(fn, resp)
+ }
+ return &expr.GRPCErrorExpr{
+ Name: n,
+ Response: resp,
+ }
+}
+
+// parseResponseArgs interprets the arguments given to Response: val is either
+// an int status code or a DSL function and args may hold a trailing DSL
+// function. Invalid arguments are reported via eval and result in zero
+// return values.
+func parseResponseArgs(val interface{}, args ...interface{}) (code int, fn func()) {
+ switch t := val.(type) {
+ case int:
+ code = t
+ if len(args) > 1 {
+ eval.ReportError("too many arguments given to Response (%d)", len(args)+1)
+ return
+ }
+ if len(args) == 1 {
+ if d, ok := args[0].(func()); ok {
+ fn = d
+ } else {
+ eval.InvalidArgError("function", args[0])
+ return
+ }
+ }
+ case func():
+ // A DSL function must be the only argument.
+ if len(args) > 0 {
+ eval.InvalidArgError("int (HTTP status code)", val)
+ return
+ }
+ fn = t
+ default:
+ eval.InvalidArgError("int (HTTP status code) or function", val)
+ return
+ }
+ return
+}
+
+// httpError builds the HTTP error response expression for the error named n
+// defined on parent expression p. args holds the status code and/or DSL
+// function given to Response. httpError returns nil after reporting an error
+// when no arguments were given.
+func httpError(n string, p eval.Expression, args ...interface{}) *expr.HTTPErrorExpr {
+ if len(args) == 0 {
+ eval.ReportError("not enough arguments, use Response(name, status), Response(name, status, func()) or Response(name, func())")
+ return nil
+ }
+ var (
+ code int
+ fn func()
+ val interface{}
+ )
+ val = args[0]
+ args = args[1:]
+ code, fn = parseResponseArgs(val, args...)
+ if code == 0 {
+ // Default HTTP error status code.
+ code = expr.StatusBadRequest
+ }
+ resp := &expr.HTTPResponseExpr{
+ StatusCode: code,
+ Parent: p,
+ }
+ if fn != nil {
+ eval.Execute(fn, resp)
+ }
+ return &expr.HTTPErrorExpr{
+ Name: n,
+ Response: resp,
+ }
+}
diff --git a/vendor/goa.design/goa/dsl/result.go b/vendor/goa.design/goa/dsl/result.go
new file mode 100644
index 000000000..7c7c7d933
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/result.go
@@ -0,0 +1,149 @@
+package dsl
+
+import (
+ "goa.design/goa/expr"
+ "goa.design/goa/eval"
+)
+
+// Result defines the data type of a method output.
+//
+// Result must appear in a Method expression.
+//
+// Result takes one to three arguments. The first argument is either a type or a
+// DSL function. If the first argument is a type then an optional description
+// may be passed as second argument. Finally a DSL may be passed as last
+// argument that further specializes the type by providing additional
+// validations (e.g. list of required attributes) The DSL may also specify a
+// view when the first argument is a result type corresponding to the view
+// rendered by this method. If no view is specified then the generated code
+// defines response methods for all views.
+//
+// The valid syntax for Result is thus:
+//
+// Result(Type)
+//
+// Result(func())
+//
+// Result(Type, "description")
+//
+// Result(Type, func())
+//
+// Result(Type, "description", func())
+//
+// Examples:
+//
+// // Define result using primitive type
+// Method("add", func() {
+// Result(Int32)
+// })
+//
+// // Define result using primitive type and description
+// Method("add", func() {
+// Result(Int32, "Resulting sum")
+// })
+//
+// // Define result using primitive type, description and validations.
+// Method("add", func() {
+// Result(Int32, "Resulting sum", func() {
+// Minimum(0)
+// })
+// })
+//
+// // Define result using object defined inline
+// Method("add", func() {
+// Result(func() {
+// Description("Result defines a single field which is the sum.")
+// Attribute("value", Int32, "Resulting sum")
+// Required("value")
+// })
+// })
+//
+// // Define result type using user type
+// Method("add", func() {
+// Result(Sum)
+// })
+//
+// // Specify view and required attributes on result type
+// Method("add", func() {
+// Result(Sum, func() {
+// View("default")
+// Required("value")
+// })
+// })
+//
+func Result(val interface{}, args ...interface{}) {
+ // At most: type/DSL, description, DSL — see the doc comment above.
+ if len(args) > 2 {
+ eval.ReportError("too many arguments")
+ return
+ }
+ // Result is only valid directly inside a Method expression.
+ e, ok := eval.Current().(*expr.MethodExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ // methodDSL resolves the (type|DSL, description, DSL) argument forms
+ // into the method result attribute; "Result" is used in error messages.
+ e.Result = methodDSL("Result", val, args...)
+}
+
+// StreamingResult defines a method that streams instances of the given type.
+//
+// StreamingResult must appear in a Method expression.
+//
+// The arguments to a StreamingResult DSL is same as the Result DSL.
+//
+// Examples:
+//
+// // Method result is a stream of integers
+// Method("add", func() {
+// StreamingResult(Int32)
+// })
+//
+// Method("add", func() {
+// StreamingResult(Int32, "Resulting sum")
+// })
+//
+// // Method result is a stream of integers with validation set on each
+// Method("add", func() {
+// StreamingResult(Int32, "Resulting sum", func() {
+// Minimum(0)
+// })
+// })
+//
+// // Method result is a stream of objects defined inline
+// Method("add", func() {
+// StreamingResult(func() {
+// Description("Result defines a single field which is the sum.")
+// Attribute("value", Int32, "Resulting sum")
+// Required("value")
+// })
+// })
+//
+// // Method result is a stream of user type
+// Method("add", func() {
+// StreamingResult(Sum)
+// })
+//
+// // Method result is a stream of result type with a view
+// Method("add", func() {
+// StreamingResult(Sum, func() {
+// View("default")
+// Required("value")
+// })
+// })
+//
+func StreamingResult(val interface{}, args ...interface{}) {
+ // At most: type/DSL, description, DSL — same forms as Result.
+ if len(args) > 2 {
+ eval.ReportError("too many arguments")
+ return
+ }
+ e, ok := eval.Current().(*expr.MethodExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ e.Result = methodDSL("Result", val, args...)
+ // Upgrade the method stream kind: if the payload side already declared
+ // client streaming the method becomes bidirectional, otherwise it is a
+ // plain server-side stream.
+ if e.Stream == expr.ClientStreamKind {
+ e.Stream = expr.BidirectionalStreamKind
+ } else {
+ e.Stream = expr.ServerStreamKind
+ }
+}
diff --git a/vendor/goa.design/goa/dsl/result_type.go b/vendor/goa.design/goa/dsl/result_type.go
new file mode 100644
index 000000000..359f34c7b
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/result_type.go
@@ -0,0 +1,443 @@
+package dsl
+
+import (
+ "fmt"
+ "mime"
+ "strings"
+
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+// Counter used to create unique result type names for identifier-less result
+// types.
+var resultTypeCount int
+
+// ResultType defines a result type used to describe a method response.
+//
+// Result types have a unique identifier as described in RFC 6838. The
+// identifier defines the default value for the Content-Type header of HTTP
+// responses.
+//
+// The result type expression includes a listing of all the response attributes.
+// Views specify which of the attributes are actually rendered so that the same
+// result type expression may represent multiple rendering of a given response.
+//
+// All result types have a view named "default". This view is used to render the
+// result type in responses when no other view is specified. If the default view
+// is not explicitly described in the DSL then one is created that lists all the
+// result type attributes.
+//
+// ResultType is a top level DSL.
+//
+// ResultType accepts two arguments: the result type identifier and the defining
+// DSL.
+//
+// Example:
+//
+// var BottleMT = ResultType("application/vnd.goa.example.bottle", func() {
+// Description("A bottle of wine")
+// TypeName("BottleResult") // Override generated type name
+// ContentType("application/json") // Override Content-Type header
+//
+// Attributes(func() {
+// Attribute("id", Int, "ID of bottle")
+// Attribute("href", String, "API href of bottle")
+// Attribute("account", Account, "Owner account")
+// Attribute("origin", Origin, "Details on wine origin")
+// Required("id", "href")
+// })
+//
+// View("default", func() { // Explicitly define default view
+// Attribute("id")
+// Attribute("href")
+// })
+//
+// View("extended", func() { // Define "extended" view
+// Attribute("id")
+// Attribute("href")
+// Attribute("account")
+// Attribute("origin")
+// })
+// })
+//
+func ResultType(identifier string, fn func()) *expr.ResultTypeExpr {
+ // ResultType is a top level DSL only.
+ if _, ok := eval.Current().(eval.TopExpr); !ok {
+ eval.IncompatibleDSL()
+ return nil
+ }
+
+ // Validate Result Type
+ identifier, params, err := mime.ParseMediaType(identifier)
+ if err != nil {
+ eval.ReportError("invalid result type identifier %#v: %s",
+ identifier, err)
+ // We don't return so that other errors may be captured in this
+ // one run.
+ identifier = "text/plain"
+ }
+ canonicalID := expr.CanonicalIdentifier(identifier)
+ // Validate that result type identifier doesn't clash
+ for _, rt := range expr.Root.ResultTypes {
+ if re := rt.(*expr.ResultTypeExpr); re.Identifier == canonicalID {
+ eval.ReportError(
+ "result type %#v with canonical identifier %#v is defined twice",
+ identifier, canonicalID)
+ return nil
+ }
+ }
+ identifier = mime.FormatMediaType(identifier, params)
+ // Derive the default Go type name from the last identifier segment,
+ // dropping any "+suffix" and "vnd." prefix then title-casing the
+ // dot-separated elements, e.g. "application/vnd.goa.bottle+json"
+ // yields "Bottle".
+ lastPart := identifier
+ lastPartIndex := strings.LastIndex(identifier, "/")
+ if lastPartIndex > -1 {
+ lastPart = identifier[lastPartIndex+1:]
+ }
+ plusIndex := strings.Index(lastPart, "+")
+ if plusIndex > 0 {
+ lastPart = lastPart[:plusIndex]
+ }
+ lastPart = strings.TrimPrefix(lastPart, "vnd.")
+ elems := strings.Split(lastPart, ".")
+ for i, e := range elems {
+ elems[i] = strings.Title(e)
+ }
+ typeName := strings.Join(elems, "")
+ // Identifier-less result types get a unique generated name via the
+ // package-level counter.
+ if typeName == "" {
+ resultTypeCount++
+ typeName = fmt.Sprintf("ResultType%d", resultTypeCount)
+ }
+ // Now save the type in the API result types map
+ mt := expr.NewResultTypeExpr(typeName, identifier, fn)
+ expr.Root.ResultTypes = append(expr.Root.ResultTypes, mt)
+
+ return mt
+}
+
+// TypeName makes it possible to set the Go struct name for a type or result
+// type in the generated code. By default goa uses the name (type) or identifier
+// (result type) given in the DSL and computes a valid Go identifier from it.
+// This function makes it possible to override that and provide a custom name.
+// name must be a valid Go identifier.
+func TypeName(name string) {
+ switch e := eval.Current().(type) {
+ case expr.UserType:
+ // User and result types carry their name directly.
+ e.Rename(name)
+ case *expr.AttributeExpr:
+ // Attributes record the override in meta for the code generator.
+ if e.Meta == nil {
+ e.Meta = make(expr.MetaExpr)
+ }
+ e.Meta["struct:type:name"] = []string{name}
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// View adds a new view to a result type. A view has a name and lists attributes
+// that are rendered when the view is used to produce a response. The attribute
+// names must appear in the result type expression. If an attribute is itself a
+// result type then the view may specify which view to use when rendering the
+// attribute using the View function in the View DSL. If not specified then the
+// view named "default" is used.
+//
+// View must appear in a ResultType expression.
+//
+// View accepts two arguments: the view name and its defining DSL.
+//
+// Examples:
+//
+// View("default", func() {
+// // "id" and "name" must be result type attributes
+// Attribute("id")
+// Attribute("name")
+// })
+//
+// View("extended", func() {
+// Attribute("id")
+// Attribute("name")
+// Attribute("origin", func() {
+// // Use view "extended" to render attribute "origin"
+// View("extended")
+// })
+// })
+//
+func View(name string, adsl ...func()) {
+ switch e := eval.Current().(type) {
+ case *expr.ResultTypeExpr:
+ mt := e
+ if mt.View(name) != nil {
+ eval.ReportError("multiple expressions for view %#v in result type %#v", name, mt.TypeName)
+ return
+ }
+ at := &expr.AttributeExpr{}
+ ok := false
+ if len(adsl) > 0 {
+ ok = eval.Execute(adsl[0], at)
+ } else if a, isArray := mt.Type.(*expr.Array); isArray {
+ // inherit view from collection element if present
+ //
+ // Bug fix: the else-if initializer used to redeclare "ok",
+ // shadowing the outer flag; "ok = true" below then assigned
+ // the shadowed copy so the buildView call never ran and
+ // inherited collection views were silently dropped. The
+ // if-scoped booleans now use distinct names.
+ elem := a.ElemType
+ if elem != nil {
+ if pa, isRT := elem.Type.(*expr.ResultTypeExpr); isRT {
+ if v := pa.View(name); v != nil {
+ at = v.AttributeExpr
+ ok = true
+ } else {
+ eval.ReportError("unknown view %#v", name)
+ return
+ }
+ }
+ }
+ }
+ if ok {
+ // Validate the view attributes against the result type and
+ // register the new view.
+ view, err := buildView(name, mt, at)
+ if err != nil {
+ eval.ReportError(err.Error())
+ return
+ }
+ mt.Views = append(mt.Views, view)
+ }
+
+ case *expr.AttributeExpr:
+ // Inside an attribute DSL, View selects which view renders the
+ // attribute; the choice is recorded in the "view" meta.
+ if e.Meta == nil {
+ e.Meta = make(map[string][]string)
+ }
+ e.Meta["view"] = []string{name}
+
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// CollectionOf creates a collection result type from its element result type. A
+// collection result type represents the content of responses that return a
+// collection of values such as listings. The expression accepts an optional DSL
+// as second argument that allows specifying which view(s) of the original result
+// type apply.
+//
+// The resulting result type identifier is built from the element result type by
+// appending the result type parameter "type" with value "collection".
+//
+// CollectionOf must appear wherever ResultType can.
+//
+// CollectionOf takes the element result type as first argument and an optional
+// DSL as second argument.
+//
+// Example:
+//
+// var DivisionResult = ResultType("application/vnd.goa.divresult", func() {
+// Attributes(func() {
+// Attribute("value", Float64)
+// })
+// View("default", func() {
+// Attribute("value")
+// })
+// })
+//
+// var MultiResults = CollectionOf(DivisionResult)
+//
+func CollectionOf(v interface{}, adsl ...func()) *expr.ResultTypeExpr {
+ // v may be a *ResultTypeExpr or a result type identifier string that
+ // resolves to one through the root user type registry.
+ var m *expr.ResultTypeExpr
+ var ok bool
+ m, ok = v.(*expr.ResultTypeExpr)
+ if !ok {
+ if id, ok := v.(string); ok {
+ if dt := expr.Root.UserType(expr.CanonicalIdentifier(id)); dt != nil {
+ if mt, ok := dt.(*expr.ResultTypeExpr); ok {
+ m = mt
+ }
+ }
+ }
+ }
+ if m == nil {
+ eval.ReportError("invalid CollectionOf argument: not a result type and not a known result type identifier")
+ // don't return nil to avoid panics, the error will get reported at the end
+ return expr.NewResultTypeExpr("InvalidCollection", "text/plain", nil)
+ }
+ id := m.Identifier
+ rtype, params, err := mime.ParseMediaType(id)
+ if err != nil {
+ eval.ReportError("invalid result type identifier %#v: %s", id, err)
+ // don't return nil to avoid panics, the error will get reported at the end
+ return expr.NewResultTypeExpr("InvalidCollection", "text/plain", nil)
+ }
+ // Tag the identifier with type=collection unless the element type
+ // already carries a "type" parameter.
+ hasType := false
+ for param := range params {
+ if param == "type" {
+ hasType = true
+ break
+ }
+ }
+ if !hasType {
+ params["type"] = "collection"
+ }
+ id = mime.FormatMediaType(rtype, params)
+ canonical := expr.CanonicalIdentifier(id)
+ if mt := expr.Root.GeneratedResultType(canonical); mt != nil {
+ // Already have a type for this collection, reuse it.
+ return mt
+ }
+ mt := expr.NewResultTypeExpr("", id, func() {
+ rt, ok := eval.Current().(*expr.ResultTypeExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ // Cannot compute collection type name before element result type
+ // DSL has executed since the DSL may modify element type name
+ // via the TypeName function.
+ rt.TypeName = m.TypeName + "Collection"
+ rt.AttributeExpr = &expr.AttributeExpr{Type: ArrayOf(m)}
+ if len(adsl) > 0 {
+ eval.Execute(adsl[0], rt)
+ }
+ if rt.Views == nil {
+ // If the DSL didn't create any view (or there is no DSL
+ // at all) then inherit the views from the collection
+ // element.
+ rt.Views = make([]*expr.ViewExpr, len(m.Views))
+ for i, v := range m.Views {
+ v := v
+ rt.Views[i] = v
+ }
+ }
+ })
+ // do not execute the DSL right away, will be done last to make sure
+ // the element DSL has run first.
+ *expr.Root.GeneratedTypes = append(*expr.Root.GeneratedTypes, mt)
+ return mt
+}
+
+// Reference sets a type or result type reference. The value itself can be a
+// type or a result type. The reference type attributes define the default
+// properties for attributes with the same name in the type using the reference.
+//
+// Reference may be used in Type or ResultType, it may appear multiple times in
+// which case attributes are looked up in each reference in order of appearance
+// in the DSL.
+//
+// Reference accepts a single argument: the type or result type containing the
+// attributes that define the default properties of the attributes of the type
+// or result type that uses Reference.
+//
+// Example:
+//
+// var Bottle = Type("bottle", func() {
+// Attribute("name", String, func() {
+// MinLength(3)
+// })
+// Attribute("vintage", Int32, func() {
+// Minimum(1970)
+// })
+// Attribute("somethingelse", String)
+// })
+//
+// var BottleResult = ResultType("vnd.goa.bottle", func() {
+// Reference(Bottle)
+// Attributes(func() {
+// Attribute("id", UInt64, "ID is the bottle identifier")
+//
+// // The type and validation of "name" and "vintage" are
+// // inherited from the Bottle type "name" and "vintage"
+// // attributes.
+// Attribute("name")
+// Attribute("vintage")
+// })
+// })
+//
+func Reference(t expr.DataType) {
+ // Only object types can provide attribute defaults.
+ if !expr.IsObject(t) {
+ eval.ReportError("argument of Reference must be an object, got %s", t.Name())
+ return
+ }
+ // References are looked up in order of appearance, so append.
+ switch def := eval.Current().(type) {
+ case *expr.ResultTypeExpr:
+ def.References = append(def.References, t)
+ case *expr.AttributeExpr:
+ def.References = append(def.References, t)
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// Extend adds the parameter type attributes to the type using Extend. The
+// parameter type must be an object.
+//
+// Extend may be used in Type or ResultType. Extend accepts a single argument:
+// the type or result type containing the attributes to be copied.
+//
+// Example:
+//
+// var CreateBottlePayload = Type("CreateBottlePayload", func() {
+// Attribute("name", String, func() {
+// MinLength(3)
+// })
+// Attribute("vintage", Int32, func() {
+// Minimum(1970)
+// })
+// })
+//
+// var UpdateBottlePayload = Type("UpatePayload", func() {
+// Atribute("id", String, "ID of bottle to update")
+// Extend(CreateBottlePayload) // Adds attributes "name" and "vintage"
+// })
+//
+func Extend(t expr.DataType) {
+ // Only object types have attributes to copy.
+ if !expr.IsObject(t) {
+ eval.ReportError("argument of Extend must be an object, got %s", t.Name())
+ return
+ }
+ // Record t as a base type; its attributes are merged in later during
+ // expression finalization.
+ switch def := eval.Current().(type) {
+ case *expr.ResultTypeExpr:
+ def.Bases = append(def.Bases, t)
+ case *expr.AttributeExpr:
+ def.Bases = append(def.Bases, t)
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// Attributes implements the result type Attributes DSL. See ResultType.
+func Attributes(fn func()) {
+ mt, ok := eval.Current().(*expr.ResultTypeExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ eval.Execute(fn, mt)
+}
+
+// buildView builds a view expression given an attribute and a corresponding
+// result type.
+func buildView(name string, mt *expr.ResultTypeExpr, at *expr.AttributeExpr) (*expr.ViewExpr, error) {
+ if at.Type == nil {
+ return nil, fmt.Errorf("invalid view DSL")
+ }
+ o := expr.AsObject(at.Type)
+ if o == nil {
+ return nil, fmt.Errorf("invalid view DSL")
+ }
+ mto := expr.AsObject(mt.Type)
+ if mto == nil {
+ mto = expr.AsObject(mt.Type.(*expr.Array).ElemType.Type)
+ }
+ for _, nat := range *o {
+ n := nat.Name
+ cat := nat.Attribute
+ if existing := mt.Find(n); existing != nil {
+ dup := expr.DupAtt(existing)
+ if dup.Meta == nil {
+ dup.Meta = make(map[string][]string)
+ }
+ if len(cat.Meta["view"]) > 0 {
+ dup.Meta["view"] = cat.Meta["view"]
+ }
+ o.Set(n, dup)
+ } else if n != "links" {
+ return nil, fmt.Errorf("unknown attribute %#v", n)
+ }
+ }
+ return &expr.ViewExpr{
+ AttributeExpr: at,
+ Name: name,
+ Parent: mt,
+ }, nil
+}
diff --git a/vendor/goa.design/goa/dsl/security.go b/vendor/goa.design/goa/dsl/security.go
new file mode 100644
index 000000000..372e0fdbf
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/security.go
@@ -0,0 +1,683 @@
+package dsl
+
+import (
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+// BasicAuthSecurity defines a basic authentication security scheme.
+//
+// BasicAuthSecurity is a top level DSL.
+//
+// BasicAuthSecurity takes a name as first argument and an optional DSL as
+// second argument.
+//
+// Example:
+//
+// var Basic = BasicAuthSecurity("basicauth", func() {
+// Description("Use your own password!")
+// })
+//
+func BasicAuthSecurity(name string, fn ...func()) *expr.SchemeExpr {
+ // Top level DSL only.
+ if _, ok := eval.Current().(eval.TopExpr); !ok {
+ eval.IncompatibleDSL()
+ return nil
+ }
+
+ // Scheme names must be unique across the design.
+ if securitySchemeRedefined(name) {
+ return nil
+ }
+
+ e := &expr.SchemeExpr{
+ Kind: expr.BasicAuthKind,
+ SchemeName: name,
+ }
+
+ // Optional DSL further describes the scheme (e.g. Description).
+ if len(fn) != 0 {
+ if !eval.Execute(fn[0], e) {
+ return nil
+ }
+ }
+
+ expr.Root.Schemes = append(expr.Root.Schemes, e)
+
+ return e
+}
+
+// APIKeySecurity defines an API key security scheme where a key must be
+// provided by the client to perform authorization.
+//
+// APIKeySecurity is a top level DSL.
+//
+// APIKeySecurity takes a name as first argument and an optional DSL as
+// second argument.
+//
+// Example:
+//
+// var APIKey = APIKeySecurity("key", func() {
+// Description("Shared secret")
+// })
+//
+func APIKeySecurity(name string, fn ...func()) *expr.SchemeExpr {
+ // Top level DSL only.
+ if _, ok := eval.Current().(eval.TopExpr); !ok {
+ eval.IncompatibleDSL()
+ return nil
+ }
+
+ // Scheme names must be unique across the design.
+ if securitySchemeRedefined(name) {
+ return nil
+ }
+
+ e := &expr.SchemeExpr{
+ Kind: expr.APIKeyKind,
+ SchemeName: name,
+ }
+
+ // Optional DSL further describes the scheme (e.g. Description).
+ if len(fn) != 0 {
+ if !eval.Execute(fn[0], e) {
+ return nil
+ }
+ }
+
+ expr.Root.Schemes = append(expr.Root.Schemes, e)
+
+ return e
+}
+
+// OAuth2Security defines an OAuth2 security scheme. The DSL provided as second
+// argument defines the specific flows supported by the scheme. The supported
+// flow types are ImplicitFlow, PasswordFlow, ClientCredentialsFlow, and
+// AuthorizationCodeFlow. The DSL also defines the scopes that may be
+// associated with the incoming request tokens.
+//
+// OAuth2Security is a top level DSL.
+//
+// OAuth2Security takes a name as first argument and a DSL as second argument.
+//
+// Example:
+//
+// var OAuth2 = OAuth2Security("googauth", func() {
+// ImplicitFlow("/authorization")
+//
+// Scope("api:write", "Write acess")
+// Scope("api:read", "Read access")
+// })
+//
+func OAuth2Security(name string, fn ...func()) *expr.SchemeExpr {
+ // Top level DSL only.
+ if _, ok := eval.Current().(eval.TopExpr); !ok {
+ eval.IncompatibleDSL()
+ return nil
+ }
+
+ // Scheme names must be unique across the design.
+ if securitySchemeRedefined(name) {
+ return nil
+ }
+
+ e := &expr.SchemeExpr{
+ SchemeName: name,
+ Kind: expr.OAuth2Kind,
+ }
+
+ // The DSL defines the supported flows and scopes.
+ if len(fn) != 0 {
+ if !eval.Execute(fn[0], e) {
+ return nil
+ }
+ }
+
+ expr.Root.Schemes = append(expr.Root.Schemes, e)
+
+ return e
+}
+
+// JWTSecurity defines an HTTP security scheme where a JWT is passed in the
+// request Authorization header as a bearer token to perform auth. This scheme
+// supports defining scopes that endpoint may require to authorize the request.
+// The scheme also supports specifying a token URL used to retrieve token
+// values.
+//
+// Since scopes are not compatible with the Swagger specification, the swagger
+// generator inserts comments in the description of the different elements on
+// which they are defined.
+//
+// JWTSecurity is a top level DSL.
+//
+// JWTSecurity takes a name as first argument and an optional DSL as second
+// argument.
+//
+// Example:
+//
+// var JWT = JWTSecurity("jwt", func() {
+// Scope("system:write", "Write to the system")
+// Scope("system:read", "Read anything in there")
+// })
+//
+func JWTSecurity(name string, fn ...func()) *expr.SchemeExpr {
+ // Top level DSL only.
+ if _, ok := eval.Current().(eval.TopExpr); !ok {
+ eval.IncompatibleDSL()
+ return nil
+ }
+
+ // Scheme names must be unique across the design.
+ if securitySchemeRedefined(name) {
+ return nil
+ }
+
+ // JWTs are bearer tokens carried in the Authorization request header.
+ e := &expr.SchemeExpr{
+ SchemeName: name,
+ Kind: expr.JWTKind,
+ In: "header",
+ Name: "Authorization",
+ }
+
+ // Optional DSL defines scopes and descriptions.
+ if len(fn) != 0 {
+ if !eval.Execute(fn[0], e) {
+ return nil
+ }
+ }
+
+ expr.Root.Schemes = append(expr.Root.Schemes, e)
+
+ return e
+}
+
+// Security defines authentication requirements to access a service or a service
+// method.
+//
+// The requirement refers to one or more OAuth2Security, BasicAuthSecurity,
+// APIKeySecurity or JWTSecurity security scheme. If the schemes include a
+// OAuth2Security or JWTSecurity scheme then required scopes may be listed by
+// name in the Security DSL. All the listed schemes must be validated by the
+// client for the request to be authorized. Security may appear multiple times
+// in the same scope in which case the client may validate any one of the
+// requirements for the request to be authorized.
+//
+// Security must appear in a Service or Method expression.
+//
+// Security accepts an arbitrary number of security schemes as argument
+// specified by name or by reference and an optional DSL function as last
+// argument.
+//
+// Examples:
+//
+// var _ = Service("calculator", func() {
+// // Override default API security requirements. Accept either basic
+// // auth or OAuth2 access token with "api:read" scope.
+// Security(BasicAuth)
+// Security("oauth2", func() {
+// Scope("api:read")
+// })
+//
+// Method("add", func() {
+// Description("Add two operands")
+//
+// // Override default service security requirements. Require
+// // both basic auth and OAuth2 access token with "api:write"
+// // scope.
+// Security(BasicAuth, "oauth2", func() {
+// Scope("api:write")
+// })
+//
+// Payload(Operands)
+// Error(ErrBadRequest, ErrorResult)
+// })
+//
+// Method("health-check", func() {
+// Description("Check health")
+//
+// // Remove need for authorization for this endpoint.
+// NoSecurity()
+//
+// Payload(Operands)
+// Error(ErrBadRequest, ErrorResult)
+// })
+// })
+//
+func Security(args ...interface{}) {
+ // Bug fix: calling Security() with no argument used to panic on the
+ // args[len(args)-1] access below; report a DSL error instead.
+ if len(args) == 0 {
+ eval.ReportError("not enough arguments, at least one security scheme or scheme name is required")
+ return
+ }
+
+ // An optional DSL function may be passed as last argument.
+ var dsl func()
+ {
+ if d, ok := args[len(args)-1].(func()); ok {
+ args = args[:len(args)-1]
+ dsl = d
+ }
+ }
+
+ // Resolve each remaining argument to a duplicated scheme expression:
+ // either by name lookup in the root schemes or directly from a
+ // *SchemeExpr value. Duplication keeps per-requirement scopes from
+ // mutating the shared scheme definition.
+ var schemes []*expr.SchemeExpr
+ {
+ schemes = make([]*expr.SchemeExpr, len(args))
+ for i, arg := range args {
+ switch val := arg.(type) {
+ case string:
+ for _, s := range expr.Root.Schemes {
+ if s.SchemeName == val {
+ schemes[i] = expr.DupScheme(s)
+ break
+ }
+ }
+ if schemes[i] == nil {
+ eval.ReportError("security scheme %q not found", val)
+ return
+ }
+ case *expr.SchemeExpr:
+ schemes[i] = expr.DupScheme(val)
+ default:
+ eval.InvalidArgError("security scheme or security scheme name", val)
+ return
+ }
+ }
+ }
+
+ // The DSL may list required scopes for the requirement.
+ security := &expr.SecurityExpr{Schemes: schemes}
+ if dsl != nil {
+ if !eval.Execute(dsl, security) {
+ return
+ }
+ }
+
+ // Attach the requirement to the enclosing method, service or API.
+ current := eval.Current()
+ switch actual := current.(type) {
+ case *expr.MethodExpr:
+ actual.Requirements = append(actual.Requirements, security)
+ case *expr.ServiceExpr:
+ actual.Requirements = append(actual.Requirements, security)
+ case *expr.APIExpr:
+ actual.Requirements = append(actual.Requirements, security)
+ default:
+ eval.IncompatibleDSL()
+ return
+ }
+}
+
+// NoSecurity removes the need for an endpoint to perform authorization.
+//
+// NoSecurity must appear in Method.
+func NoSecurity() {
+ // A requirement holding the special NoKind scheme marks the method as
+ // explicitly unauthenticated.
+ security := &expr.SecurityExpr{
+ Schemes: []*expr.SchemeExpr{
+ &expr.SchemeExpr{Kind: expr.NoKind},
+ },
+ }
+
+ // Unlike Security, NoSecurity only applies to methods.
+ current := eval.Current()
+ switch actual := current.(type) {
+ case *expr.MethodExpr:
+ actual.Requirements = append(actual.Requirements, security)
+ default:
+ eval.IncompatibleDSL()
+ return
+ }
+}
+
+// Username defines the attribute used to provide the username to an endpoint
+// secured with basic authentication. The parameters and usage of Username are
+// the same as the goa DSL Attribute function.
+//
+// The generated code produced by goa uses the value of the corresponding
+// payload field to compute the basic authentication Authorization header value.
+//
+// Username must appear in Payload or Type.
+//
+// Example:
+//
+// Method("login", func() {
+// Security(Basic)
+// Payload(func() {
+// Username("user", String)
+// Password("pass", String)
+// })
+// HTTP(func() {
+// // The "Authorization" header is defined implicitly.
+// POST("/login")
+// })
+// })
+//
+func Username(name string, args ...interface{}) {
+ // Append a DSL that tags the attribute with the security:username meta
+ // so code generation maps it to the basic auth username.
+ args = useDSL(args, func() { Meta("security:username") })
+ Attribute(name, args...)
+}
+
+// UsernameField is syntactic sugar to define a username attribute with the
+// "rpc:tag" meta set with the value of the first argument.
+//
+// UsernameField takes the same arguments as Username with the addition of the
+// tag value as the first argument.
+//
+func UsernameField(tag interface{}, name string, args ...interface{}) {
+ // Same meta tagging as Username, delegating to Field for the rpc:tag.
+ args = useDSL(args, func() { Meta("security:username") })
+ Field(tag, name, args...)
+}
+
+// Password defines the attribute used to provide the password to an endpoint
+// secured with basic authentication. The parameters and usage of Password are
+// the same as the goa DSL Attribute function.
+//
+// The generated code produced by goa uses the value of the corresponding
+// payload field to compute the basic authentication Authorization header value.
+//
+// Password must appear in Payload or Type.
+//
+// Example:
+//
+// Method("login", func() {
+// Security(Basic)
+// Payload(func() {
+// Username("user", String)
+// Password("pass", String)
+// })
+// HTTP(func() {
+// // The "Authorization" header is defined implicitly.
+// POST("/login")
+// })
+// })
+//
+func Password(name string, args ...interface{}) {
+ // Append a DSL that tags the attribute with the security:password meta
+ // so code generation maps it to the basic auth password.
+ args = useDSL(args, func() { Meta("security:password") })
+ Attribute(name, args...)
+}
+
+// PasswordField is syntactic sugar to define a password attribute with the
+// "rpc:tag" meta set with the value of the first argument.
+//
+// PasswordField takes the same arguments as Password with the addition of the
+// tag value as the first argument.
+//
+func PasswordField(tag interface{}, name string, args ...interface{}) {
+ // Same meta tagging as Password, delegating to Field for the rpc:tag.
+ args = useDSL(args, func() { Meta("security:password") })
+ Field(tag, name, args...)
+}
+
+// APIKey defines the attribute used to provide the API key to an endpoint
+// secured with API keys. The parameters and usage of APIKey are the same as the
+// goa DSL Attribute function except that it accepts an extra first argument
+// corresponding to the name of the API key security scheme.
+//
+// The generated code produced by goa uses the value of the corresponding
+// payload field to set the API key value.
+//
+// APIKey must appear in Payload or Type.
+//
+// Example:
+//
+// Method("secured_read", func() {
+// Security(APIKeyAuth)
+// Payload(func() {
+// APIKey("api_key", "key", String, "API key used to perform authorization")
+// Required("key")
+// })
+// Result(String)
+// HTTP(func() {
+// GET("/")
+// Param("key:k") // Provide the key as a query string param "k"
+// })
+// })
+//
+// Method("secured_write", func() {
+// Security(APIKeyAuth)
+// Payload(func() {
+// APIKey("api_key", "key", String, "API key used to perform authorization")
+// Attribute("data", String, "Data to be written")
+// Required("key", "data")
+// })
+// HTTP(func() {
+// POST("/")
+// Header("key:Authorization") // Provide the key in Authorization header (default)
+// })
+// })
+//
+func APIKey(scheme, name string, args ...interface{}) {
+ // The meta key embeds the scheme name so one payload may carry keys
+ // for several API key schemes.
+ args = useDSL(args, func() { Meta("security:apikey:"+scheme, scheme) })
+ Attribute(name, args...)
+}
+
+// APIKeyField is syntactic sugar to define an API key attribute with the
+// "rpc:tag" meta set with the value of the first argument.
+//
+// APIKeyField takes the same arguments as APIKey with the addition of the
+// tag value as the first argument.
+//
+func APIKeyField(tag interface{}, scheme, name string, args ...interface{}) {
+ // Same meta tagging as APIKey, delegating to Field for the rpc:tag.
+ args = useDSL(args, func() { Meta("security:apikey:"+scheme, scheme) })
+ Field(tag, name, args...)
+}
+
+// AccessToken defines the attribute used to provide the access token to an
+// endpoint secured with OAuth2. The parameters and usage of AccessToken are the
+// same as the goa DSL Attribute function.
+//
+// The generated code produced by goa uses the value of the corresponding
+// payload field to initialize the Authorization header.
+//
+// AccessToken must appear in Payload or Type.
+//
+// Example:
+//
+// Method("secured", func() {
+// Security(OAuth2)
+// Payload(func() {
+// AccessToken("token", String, "OAuth2 access token used to perform authorization")
+// Required("token")
+// })
+// Result(String)
+// HTTP(func() {
+// // The "Authorization" header is defined implicitly.
+// GET("/")
+// })
+// })
+//
+func AccessToken(name string, args ...interface{}) {
+ // Tag the attribute so code generation maps it to the OAuth2 access
+ // token used in the Authorization header.
+ args = useDSL(args, func() { Meta("security:accesstoken") })
+ Attribute(name, args...)
+}
+
+// AccessTokenField is syntactic sugar to define an access token attribute with the
+// "rpc:tag" meta set with the value of the first argument.
+//
+// AccessTokenField takes the same arguments as AccessToken with the addition of the
+// tag value as the first argument.
+//
+func AccessTokenField(tag interface{}, name string, args ...interface{}) {
+ // Same meta tagging as AccessToken, delegating to Field for the rpc:tag.
+ args = useDSL(args, func() { Meta("security:accesstoken") })
+ Field(tag, name, args...)
+}
+
+// Token defines the attribute used to provide the JWT to an endpoint secured
+// via JWT. The parameters and usage of Token are the same as the goa DSL
+// Attribute function.
+//
+// The generated code produced by goa uses the value of the corresponding
+// payload field to initialize the Authorization header.
+//
+// Example:
+//
+// Method("secured", func() {
+// Security(JWT)
+// Payload(func() {
+// Token("token", String, "JWT token used to perform authorization")
+// Required("token")
+// })
+// Result(String)
+// HTTP(func() {
+// // The "Authorization" header is defined implicitly.
+// GET("/")
+// })
+// })
+//
+func Token(name string, args ...interface{}) {
+ // Tag the attribute so code generation maps it to the JWT placed in
+ // the Authorization header.
+ args = useDSL(args, func() { Meta("security:token") })
+ Attribute(name, args...)
+}
+
+// TokenField is syntactic sugar to define a JWT token attribute with the
+// "rpc:tag" meta set with the value of the first argument.
+//
+// TokenField takes the same arguments as Token with the addition of the
+// tag value as the first argument.
+//
+func TokenField(tag interface{}, name string, args ...interface{}) {
+ // Same meta tagging as Token, delegating to Field for the rpc:tag.
+ args = useDSL(args, func() { Meta("security:token") })
+ Field(tag, name, args...)
+}
+
+// Scope has two uses: in JWTSecurity or OAuth2Security it defines a scope
+// supported by the scheme. In Security it lists required scopes.
+//
+// Scope must appear in Security, JWTSecurity or OAuth2Security.
+//
+// Scope accepts one or two arguments: the first argument is the scope name and
+// when used in JWTSecurity or OAuth2Security the second argument is a
+// description.
+//
+// Example:
+//
+// var JWT = JWTSecurity("JWT", func() {
+// Scope("api:read", "Read access") // Defines a scope
+// Scope("api:write", "Write access")
+// })
+//
+// Method("secured", func() {
+// Security(JWT, func() {
+// Scope("api:read") // Required scope for auth
+// })
+// })
+//
+func Scope(name string, desc ...string) {
+ switch current := eval.Current().(type) {
+ case *expr.SecurityExpr:
+ if len(desc) >= 1 {
+ eval.ReportError("too many arguments")
+ return
+ }
+ current.Scopes = append(current.Scopes, name)
+ case *expr.SchemeExpr:
+ if len(desc) > 1 {
+ eval.ReportError("too many arguments")
+ return
+ }
+ d := "no description"
+ if len(desc) == 1 {
+ d = desc[0]
+ }
+ current.Scopes = append(current.Scopes,
+ &expr.ScopeExpr{Name: name, Description: d})
+ default:
+ eval.IncompatibleDSL()
+ }
+}
+
+// AuthorizationCodeFlow defines an authorizationCode OAuth2 flow as described
+// in section 1.3.1 of RFC 6749.
+//
+// AuthorizationCodeFlow must be used in OAuth2Security.
+//
+// AuthorizationCodeFlow accepts three arguments: the authorization, token and
+// refresh URLs.
+func AuthorizationCodeFlow(authorizationURL, tokenURL, refreshURL string) {
+ current, ok := eval.Current().(*expr.SchemeExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ if current.Kind != expr.OAuth2Kind {
+ eval.ReportError("cannot specify flow for non-oauth2 security scheme.")
+ return
+ }
+ current.Flows = append(current.Flows, &expr.FlowExpr{
+ Kind: expr.AuthorizationCodeFlowKind,
+ AuthorizationURL: authorizationURL,
+ TokenURL: tokenURL,
+ RefreshURL: refreshURL,
+ })
+}
+
+// ImplicitFlow defines an implicit OAuth2 flow as described in section 1.3.2
+// of RFC 6749.
+//
+// ImplicitFlow must be used in OAuth2Security.
+//
+// ImplicitFlow accepts two arguments: the authorization and refresh URLs.
+func ImplicitFlow(authorizationURL, refreshURL string) {
+ current, ok := eval.Current().(*expr.SchemeExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ if current.Kind != expr.OAuth2Kind {
+ eval.ReportError("cannot specify flow for non-oauth2 security scheme.")
+ return
+ }
+ current.Flows = append(current.Flows, &expr.FlowExpr{
+ Kind: expr.ImplicitFlowKind,
+ AuthorizationURL: authorizationURL,
+ RefreshURL: refreshURL,
+ })
+}
+
+// PasswordFlow defines a Resource Owner Password Credentials OAuth2 flow as
+// described in section 1.3.3 of RFC 6749.
+//
+// PasswordFlow must be used in OAuth2Security.
+//
+// PasswordFlow accepts two arguments: the token and refresh URLs.
+func PasswordFlow(tokenURL, refreshURL string) {
+ current, ok := eval.Current().(*expr.SchemeExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ if current.Kind != expr.OAuth2Kind {
+ eval.ReportError("cannot specify flow for non-oauth2 security scheme.")
+ return
+ }
+ current.Flows = append(current.Flows, &expr.FlowExpr{
+ Kind: expr.PasswordFlowKind,
+ TokenURL: tokenURL,
+ RefreshURL: refreshURL,
+ })
+}
+
+// ClientCredentialsFlow defines a clientCredentials OAuth2 flow as described
+// in section 1.3.4 of RFC 6749.
+//
+// ClientCredentialsFlow must be used in OAuth2Security.
+//
+// ClientCredentialsFlow accepts two arguments: the token and refresh URLs.
+func ClientCredentialsFlow(tokenURL, refreshURL string) {
+ current, ok := eval.Current().(*expr.SchemeExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ if current.Kind != expr.OAuth2Kind {
+ eval.ReportError("cannot specify flow for non-oauth2 security scheme.")
+ return
+ }
+ current.Flows = append(current.Flows, &expr.FlowExpr{
+ Kind: expr.ClientCredentialsFlowKind,
+ TokenURL: tokenURL,
+ RefreshURL: refreshURL,
+ })
+}
+
+func securitySchemeRedefined(name string) bool {
+ for _, s := range expr.Root.Schemes {
+ if s.SchemeName == name {
+ eval.ReportError("cannot redefine security scheme with name %q", name)
+ return true
+ }
+ }
+ return false
+}
+
+// useDSL modifies the Attribute function to use the given function as DSL,
+// merging it with any pre-existing DSL.
+func useDSL(args []interface{}, d func()) []interface{} {
+ if len(args) == 0 {
+ return []interface{}{d}
+ }
+ ds, ok := args[len(args)-1].(func())
+ if ok {
+ newdsl := func() { ds(); d() }
+ args = append(args[:len(args)-1], newdsl)
+ } else {
+ args = append(args, d)
+ }
+ return args
+}
diff --git a/vendor/goa.design/goa/dsl/server.go b/vendor/goa.design/goa/dsl/server.go
new file mode 100644
index 000000000..bb85b87e7
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/server.go
@@ -0,0 +1,201 @@
+package dsl
+
+import (
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+// Server describes a single process listening for client requests. The DSL
+// defines the set of services that the server exposes as well as host details.
+// Not defining a server in a design has the same effect as defining a single
+// server that exposes all of the services defined in the design in a single
+// host listening on "localhost" and using port 80 for HTTP endpoints and 8080
+// for GRPC endpoints.
+//
+// The Server expression is leveraged by the example generator to produce the
+// service and client commands. It is also consumed by the OpenAPI specification
+// generator. There is one specification generated per server. The first URI of
+// the first host is used to set the OpenAPI v2 specification 'host' and
+// 'basePath' values.
+//
+// Server must appear in an API expression.
+//
+// Server takes two arguments: the name of the server and the defining DSL.
+//
+// Example:
+//
+// var _ = API("calc", func() {
+// Server("calcsvr", func() {
+// Description("calcsvr hosts the Calculator Service.")
+//
+// // List the services hosted by this server.
+// Services("calc")
+//
+// // List the Hosts and their transport URLs.
+// Host("production", func() {
+// Description("Production host.")
+// // URIs can be parameterized using {param} notation.
+// URI("https://{version}.goa.design/calc")
+// URI("grpcs://{version}.goa.design")
+//
+// // Variable describes a URI variable.
+// Variable("version", String, "API version", func() {
+// // URI parameters must have a default value and/or an
+// // enum validation.
+// Default("v1")
+// })
+// })
+//
+// Host("development", func() {
+// Description("Development hosts.")
+// // Transport specific URLs, supported schemes are:
+// // 'http', 'https', 'grpc' and 'grpcs' with the respective default
+// // ports: 80, 443, 8080, 8443.
+// URI("http://localhost:80/calc")
+// URI("grpc://localhost:8080")
+// })
+// })
+// })
+//
+func Server(name string, fn ...func()) *expr.ServerExpr {
+ if len(fn) > 1 {
+ eval.ReportError("too many arguments given to Server")
+ }
+ api, ok := eval.Current().(*expr.APIExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ }
+ server := &expr.ServerExpr{Name: name}
+ if len(fn) > 0 {
+ eval.Execute(fn[0], server)
+ }
+ api.Servers = append(api.Servers, server)
+ return server
+}
+
+// Services sets the list of services implemented by a server.
+//
+// Services must appear in a Server expression
+//
+// Services takes one or more strings as argument corresponding to service
+// names.
+//
+// Example:
+//
+// var _ = Server("calcsvr", func() {
+// Services("calc", "adder")
+// Services("other") // Multiple calls to Services are OK
+// })
+//
+func Services(svcs ...string) {
+ s, ok := eval.Current().(*expr.ServerExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ s.Services = append(s.Services, svcs...)
+}
+
+// Host defines a server host. A single server may define multiple hosts. Each
+// host lists the set of URIs that identify it.
+//
+// The Host expression is leveraged by the example generator to produce the
+// service and client commands. It is also consumed by the OpenAPI specification
+// generator to initialize the server objects.
+//
+// Host must appear in a Server expression.
+//
+// Host takes two arguments: a name and a DSL function.
+//
+// Example:
+//
+// var _ = Server("calcsvc", func() {
+// Host("development", func() {
+// URI("http://localhost:80/calc")
+// URI("grpc://localhost:8080")
+// })
+// })
+//
+func Host(name string, fn func()) {
+ s, ok := eval.Current().(*expr.ServerExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ host := &expr.HostExpr{
+ Name: name,
+ ServerName: s.Name,
+ Variables: &expr.AttributeExpr{Type: &expr.Object{}},
+ }
+ eval.Execute(fn, host)
+ s.Hosts = append(s.Hosts, host)
+}
+
+// URI defines a server host URI. A single host may define multiple URIs. The
+// supported schemes are 'http', 'https', 'grpc' and 'grpcs' where 'grpcs'
+// indicates gRPC using client-side SSL/TLS. gRPC URIs may only define the
+// authority component (in particular no path). URIs may be parameterized using
+// the {param} notation. Note that the variables appearing in a URI must be
+// provided when the service is initialized and in particular their values
+// cannot differ between requests.
+//
+// The URI expression is leveraged by the example generator to produce the
+// service and client commands. It is also consumed by the OpenAPI specification
+// generator to initialize the server objects.
+//
+// URI must appear in a Host expression.
+//
+// URI takes one argument: a string representing the URI value.
+//
+// Example:
+//
+// var _ = Server("calcsvc", func() {
+// Host("development", func() {
+// URI("http://localhost:80/{version}/calc")
+// URI("grpc://localhost:8080")
+// })
+// })
+//
+func URI(uri string) {
+ h, ok := eval.Current().(*expr.HostExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ h.URIs = append(h.URIs, expr.URIExpr(uri))
+}
+
+// Variable defines a server host URI variable.
+//
+// The URI expression is leveraged by the example generator to produce the
+// service and client commands. It is also consumed by the OpenAPI specification
+// generator to initialize the server objects.
+//
+// Variable must appear in a Host expression.
+//
+// The Variable DSL is the same as the Attribute DSL with the following two
+// restrictions:
+//
+// 1. The type used to define the variable must be a primitive.
+// 2. The variable must have a default value and/or an enum validation.
+//
+// Example:
+//
+// var _ = Server("calcsvr", func() {
+// Host("production", func() {
+// URI("https://{version}.goa.design/calc")
+// URI("grpcs://{version}.goa.design")
+//
+// Variable("version", String, "API version", func() {
+// Enum("v1", "v2")
+// })
+// })
+// })
+//
+func Variable(name string, args ...interface{}) {
+ if _, ok := eval.Current().(*expr.HostExpr); !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ Attribute(name, args...)
+}
diff --git a/vendor/goa.design/goa/dsl/service.go b/vendor/goa.design/goa/dsl/service.go
new file mode 100644
index 000000000..ec52bdd78
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/service.go
@@ -0,0 +1,67 @@
+package dsl
+
+import (
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+// Service defines a group of remotely accessible methods that are hosted
+// together. The service DSL makes it possible to define the methods, their
+// input and output as well as the errors they may return independently of the
+// underlying transport (HTTP or gRPC). The transport specific DSLs defined by
+// the HTTP and GRPC functions define the mapping between the input, output and
+// error type attributes and the transport data (e.g. HTTP headers, HTTP bodies
+// or gRPC messages).
+//
+// The Service expression is leveraged by the code generators to define the
+// business layer service interface, the endpoint layer as well as the transport
+// layer including input validation, marshalling and unmarshalling. It also
+// affects the generated OpenAPI specification.
+//
+// Service is as a top level expression.
+//
+// Service accepts two arguments: the name of the service - which must be unique
+// in the design package - and its defining DSL.
+//
+// Example:
+//
+// var _ = Service("divider", func() {
+// Title("divider service") // optional
+//
+// Error("Unauthorized") // error that apply to all the service methods
+// HTTP(func() { // HTTP mapping for error responses
+// // Use HTTP status 401 for 'Unauthorized' errors.
+// Response("Unauthorized", StatusUnauthorized)
+// })
+//
+// Method("divide", func() { // Defines a service method.
+// Description("Divide divides two value.") // optional
+// Payload(DividePayload) // input type
+// Result(Float64) // output type
+// Error("DivisionByZero") // method specific error
+// // No HTTP mapping for "DivisionByZero" means default of status
+// // 400 and error struct serialized in HTTP response body.
+//
+// HTTP(func() { // Defines HTTP transport mapping.
+// GET("/div") // HTTP verb and path
+// Param("a") // query string parameter
+// Param("b") // 'a' and 'b' are attributes of DividePayload.
+// // No 'Response' DSL means default of status 200 and result
+// // marshaled in HTTP response body.
+// })
+// })
+// })
+//
+func Service(name string, fn func()) *expr.ServiceExpr {
+ if _, ok := eval.Current().(eval.TopExpr); !ok {
+ eval.IncompatibleDSL()
+ return nil
+ }
+ if s := expr.Root.Service(name); s != nil {
+ eval.ReportError("service %#v is defined twice", name)
+ return nil
+ }
+ s := &expr.ServiceExpr{Name: name, DSLFunc: fn}
+ expr.Root.Services = append(expr.Root.Services, s)
+ return s
+}
diff --git a/vendor/goa.design/goa/dsl/types.go b/vendor/goa.design/goa/dsl/types.go
new file mode 100644
index 000000000..b8b5818be
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/types.go
@@ -0,0 +1,44 @@
+package dsl
+
+import "goa.design/goa/expr"
+
+const (
+ // Boolean is the type for a JSON boolean.
+ Boolean = expr.Boolean
+
+ // Int is the type for a signed integer.
+ Int = expr.Int
+
+ // Int32 is the type for a signed 32-bit integer.
+ Int32 = expr.Int32
+
+ // Int64 is the type for a signed 64-bit integer.
+ Int64 = expr.Int64
+
+ // UInt is the type for an unsigned integer.
+ UInt = expr.UInt
+
+ // UInt32 is the type for an unsigned 32-bit integer.
+ UInt32 = expr.UInt32
+
+ // UInt64 is the type for an unsigned 64-bit integer.
+ UInt64 = expr.UInt64
+
+ // Float32 is the type for a 32-bit floating number.
+ Float32 = expr.Float32
+
+ // Float64 is the type for a 64-bit floating number.
+ Float64 = expr.Float64
+
+ // String is the type for a JSON string.
+ String = expr.String
+
+ // Bytes is the type for binary data.
+ Bytes = expr.Bytes
+
+ // Any is the type for an arbitrary JSON value (interface{} in Go).
+ Any = expr.Any
+)
+
+// Empty represents empty values.
+var Empty = expr.Empty
diff --git a/vendor/goa.design/goa/dsl/user_type.go b/vendor/goa.design/goa/dsl/user_type.go
new file mode 100644
index 000000000..645768e59
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/user_type.go
@@ -0,0 +1,281 @@
+package dsl
+
+import (
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+var (
+ // ErrorResultIdentifier is the result type identifier used for error
+ // responses.
+ ErrorResultIdentifier = expr.ErrorResultIdentifier
+
+ // ErrorResult is the built-in result type for error responses.
+ ErrorResult = expr.ErrorResult
+)
+
+// Type defines a user type. A user type has a unique name and may be an alias
+// to an existing type or may describe a completely new type using a list of
+// attributes (object fields). Attribute types may themselves be user type.
+// When a user type is defined as an alias to another type it may define
+// additional validations - for example a user type which is an alias of
+// String may define a validation pattern that all instances of the type
+// must match.
+//
+// Type is a top level definition.
+//
+// Type takes two or three arguments: the first argument is the name of the type.
+// The name must be unique. The second argument is either another type or a
+// function. If the second argument is a type then there may be a function passed
+// as third argument.
+//
+// Example:
+//
+// // simple alias
+// var MyString = Type("MyString", String)
+//
+// // alias with description and additional validation
+// var Hostname = Type("Hostname", String, func() {
+// Description("A host name")
+// Format(FormatHostname)
+// })
+//
+// // new type
+// var SumPayload = Type("SumPayload", func() {
+// Description("Type sent to add method")
+//
+// Attribute("a", String) // string attribute "a"
+// Attribute("b", Int32, "operand") // attribute with description
+// Attribute("operands", ArrayOf(Int32)) // array attribute
+// Attribute("ops", MapOf(String, Int32)) // map attribute
+// Attribute("c", SumMod) // attribute using user type
+// Attribute("len", Int64, func() { // attribute with validation
+// Minimum(1)
+// })
+//
+// Required("a") // Required attributes
+// Required("b", "c")
+// })
+//
+func Type(name string, args ...interface{}) expr.UserType {
+ if len(args) > 2 {
+ eval.ReportError("too many arguments")
+ return nil
+ }
+ if t := expr.Root.UserType(name); t != nil {
+ eval.ReportError("type %#v defined twice", name)
+ return nil
+ }
+
+ if _, ok := eval.Current().(eval.TopExpr); !ok {
+ eval.IncompatibleDSL()
+ return nil
+ }
+
+ var (
+ base expr.DataType
+ fn func()
+ )
+ if len(args) == 0 {
+ // Make Type behave like Attribute
+ args = []interface{}{expr.String}
+ }
+ switch a := args[0].(type) {
+ case expr.DataType:
+ base = a
+ if len(args) == 2 {
+ d, ok := args[1].(func())
+ if !ok {
+ eval.ReportError("third argument must be a function")
+ return nil
+ }
+ fn = d
+ }
+ case func():
+ base = &expr.Object{}
+ fn = a
+ if len(args) == 2 {
+ eval.ReportError("only one argument allowed when it is a function")
+ return nil
+ }
+ default:
+ eval.InvalidArgError("type or function", args[0])
+ return nil
+ }
+
+ t := &expr.UserTypeExpr{
+ TypeName: name,
+ AttributeExpr: &expr.AttributeExpr{Type: base, DSLFunc: fn},
+ }
+ expr.Root.Types = append(expr.Root.Types, t)
+ return t
+}
+
+// ArrayOf creates an array type from its element type.
+//
+// ArrayOf may be used wherever types can.
+// The first argument of ArrayOf is the type of the array elements specified by
+// name or by reference.
+// The second argument of ArrayOf is an optional function that defines
+// validations for the array elements.
+//
+// Examples:
+//
+// var Names = ArrayOf(String, func() {
+// Pattern("[a-zA-Z]+") // Validates elements of the array
+// })
+//
+// var Account = Type("Account", func() {
+// Attribute("bottles", ArrayOf(Bottle), "Account bottles", func() {
+// MinLength(1) // Validates array as a whole
+// })
+// })
+//
+// Note: CollectionOf and ArrayOf both return array types. CollectionOf returns
+// a result type where ArrayOf returns a user type. In general you want to use
+// CollectionOf if the argument is a result type and ArrayOf if it is a user
+// type.
+func ArrayOf(v interface{}, fn ...func()) *expr.Array {
+ var t expr.DataType
+ var ok bool
+ t, ok = v.(expr.DataType)
+ if !ok {
+ if name, ok := v.(string); ok {
+ t = expr.Root.UserType(name)
+ }
+ }
+ // never return nil to avoid panics, errors are reported after DSL execution
+ res := &expr.Array{ElemType: &expr.AttributeExpr{Type: expr.String}}
+ if t == nil {
+ eval.ReportError("invalid ArrayOf argument: not a type and not a known user type name")
+ return res
+ }
+ if len(fn) > 1 {
+ eval.ReportError("ArrayOf: too many arguments")
+ return res
+ }
+ at := expr.AttributeExpr{Type: t}
+ if len(fn) == 1 {
+ eval.Execute(fn[0], &at)
+ }
+ return &expr.Array{ElemType: &at}
+}
+
+// MapOf creates a map from its key and element types.
+//
+// MapOf may be used wherever types can.
+// MapOf takes two arguments: the key and value types either by name or by reference.
+//
+// Example:
+//
+// var ReviewByID = MapOf(Int64, String, func() {
+// Key(func() {
+// Minimum(1) // Validates keys of the map
+// })
+// Value(func() {
+// Pattern("[a-zA-Z]+") // Validates values of the map
+// })
+// })
+//
+// var Review = Type("Review", func() {
+// Attribute("ratings", MapOf(Bottle, Int32), "Bottle ratings")
+// })
+//
+func MapOf(k, v interface{}, fn ...func()) *expr.Map {
+ var tk, tv expr.DataType
+ var ok bool
+ tk, ok = k.(expr.DataType)
+ if !ok {
+ if name, ok := k.(string); ok {
+ tk = expr.Root.UserType(name)
+ }
+ }
+ tv, ok = v.(expr.DataType)
+ if !ok {
+ if name, ok := v.(string); ok {
+ tv = expr.Root.UserType(name)
+ }
+ }
+ // never return nil to avoid panics, errors are reported after DSL execution
+ res := &expr.Map{KeyType: &expr.AttributeExpr{Type: expr.String}, ElemType: &expr.AttributeExpr{Type: expr.String}}
+ if tk == nil {
+ eval.ReportError("invalid MapOf key argument: not a type and not a known user type name")
+ return res
+ }
+ if expr.IsMap(tk) {
+ eval.ReportError("invalid MapOf key type: key type must be a primitive, array, or user type")
+ return res
+ }
+ if tv == nil {
+ eval.ReportError("invalid MapOf value argument: not a type and not a known user type name")
+ return res
+ }
+ if len(fn) > 1 {
+ eval.ReportError("MapOf: too many arguments")
+ return res
+ }
+ kat := expr.AttributeExpr{Type: tk}
+ vat := expr.AttributeExpr{Type: tv}
+ m := &expr.Map{KeyType: &kat, ElemType: &vat}
+ if len(fn) == 1 {
+ mat := expr.AttributeExpr{Type: m}
+ eval.Execute(fn[0], &mat)
+ }
+ return m
+}
+
+// Key makes it possible to specify validations for map keys.
+//
+// Example:
+//
+// Attribute("map", MapOf(String, Int), func() {
+// Key(func() {
+// Format(FormatDateTime) // map keys are timestamps
+// })
+// })
+//
+func Key(fn func()) {
+ at, ok := eval.Current().(*expr.AttributeExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ if m, ok := at.Type.(*expr.Map); ok {
+ eval.Execute(fn, m.KeyType)
+ return
+ }
+ eval.IncompatibleDSL()
+}
+
+// Elem makes it possible to specify validations for array and map values.
+//
+// Example:
+//
+// Attribute("array", ArrayOf(Int), func() {
+// Elem(func() {
+// Enum(1, 2, 3, 4, 5) // list possible values for array elements
+// })
+// })
+//
+// Attribute("map", MapOf(String, Int), func() {
+// Elem(func() {
+// Minimum(1)
+// Maximum(100)
+// })
+// })
+//
+func Elem(fn func()) {
+ at, ok := eval.Current().(*expr.AttributeExpr)
+ if !ok {
+ eval.IncompatibleDSL()
+ return
+ }
+ switch e := at.Type.(type) {
+ case *expr.Array:
+ eval.Execute(fn, e.ElemType)
+ case *expr.Map:
+ eval.Execute(fn, e.ElemType)
+ default:
+ eval.IncompatibleDSL()
+ }
+}
diff --git a/vendor/goa.design/goa/dsl/validation.go b/vendor/goa.design/goa/dsl/validation.go
new file mode 100644
index 000000000..26dfc517e
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/validation.go
@@ -0,0 +1,383 @@
+package dsl
+
+import (
+ "reflect"
+ "regexp"
+ "strconv"
+
+ "goa.design/goa/eval"
+ "goa.design/goa/expr"
+)
+
+const (
+ // FormatDate describes RFC3339 date values.
+ FormatDate = expr.FormatDate
+
+ // FormatDateTime describes RFC3339 date time values.
+ FormatDateTime = expr.FormatDateTime
+
+ // FormatUUID describes RFC4122 UUID values.
+ FormatUUID = expr.FormatUUID
+
+ // FormatEmail describes RFC5322 email addresses.
+ FormatEmail = expr.FormatEmail
+
+ // FormatHostname describes RFC1035 Internet hostnames.
+ FormatHostname = expr.FormatHostname
+
+ // FormatIPv4 describes RFC2373 IPv4 address values.
+ FormatIPv4 = expr.FormatIPv4
+
+ // FormatIPv6 describes RFC2373 IPv6 address values.
+ FormatIPv6 = expr.FormatIPv6
+
+ // FormatIP describes RFC2373 IPv4 or IPv6 address values.
+ FormatIP = expr.FormatIP
+
+ // FormatURI describes RFC3986 URI values.
+ FormatURI = expr.FormatURI
+
+ // FormatMAC describes IEEE 802 MAC-48, EUI-48 or EUI-64 MAC address values.
+ FormatMAC = expr.FormatMAC
+
+ // FormatCIDR describes RFC4632 and RFC4291 CIDR notation IP address values.
+ FormatCIDR = expr.FormatCIDR
+
+ // FormatRegexp describes regular expression syntax accepted by RE2.
+ FormatRegexp = expr.FormatRegexp
+
+ // FormatJSON describes JSON text.
+ FormatJSON = expr.FormatJSON
+
+ // FormatRFC1123 describes RFC1123 date time values.
+ FormatRFC1123 = expr.FormatRFC1123
+)
+
+// Enum adds an "enum" validation to the attribute.
+// See http://json-schema.org/latest/json-schema-validation.html#anchor76.
+//
+// Example:
+//
+// Attribute("string", String, func() {
+// Enum("this", "that", "and this")
+// })
+//
+// Attribute("array", ArrayOf(Int), func() {
+// Elem(func() {
+// Enum(1, 2, 3, 4, 5) // Sets possible values for array elements
+// })
+// })
+//
+func Enum(vals ...interface{}) {
+ if a, ok := eval.Current().(*expr.AttributeExpr); ok {
+ for i, v := range vals {
+ // When can a.Type be nil? glad you asked
+ // There are two ways to write an Attribute declaration with the DSL that
+ // don't set the type: with one argument - just the name - in which case the type
+ // is set to String or with two arguments - the name and DSL. In this latter form
+ // the type can end up being either String - if the DSL does not define any
+ // attribute - or object if it does.
+ // Why allowing this? because it's not always possible to specify the type of an
+ // object - an object may just be declared inline to represent a substructure.
+ // OK then why not assuming object and not allowing for string? because the DSL
+ // where there's only one argument and the type is string implicitly is very
+ // useful and common, for example to list attributes that refer to other attributes
+ // such as responses that refer to responses defined at the API level or links that
+ // refer to the result type attributes. So if the form that takes a DSL always ended
+ // up defining an object we'd have a weird situation where one arg is string and
+ // two args is object. Breaks the least surprise principle. Soooo long story
+ // short the lesser evil seems to be to allow the ambiguity. Also tests like the
+ // one below are really a convenience to the user and not a fundamental feature
+ // - not checking in the case the type is not known yet is OK.
+ if a.Type != nil && !a.Type.IsCompatible(v) {
+ eval.ReportError("value %#v at index %d is incompatible with attribute of type %s",
+ v, i, a.Type.Name())
+ ok = false
+ }
+ }
+ if ok {
+ if a.Validation == nil {
+ a.Validation = &expr.ValidationExpr{}
+ }
+ a.Validation.Values = make([]interface{}, len(vals))
+ for i, v := range vals {
+ switch actual := v.(type) {
+ case expr.MapVal:
+ a.Validation.Values[i] = actual.ToMap()
+ case expr.ArrayVal:
+ a.Validation.Values[i] = actual.ToSlice()
+ default:
+ a.Validation.Values[i] = actual
+ }
+ }
+ }
+ }
+}
+
+// Format adds a "format" validation to the attribute.
+// See http://json-schema.org/latest/json-schema-validation.html#anchor104.
+// The formats supported by goa are:
+//
+// FormatDate: RFC3339 date
+//
+// FormatDateTime: RFC3339 date time
+//
+// FormatUUID: RFC4122 uuid
+//
+// FormatEmail: RFC5322 email address
+//
+// FormatHostname: RFC1035 internet host name
+//
+// FormatIPv4, FormatIPv6, FormatIP: RFC2373 IPv4, IPv6 address or either
+//
+// FormatURI: RFC3986 URI
+//
+// FormatMAC: IEEE 802 MAC-48, EUI-48 or EUI-64 MAC address
+//
+// FormatCIDR: RFC4632 or RFC4291 CIDR notation IP address
+//
+// FormatRegexp: RE2 regular expression
+//
+// FormatJSON: JSON text
+//
+// FormatRFC1123: RFC1123 date time
+//
+// Example:
+//
+// Attribute("created_at", String, func() {
+// Format(FormatDateTime)
+// })
+func Format(f expr.ValidationFormat) {
+ if a, ok := eval.Current().(*expr.AttributeExpr); ok {
+ if !a.IsSupportedValidationFormat(f) {
+ eval.ReportError("invalid validation format %q", f)
+ }
+ if a.Type != nil && a.Type.Kind() != expr.StringKind {
+ incompatibleAttributeType("format", a.Type.Name(), "a string")
+ } else {
+ if a.Validation == nil {
+ a.Validation = &expr.ValidationExpr{}
+ }
+ a.Validation.Format = expr.ValidationFormat(f)
+ }
+ }
+}
+
+// Pattern adds a "pattern" validation to the attribute.
+// See http://json-schema.org/latest/json-schema-validation.html#anchor33.
+//
+// Example:
+//
+// Attribute("pattern", String, func() {
+// Pattern("^[A-Z].*[0-9]$")
+// })
+//
+func Pattern(p string) {
+ if a, ok := eval.Current().(*expr.AttributeExpr); ok {
+ if a.Type != nil && a.Type.Kind() != expr.StringKind {
+ incompatibleAttributeType("pattern", a.Type.Name(), "a string")
+ } else {
+ _, err := regexp.Compile(p)
+ if err != nil {
+ eval.ReportError("invalid pattern %#v, %s", p, err)
+ } else {
+ if a.Validation == nil {
+ a.Validation = &expr.ValidationExpr{}
+ }
+ a.Validation.Pattern = p
+ }
+ }
+ }
+}
+
+// Minimum adds a "minimum" validation to the attribute.
+// See http://json-schema.org/latest/json-schema-validation.html#anchor21.
+//
+// Example:
+//
+// Attribute("integer", Int, func() {
+// Minimum(100)
+// })
+//
+func Minimum(val interface{}) {
+ if a, ok := eval.Current().(*expr.AttributeExpr); ok {
+ if a.Type != nil &&
+ a.Type.Kind() != expr.IntKind && a.Type.Kind() != expr.UIntKind &&
+ a.Type.Kind() != expr.Int32Kind && a.Type.Kind() != expr.UInt32Kind &&
+ a.Type.Kind() != expr.Int64Kind && a.Type.Kind() != expr.UInt64Kind &&
+ a.Type.Kind() != expr.Float32Kind && a.Type.Kind() != expr.Float64Kind {
+
+ incompatibleAttributeType("minimum", a.Type.Name(), "an integer or a number")
+ } else {
+ var f float64
+ switch v := val.(type) {
+ case float32, float64, int, int8, int16, int32, int64, uint8, uint16, uint32, uint64:
+ f = reflect.ValueOf(v).Convert(reflect.TypeOf(float64(0.0))).Float()
+ case string:
+ var err error
+ f, err = strconv.ParseFloat(v, 64)
+ if err != nil {
+ eval.ReportError("invalid number value %#v", v)
+ return
+ }
+ default:
+ eval.ReportError("invalid number value %#v", v)
+ return
+ }
+ if a.Validation == nil {
+ a.Validation = &expr.ValidationExpr{}
+ }
+ a.Validation.Minimum = &f
+ }
+ }
+}
+
+// Maximum adds a "maximum" validation to the attribute.
+// See http://json-schema.org/latest/json-schema-validation.html#anchor17.
+//
+// Example:
+//
+// Attribute("integer", Int, func() {
+// Maximum(100)
+// })
+//
+func Maximum(val interface{}) {
+ if a, ok := eval.Current().(*expr.AttributeExpr); ok {
+ if a.Type != nil &&
+ a.Type.Kind() != expr.IntKind && a.Type.Kind() != expr.UIntKind &&
+ a.Type.Kind() != expr.Int32Kind && a.Type.Kind() != expr.UInt32Kind &&
+ a.Type.Kind() != expr.Int64Kind && a.Type.Kind() != expr.UInt64Kind &&
+ a.Type.Kind() != expr.Float32Kind && a.Type.Kind() != expr.Float64Kind {
+
+ incompatibleAttributeType("maximum", a.Type.Name(), "an integer or a number")
+ } else {
+ var f float64
+ switch v := val.(type) {
+ case float32, float64, int, int8, int16, int32, int64, uint8, uint16, uint32, uint64:
+ f = reflect.ValueOf(v).Convert(reflect.TypeOf(float64(0.0))).Float()
+ case string:
+ var err error
+ f, err = strconv.ParseFloat(v, 64)
+ if err != nil {
+ eval.ReportError("invalid number value %#v", v)
+ return
+ }
+ default:
+ eval.ReportError("invalid number value %#v", v)
+ return
+ }
+ if a.Validation == nil {
+ a.Validation = &expr.ValidationExpr{}
+ }
+ a.Validation.Maximum = &f
+ }
+ }
+}
+
+// MinLength adds a "minItems" validation to the attribute.
+// See http://json-schema.org/latest/json-schema-validation.html#anchor45.
+//
+// Example:
+//
+// Attribute("map", MapOf(String, String), func() {
+// MinLength(10) // min key-values in map
+// Key(func() {
+// MinLength(1) // min length of map key
+// })
+// Elem(func() {
+// MinLength(5) // min length of map elements
+// })
+// })
+//
+func MinLength(val int) {
+ if a, ok := eval.Current().(*expr.AttributeExpr); ok {
+ if a.Type != nil {
+ kind := a.Type.Kind()
+ if kind != expr.BytesKind &&
+ kind != expr.StringKind &&
+ kind != expr.ArrayKind &&
+ kind != expr.MapKind {
+
+ incompatibleAttributeType("minimum length", a.Type.Name(), "a string or an array")
+ return
+ }
+ }
+ if a.Validation == nil {
+ a.Validation = &expr.ValidationExpr{}
+ }
+ a.Validation.MinLength = &val
+ }
+}
+
+// MaxLength adds a "maxItems" validation to the attribute.
+// See http://json-schema.org/latest/json-schema-validation.html#anchor42.
+//
+// Example:
+//
+// Attribute("array", ArrayOf(String), func() {
+// MaxLength(200) // max array length
+// Elem(func() {
+// MaxLength(5) // max length of each array element
+// })
+// })
+//
+func MaxLength(val int) {
+ if a, ok := eval.Current().(*expr.AttributeExpr); ok {
+ if a.Type != nil {
+ kind := a.Type.Kind()
+ if kind != expr.BytesKind &&
+ kind != expr.StringKind &&
+ kind != expr.ArrayKind &&
+ kind != expr.MapKind {
+
+ incompatibleAttributeType("maximum length", a.Type.Name(), "a string or an array")
+ return
+ }
+ }
+ if a.Validation == nil {
+ a.Validation = &expr.ValidationExpr{}
+ }
+ a.Validation.MaxLength = &val
+ }
+}
+
+// Required adds a "required" validation to the attribute.
+// See http://json-schema.org/latest/json-schema-validation.html#anchor61.
+//
+// Example:
+//
+// var _ = Type("MyType", func() {
+// Attribute("string", String)
+// Attribute("int", Int)
+// Required("string", "int")
+// })
+//
+func Required(names ...string) {
+ var at *expr.AttributeExpr
+
+ switch def := eval.Current().(type) {
+ case *expr.AttributeExpr:
+ at = def
+ case *expr.ResultTypeExpr:
+ at = def.AttributeExpr
+ default:
+ eval.IncompatibleDSL()
+ return
+ }
+
+ if at.Type != nil && !expr.IsObject(at.Type) {
+ incompatibleAttributeType("required", at.Type.Name(), "an object")
+ } else {
+ if at.Validation == nil {
+ at.Validation = &expr.ValidationExpr{}
+ }
+ at.Validation.AddRequired(names...)
+ }
+}
+
+// incompatibleAttributeType reports an error for validations defined on
+// incompatible attributes (e.g. max value on string).
+func incompatibleAttributeType(validation, actual, expected string) {
+ eval.ReportError("invalid %s validation definition: attribute must be %s (but type is %s)",
+ validation, expected, actual)
+}
diff --git a/vendor/goa.design/goa/dsl/value.go b/vendor/goa.design/goa/dsl/value.go
new file mode 100644
index 000000000..3169a3fcc
--- /dev/null
+++ b/vendor/goa.design/goa/dsl/value.go
@@ -0,0 +1,29 @@
+package dsl
+
+// Imports grouped in a single parenthesized block and sorted, per
+// gofmt/goimports convention.
+import (
+	"goa.design/goa/eval"
+	"goa.design/goa/expr"
+)
+
+// Value sets the example value.
+//
+// Value must appear in Example.
+//
+// Value takes one argument: the example value.
+//
+// Example:
+//
+// Example("A simple bottle", func() {
+// Description("This bottle has an ID set to 1")
+// Value(Val{"ID": 1})
+// })
+//
+func Value(val interface{}) {
+ switch e := eval.Current().(type) {
+ case *expr.ExampleExpr:
+ if v, ok := val.(expr.Val); ok {
+ val = map[string]interface{}(v)
+ }
+ e.Value = val
+ default:
+ eval.IncompatibleDSL()
+ }
+}
diff --git a/vendor/goa.design/goa/endpoint.go b/vendor/goa.design/goa/endpoint.go
new file mode 100644
index 000000000..0a79fa387
--- /dev/null
+++ b/vendor/goa.design/goa/endpoint.go
@@ -0,0 +1,24 @@
+package goa
+
+import "context"
+
+const (
+	// MethodKey is the request context key used to store the name of the
+	// method as defined in the design. The generated transport code
+	// initializes the corresponding value prior to invoking the endpoint.
+	MethodKey contextKey = iota + 1
+
+	// ServiceKey is the request context key used to store the name of the
+	// service as defined in the design. The generated transport code
+	// initializes the corresponding value prior to invoking the endpoint.
+	ServiceKey
+)
+
+type (
+	// private type used to define context keys. Using an unexported type
+	// prevents collisions with context keys defined by other packages.
+	contextKey int
+)
+
+// Endpoint exposes service methods to remote clients independently of the
+// underlying transport.
+type Endpoint func(ctx context.Context, request interface{}) (response interface{}, err error)
diff --git a/vendor/goa.design/goa/error.go b/vendor/goa.design/goa/error.go
new file mode 100644
index 000000000..7c080d907
--- /dev/null
+++ b/vendor/goa.design/goa/error.go
@@ -0,0 +1,209 @@
+package goa
+
+import (
+ "crypto/rand"
+ "encoding/base64"
+ "fmt"
+ "io"
+ "strings"
+)
+
+type (
+	// ServiceError is the default error type used by the goa package to
+	// encode and decode error responses.
+	ServiceError struct {
+		// Name is a name for that class of errors.
+		Name string
+		// ID is a unique value for each occurrence of the error.
+		ID string
+		// Message contains the specific error details.
+		Message string
+		// Timeout reports whether the error is due to a timeout.
+		Timeout bool
+		// Temporary reports whether the error is temporary and the
+		// request may be retried.
+		Temporary bool
+		// Fault reports whether the error is a server-side fault.
+		Fault bool
+	}
+)
+
+// Fault creates an error given a format and values a la fmt.Printf. The error
+// has the Fault field set to true and is named "fault".
+func Fault(format string, v ...interface{}) *ServiceError {
+	return newError("fault", false, false, true, format, v...)
+}
+
+// PermanentError creates an error given a name and a format and values a la
+// fmt.Printf. The Timeout, Temporary and Fault fields are all false.
+func PermanentError(name, format string, v ...interface{}) *ServiceError {
+	return newError(name, false, false, false, format, v...)
+}
+
+// TemporaryError is an error class that indicates that the error is temporary
+// and that retrying the request may be successful. TemporaryError creates an
+// error given a name and a format and values a la fmt.Printf. The error has the
+// Temporary field set to true.
+func TemporaryError(name, format string, v ...interface{}) *ServiceError {
+	return newError(name, false, true, false, format, v...)
+}
+
+// PermanentTimeoutError creates an error given a name and a format and values a
+// la fmt.Printf. The error has the Timeout field set to true.
+func PermanentTimeoutError(name, format string, v ...interface{}) *ServiceError {
+	return newError(name, true, false, false, format, v...)
+}
+
+// TemporaryTimeoutError creates an error given a name and a format and values a
+// la fmt.Printf. The error has both the Timeout and Temporary fields set to
+// true.
+func TemporaryTimeoutError(name, format string, v ...interface{}) *ServiceError {
+	return newError(name, true, true, false, format, v...)
+}
+
+// MissingPayloadError is the error produced by the generated code when a
+// request is missing a required payload. The error is named "missing_payload".
+func MissingPayloadError() error {
+	return PermanentError("missing_payload", "missing required payload")
+}
+
+// DecodePayloadError is the error produced by the generated code when a request
+// body cannot be decoded successfully. The error is named "decode_payload".
+func DecodePayloadError(msg string) error {
+	return PermanentError("decode_payload", msg)
+}
+
+// InvalidFieldTypeError is the error produced by the generated code when the
+// type of a payload field does not match the type defined in the design.
+func InvalidFieldTypeError(name string, val interface{}, expected string) error {
+	return PermanentError("invalid_field_type", "invalid value %#v for %q, must be a %s", val, name, expected)
+}
+
+// MissingFieldError is the error produced by the generated code when a payload
+// is missing a required field. context names the enclosing structure.
+func MissingFieldError(name, context string) error {
+	return PermanentError("missing_field", "%q is missing from %s", name, context)
+}
+
+// InvalidEnumValueError is the error produced by the generated code when the
+// value of a payload field does not match one the values defined in the design
+// Enum validation.
+func InvalidEnumValueError(name string, val interface{}, allowed []interface{}) error {
+	// Render each allowed value with %#v for an unambiguous message.
+	elems := make([]string, len(allowed))
+	for i, a := range allowed {
+		elems[i] = fmt.Sprintf("%#v", a)
+	}
+	return PermanentError("invalid_enum_value", "value of %s must be one of %s but got value %#v", name, strings.Join(elems, ", "), val)
+}
+
+// InvalidFormatError is the error produced by the generated code when the value
+// of a payload field does not match the format validation defined in the
+// design.
+func InvalidFormatError(name, target string, format Format, formatError error) error {
+	return PermanentError("invalid_format", "%s must be formatted as a %s but got value %q, %s", name, format, target, formatError.Error())
+}
+
+// InvalidPatternError is the error produced by the generated code when the
+// value of a payload field does not match the pattern validation defined in the
+// design.
+func InvalidPatternError(name, target string, pattern string) error {
+	return PermanentError("invalid_pattern", "%s must match the regexp %q but got value %q", name, pattern, target)
+}
+
+// InvalidRangeError is the error produced by the generated code when the value
+// of a payload field does not match the range validation defined in the design.
+// value may be an int or a float64.
+func InvalidRangeError(name string, target interface{}, value interface{}, min bool) error {
+	comp := "greater or equal"
+	if !min {
+		comp = "lesser or equal"
+	}
+	// Use %v rather than %d for the bound: value may be a float64 (range
+	// validations apply to both integers and floats per the doc comment)
+	// and %d would render it as "%!d(float64=...)".
+	return PermanentError("invalid_range", "%s must be %s than %v but got value %#v", name, comp, value, target)
+}
+
+// InvalidLengthError is the error produced by the generated code when the value
+// of a payload field does not match the length validation defined in the
+// design.
+func InvalidLengthError(name string, target interface{}, ln, value int, min bool) error {
+ comp := "greater or equal"
+ if !min {
+ comp = "lesser or equal"
+ }
+ return PermanentError("invalid_length", "length of %s must be %s than %d but got value %#v (len=%d)", name, comp, value, target, ln)
+}
+
+// NewErrorID creates a unique 8 character ID that is well suited to use as an
+// error identifier.
+func NewErrorID() string {
+	// for the curious - simplifying a bit - the probability of 2 values
+	// being equal for n 6-bytes values is n^2 / 2^49. For n = 1 million
+	// this gives around 1 chance in 500. 6 bytes seems to be a good
+	// trade-off between probability of clashes and length of ID (6 * 4/3 =
+	// 8 chars) since clashes are not catastrophic.
+	b := make([]byte, 6)
+	// NOTE(review): the ReadFull error is deliberately ignored; on the
+	// rare failure the ID is simply less random, which is acceptable here.
+	io.ReadFull(rand.Reader, b)
+	return base64.RawURLEncoding.EncodeToString(b)
+}
+
+// MergeErrors updates an error by merging another into it. It first converts
+// other into a ServiceError if not already one. The merge algorithm then:
+//
+// * uses the name of err if a ServiceError, the name of other otherwise.
+//
+// * appends both error messages.
+//
+// * computes Timeout and Temporary by "and"ing the fields of both errors.
+//
+// Merge returns the updated error. This makes it possible to return other when
+// err is nil.
+func MergeErrors(err, other error) error {
+	switch {
+	case err == nil:
+		// Covers both nil: returning a nil other yields nil.
+		return other
+	case other == nil:
+		return err
+	}
+	merged := asError(err)
+	src := asError(other)
+	// Adopt the other error's name when err was a wrapped foreign error.
+	if merged.Name == "error" {
+		merged.Name = src.Name
+	}
+	merged.Message += "; " + src.Message
+	merged.Timeout = merged.Timeout && src.Timeout
+	merged.Temporary = merged.Temporary && src.Temporary
+	merged.Fault = merged.Fault && src.Fault
+	return merged
+}
+
+// Error returns the error message.
+func (s *ServiceError) Error() string {
+	return s.Message
+}
+
+// ErrorName returns the error name.
+func (s *ServiceError) ErrorName() string {
+	return s.Name
+}
+
+// newError builds a ServiceError with a freshly generated ID and a message
+// formatted a la fmt.Printf.
+func newError(name string, timeout, temporary, fault bool, format string, v ...interface{}) *ServiceError {
+	e := ServiceError{
+		Name:      name,
+		ID:        NewErrorID(),
+		Message:   fmt.Sprintf(format, v...),
+		Timeout:   timeout,
+		Temporary: temporary,
+		Fault:     fault,
+	}
+	return &e
+}
+
+// asError converts err into a *ServiceError, wrapping foreign errors in a
+// ServiceError named "error" with Fault set.
+func asError(err error) *ServiceError {
+	if e, ok := err.(*ServiceError); ok {
+		return e
+	}
+	return &ServiceError{
+		Name:    "error",
+		ID:      NewErrorID(),
+		Message: err.Error(),
+		Fault:   true, // Default to fault for unexpected errors
+	}
+}
diff --git a/vendor/goa.design/goa/eval/context.go b/vendor/goa.design/goa/eval/context.go
new file mode 100644
index 000000000..3ada28c6c
--- /dev/null
+++ b/vendor/goa.design/goa/eval/context.go
@@ -0,0 +1,160 @@
+package eval
+
+import (
+ "fmt"
+)
+
+// Context contains the state used by the engine to execute the DSL.
+var Context *DSLContext
+
+type (
+	// DSLContext is the data structure that contains the DSL execution state.
+	DSLContext struct {
+		// Stack represents the current execution stack.
+		Stack Stack
+		// Errors contains the DSL execution errors for the current
+		// expression set.
+		// Errors is an instance of MultiError.
+		Errors error
+
+		// roots is the list of DSL roots as registered by all loaded
+		// DSLs.
+		roots []Root
+		// dslPackages keeps track of the DSL package import paths so
+		// the initiator may skip any callstack frame that belongs to
+		// them when computing error locations.
+		dslPackages []string
+	}
+
+	// Stack represents the expression evaluation stack. The stack is
+	// appended to each time the initiator executes an expression source
+	// DSL. The last element is the expression currently being built.
+	Stack []Expression
+)
+
+func init() {
+	// Initialize the global context so DSL packages can register roots
+	// at load time.
+	Reset()
+}
+
+// Reset resets the eval context, mostly useful for tests. The eval package
+// itself is always part of the skipped DSL packages.
+func Reset() {
+	Context = &DSLContext{dslPackages: []string{"goa.design/goa/eval"}}
+}
+
+// Register appends a root expression to the current Context root expressions.
+// Each root expression may only be registered once.
+func Register(r Root) error {
+	name := r.EvalName()
+	for _, existing := range Context.roots {
+		if existing.EvalName() == name {
+			return fmt.Errorf("duplicate DSL %s", name)
+		}
+	}
+	// Track the root's packages so error locations skip their frames.
+	Context.dslPackages = append(Context.dslPackages, r.Packages()...)
+	Context.roots = append(Context.roots, r)
+	return nil
+}
+
+// Current evaluation context, i.e. object being currently built by DSL.
+// Returns nil when the stack is empty.
+func (s Stack) Current() Expression {
+	if n := len(s); n > 0 {
+		return s[n-1]
+	}
+	return nil
+}
+
+// Error builds the error message from the current context errors. It returns
+// the empty string when no error was recorded.
+func (c *DSLContext) Error() string {
+	if c.Errors == nil {
+		return ""
+	}
+	return c.Errors.Error()
+}
+
+// Roots orders the DSL roots making sure dependencies are last. It returns an
+// error if there is a dependency cycle.
+func (c *DSLContext) Roots() ([]Root, error) {
+	// Flatten dependencies for each root
+	rootDeps := make(map[string][]Root, len(c.roots))
+	rootByName := make(map[string]Root, len(c.roots))
+	for _, r := range c.roots {
+		sorted := sortDependencies(c.roots, r, func(r Root) []Root { return r.DependsOn() })
+		length := len(sorted)
+		// Reverse in place: sortDependencies yields dependencies
+		// first, the map stores them dependencies last.
+		for i := 0; i < length/2; i++ {
+			sorted[i], sorted[length-i-1] = sorted[length-i-1], sorted[i]
+		}
+		rootDeps[r.EvalName()] = sorted
+		rootByName[r.EvalName()] = r
+	}
+	// Check for cycles
+	for name, deps := range rootDeps {
+		root := rootByName[name]
+		for otherName, otherdeps := range rootDeps {
+			other := rootByName[otherName]
+			if root.EvalName() == other.EvalName() {
+				continue
+			}
+			// Does root depend (directly or transitively) on other?
+			dependsOnOther := false
+			for _, dep := range deps {
+				if dep.EvalName() == other.EvalName() {
+					dependsOnOther = true
+					break
+				}
+			}
+			// If so, other must not depend back on root.
+			if dependsOnOther {
+				for _, dep := range otherdeps {
+					if dep.EvalName() == root.EvalName() {
+						return nil, fmt.Errorf("dependency cycle: %s and %s depend on each other (directly or not)",
+							root.EvalName(), other.EvalName())
+					}
+				}
+			}
+		}
+	}
+	// Now sort top level DSLs
+	var sorted []Root
+	for _, r := range c.roots {
+		s := sortDependencies(c.roots, r, func(r Root) []Root { return rootDeps[r.EvalName()] })
+		for _, s := range s {
+			// Dedupe: a root may appear in several dependency lists.
+			found := false
+			for _, r := range sorted {
+				if r.EvalName() == s.EvalName() {
+					found = true
+					break
+				}
+			}
+			if !found {
+				sorted = append(sorted, s)
+			}
+		}
+	}
+	return sorted, nil
+}
+
+// Record appends an error to the context Errors field, initializing the
+// MultiError on first use.
+func (c *DSLContext) Record(err *Error) {
+	merr, ok := c.Errors.(MultiError)
+	if !ok {
+		merr = MultiError{}
+	}
+	c.Errors = append(merr, err)
+}
+
+// sortDependencies sorts the dependencies of the given root in the given slice.
+func sortDependencies(roots []Root, root Root, depFunc func(Root) []Root) []Root {
+	seen := make(map[string]bool, len(roots))
+	var sorted []Root
+	sortDependenciesR(root, seen, &sorted, depFunc)
+	return sorted
+}
+
+// sortDependenciesR recursively appends the dependencies of root to sorted in
+// depth-first order, then appends root itself.
+func sortDependenciesR(root Root, seen map[string]bool, sorted *[]Root, depFunc func(Root) []Root) {
+	for _, dep := range depFunc(root) {
+		if !seen[dep.EvalName()] {
+			// NOTE(review): root (not dep) is marked as seen here -
+			// confirm this is intentional before changing it.
+			seen[root.EvalName()] = true
+			sortDependenciesR(dep, seen, sorted, depFunc)
+		}
+	}
+	*sorted = append(*sorted, root)
+}
diff --git a/vendor/goa.design/goa/eval/doc.go b/vendor/goa.design/goa/eval/doc.go
new file mode 100644
index 000000000..15ae15666
--- /dev/null
+++ b/vendor/goa.design/goa/eval/doc.go
@@ -0,0 +1,121 @@
+/*
+Package eval implements a DSL engine for executing arbitrary Go DSLs.
+
+DSLs executed via eval consist of package functions that build up expressions
+upon execution.
+
+A DSL that allows describing a service and its methods could look like this:
+
+ var _ = Service("service name") // Defines the service "service name"
+
+ var _ = Method("method name", func() { // Defines the method "method name"
+ Description("some method description") // Sets the method description
+ })
+
+DSL keywords are simply package functions that can be nested using anonymous
+functions as last argument. Upon execution the DSL functions create expression
+structs. The expression structs created by the top level functions on process
+start (both Service and Method in this example) should be stored in special
+expressions called root expressions. The DSL implements both the expression and
+root expression structs, the only requirement is that they implement the eval
+package Expression and Root interfaces respectively.
+
+Keeping with the example above, Method creates instances of the following
+MethodExpression struct:
+
+ type MethodExpression struct {
+ Name string
+ DSLFunc func()
+ }
+
+where Name gets initialized with the first argument and DSLFunc with the second.
+ServiceExpression is the root expression that contains the instances of
+MethodExpression created by the Method function:
+
+ type ServiceExpression struct {
+ Name string
+ Methods []eval.Expression
+ }
+
+The Method DSL function simply initializes a MethodExpression and stores it
+in the Methods field of the root ServiceExpression:
+
+ func Method(name string, fn func()) {
+ ep := &MethodExpression{Name: name, DSLFunc: fn}
+ Design.Methods = append(Design.Methods, ep)
+ }
+
+where Design is a package variable holding the ServiceExpression root
+expression:
+
+ // Design is the DSL root expression.
+ var Design *ServiceExpression = &ServiceExpression{}
+
+The Service function simply sets the Name field of Service:
+
+ func Service(name string) {
+ Design.Name = name
+ }
+
+Once the process is loaded the Design package variable contains an instance of
+ServiceExpression which in turn contains all the instances of MethodExpression
+that were created via the Method function. Note that at this point the
+Description function used in the Method DSL hasn't run yet as it is called by
+the anonymous function stored in the DSLFunc field of each MethodExpression
+instance. This is where the RunDSL function of package eval comes in.
+
+RunDSL iterates over the initial set of root expressions and calls the
+WalkSets method exposed by the Root interface. This method lets the DSL
+engine iterate over the sub-expressions that were initialized when the process
+loaded.
+
+In this example the ServiceExpression implementation of WalkSets simply
+passes the Methods field to the iterator:
+
+ func (se *ServiceExpression) WalkSets(it eval.SetWalker) {
+ it(se.Methods)
+ }
+
+Each expression in an expression set may optionally implement the Source,
+Validator and Finalizer interfaces:
+
+- Expressions that are initialized via a child DSL implement Source which
+ provides RunDSL with the corresponding anonymous function.
+
+- Expressions that need to be validated implement the Validator interface.
+
+- Expressions that require an additional pass after validation implement the
+ Finalizer interface.
+
+In our example MethodExpression implements Source and returns its DSLFunc
+member in the implementation of the Source interface DSL function:
+
+ func (ep *MethodExpression) Source() func() {
+ return ep.DSLFunc
+ }
+
+MethodExpression could also implement the Validator Validate method to check
+that the name of the method is not empty for example.
+
+The execution of the DSL thus happens in three phases: in the first phase RunDSL
+executes all the DSLs of all the source expressions in each expression set. In
+this initial phase the DSLs being executed may append to the expression set
+and/or may register new expression roots. In the second phase RunDSL validates
+all the validator expressions and in the last phase it calls Finalize on all the
+finalizer expressions.
+
+The eval package exposes functions that the implementation of the DSL can take
+advantage of to report errors, such as ReportError, InvalidArg and
+IncompatibleDSL. The engine records the errors being reported but keeps running
+the current phase so that multiple errors may be reported at once. This means
+that the DSL implementation must maintain a consistent state for the duration of
+one iteration even though some input may be incorrect (for example it may elect
+to create default value expressions instead of leaving them nil to avoid panics
+later on).
+
+The package exposes other helper functions such as Execute which allows running
+a DSL function manually or IsTop which reports whether the expression being
+currently built is a top level expression (such as Service and Method in our
+example).
+*/
+package eval
diff --git a/vendor/goa.design/goa/eval/error.go b/vendor/goa.design/goa/eval/error.go
new file mode 100644
index 000000000..c655c99e4
--- /dev/null
+++ b/vendor/goa.design/goa/eval/error.go
@@ -0,0 +1,87 @@
+package eval
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+)
+
+type (
+	// Error represents an error that occurred while evaluating the DSL.
+	// It contains the name of the file and line number of where the error
+	// occurred as well as the original Go error.
+	Error struct {
+		// GoError is the original error returned by the DSL function.
+		GoError error
+		// File is the path to the file containing the user code that
+		// caused the error. Empty when the location is unknown.
+		File string
+		// Line is the line number that caused the error.
+		Line int
+	}
+
+	// MultiError collects multiple DSL errors. It implements error.
+	MultiError []*Error
+)
+
+// Error returns the error message: one line per collected error.
+func (m MultiError) Error() string {
+	lines := make([]string, 0, len(m))
+	for _, e := range m {
+		lines = append(lines, e.Error())
+	}
+	return strings.Join(lines, "\n")
+}
+
+// Error returns the underlying error message, prefixed with the file and
+// line location when known. Returns the empty string when GoError is nil.
+func (e *Error) Error() string {
+	err := e.GoError
+	if err == nil {
+		return ""
+	}
+	if e.File == "" {
+		return err.Error()
+	}
+	return fmt.Sprintf("[%s:%d] %s", e.File, e.Line, err.Error())
+}
+
+// computeErrorLocation implements a heuristic to find the location in the user
+// code where the error occurred. It walks back the callstack until the file
+// doesn't match "/goa/design/*.go" or one of the DSL package paths.
+// When successful it returns the file name and line number, empty string and
+// 0 otherwise.
+func computeErrorLocation() (file string, line int) {
+	skipFunc := func(file string) bool {
+		if strings.HasSuffix(file, "_test.go") { // Be nice with tests
+			return false
+		}
+		file = filepath.ToSlash(file)
+		// Skip any frame that lives in a registered DSL package.
+		for _, pkg := range Context.dslPackages {
+			if strings.Contains(file, pkg) {
+				return true
+			}
+		}
+		return false
+	}
+	// Start 3 frames up (computeErrorLocation -> ReportError -> DSL func)
+	// then keep walking while the frame belongs to a DSL package.
+	depth := 3
+	_, file, line, _ = runtime.Caller(depth)
+	for skipFunc(file) {
+		depth++
+		_, file, line, _ = runtime.Caller(depth)
+	}
+	// Make the path relative to the working directory when possible; on
+	// any failure return the absolute path as-is.
+	wd, err := os.Getwd()
+	if err != nil {
+		return
+	}
+	wd, err = filepath.Abs(wd)
+	if err != nil {
+		return
+	}
+	f, err := filepath.Rel(wd, file)
+	if err != nil {
+		return
+	}
+	file = f
+	return
+}
diff --git a/vendor/goa.design/goa/eval/eval.go b/vendor/goa.design/goa/eval/eval.go
new file mode 100644
index 000000000..638c301ee
--- /dev/null
+++ b/vendor/goa.design/goa/eval/eval.go
@@ -0,0 +1,250 @@
+package eval
+
+import (
+ "fmt"
+ "path/filepath"
+ "reflect"
+ "runtime"
+ "strings"
+)
+
+// RunDSL iterates through the root expressions and calls WalkSets on each to
+// retrieve the expression sets. It iterates over the expression sets multiple
+// times to first execute the DSL, then validate the resulting expressions and
+// lastly to finalize them. The executed DSL may register additional roots
+// during initial execution via Register to have them be executed (last) in the
+// same run.
+func RunDSL() error {
+	roots, err := Context.Roots()
+	if err != nil {
+		return err
+	}
+	if len(roots) == 0 {
+		return nil
+	}
+	// Pass 1: execute the DSLs.
+	// NOTE(review): roots is computed once above; the loop guard suggests
+	// newly registered roots should be picked up but the local slice never
+	// grows - confirm against Register's intended semantics.
+	executed := 0
+	recursed := 0
+	for executed < len(roots) {
+		recursed++
+		start := executed
+		executed = len(roots)
+		for _, root := range roots[start:] {
+			root.WalkSets(runSet)
+		}
+		if recursed > 100 {
+			// Let's cross that bridge once we get there
+			return fmt.Errorf("too many generated roots, infinite loop?")
+		}
+	}
+	if Context.Errors != nil {
+		return Context.Errors
+	}
+	// Pass 2: prepare (e.g. flatten inheritance) before validation.
+	for _, root := range roots {
+		prepareSet(ExpressionSet{root})
+		root.WalkSets(prepareSet)
+	}
+	// Pass 3: validate. Roots themselves are validated too.
+	for _, root := range roots {
+		validateSet(ExpressionSet{root})
+		root.WalkSets(validateSet)
+	}
+	if Context.Errors != nil {
+		return Context.Errors
+	}
+	// Pass 4: finalize, only reached when validation produced no error.
+	for _, root := range roots {
+		finalizeSet(ExpressionSet{root})
+		root.WalkSets(finalizeSet)
+	}
+
+	return nil
+}
+
+// runSet executes the DSL for all expressions in the given set. The expression
+// DSLs may append to the set as they execute.
+func runSet(set ExpressionSet) error {
+	executed := 0
+	recursed := 0
+	// Re-scan from where the previous pass stopped until no new
+	// expression remains to be executed.
+	for executed < len(set) {
+		recursed++
+		for _, def := range set[executed:] {
+			executed++
+			if def == nil {
+				continue
+			}
+			if source, ok := def.(Source); ok {
+				Execute(source.DSL(), def)
+			}
+		}
+		if recursed > 100 {
+			return fmt.Errorf("too many generated expressions, infinite loop?")
+		}
+	}
+	return nil
+}
+
+// prepareSet runs the pre validation steps on all the set expressions that
+// define one (i.e. implement Preparer).
+func prepareSet(set ExpressionSet) error {
+	for _, def := range set {
+		if def == nil {
+			continue
+		}
+		if preparer, ok := def.(Preparer); ok {
+			preparer.Prepare()
+		}
+	}
+	return nil
+}
+
+// validateSet runs the validation on all the set expressions that define one.
+func validateSet(set ExpressionSet) error {
+	errors := &ValidationErrors{}
+	for _, def := range set {
+		if def == nil {
+			continue
+		}
+		if validate, ok := def.(Validator); ok {
+			if err := validate.Validate(); err != nil {
+				errors.AddError(def, err)
+			}
+		}
+	}
+	if len(errors.Errors) > 0 {
+		Context.Record(&Error{GoError: errors})
+	}
+	// Returns the accumulated context errors, which may also include
+	// errors recorded while validating previous sets.
+	return Context.Errors
+}
+
+// finalizeSet runs the finalizer on all the set expressions that define one
+// (i.e. implement Finalizer).
+func finalizeSet(set ExpressionSet) error {
+	for _, def := range set {
+		if def == nil {
+			continue
+		}
+		if finalizer, ok := def.(Finalizer); ok {
+			finalizer.Finalize()
+		}
+	}
+	return nil
+}
+
+// Execute runs the given DSL to initialize the given expression. It returns
+// true on success. It returns false and appends to Context.Errors on failure.
+// Note that Run takes care of calling Execute on all expressions that implement
+// Source. This function is intended for use by expressions that run the DSL at
+// declaration time rather than store the DSL for execution by the dsl engine
+// (usually simple independent expressions). The DSL should use ReportError to
+// record DSL execution errors.
+func Execute(fn func(), def Expression) bool {
+ if fn == nil {
+ return true
+ }
+ var startCount int
+ if Context.Errors != nil {
+ startCount = len(Context.Errors.(MultiError))
+ }
+ Context.Stack = append(Context.Stack, def)
+ fn()
+ Context.Stack = Context.Stack[:len(Context.Stack)-1]
+ var endCount int
+ if Context.Errors != nil {
+ endCount = len(Context.Errors.(MultiError))
+ }
+ return endCount <= startCount
+}
+
+// Current returns the expression whose DSL is currently being executed.
+// As a special case Current returns Top when the execution stack is empty.
+func Current() Expression {
+	if e := Context.Stack.Current(); e != nil {
+		return e
+	}
+	return Top
+}
+
+// ReportError records a DSL error for reporting post DSL execution. It accepts
+// a format and values a la fmt.Printf.
+func ReportError(fm string, vals ...interface{}) {
+ var suffix string
+ if cur := Context.Stack.Current(); cur != nil {
+ if name := cur.EvalName(); name != "" {
+ suffix = fmt.Sprintf(" in %s", name)
+ }
+ } else {
+ suffix = " (top level)"
+ }
+ err := fmt.Errorf(fm+suffix, vals...)
+ file, line := computeErrorLocation()
+ Context.Record(&Error{
+ GoError: err,
+ File: file,
+ Line: line,
+ })
+}
+
+// IncompatibleDSL should be called by DSL functions when they are invoked in an
+// incorrect context (e.g. "Params" in "Service").
+func IncompatibleDSL() {
+	// caller() yields the fully qualified name of the DSL function that
+	// invoked IncompatibleDSL; keep only the last dot-separated element.
+	elems := strings.Split(caller(), ".")
+	ReportError("invalid use of %s", elems[len(elems)-1])
+}
+
+// InvalidArgError records an invalid argument error. It is used by DSL
+// functions that take dynamic arguments.
+func InvalidArgError(expected string, actual interface{}) {
+	ReportError("cannot use %#v (type %s) as type %s",
+		actual, reflect.TypeOf(actual), expected)
+}
+
+// ValidationErrors records the errors encountered when running Validate.
+// Errors and Expressions are parallel slices: Expressions[i] is the
+// expression that produced Errors[i].
+type ValidationErrors struct {
+	Errors      []error
+	Expressions []Expression
+}
+
+// Error implements the error interface: one "<expression>: <error>" line per
+// recorded error.
+func (verr *ValidationErrors) Error() string {
+	lines := make([]string, len(verr.Errors))
+	for i := range verr.Errors {
+		lines[i] = fmt.Sprintf("%s: %s", verr.Expressions[i].EvalName(), verr.Errors[i])
+	}
+	return strings.Join(lines, "\n")
+}
+
+// Merge merges validation errors into the target. A nil argument is a no-op.
+func (verr *ValidationErrors) Merge(err *ValidationErrors) {
+	if err != nil {
+		verr.Errors = append(verr.Errors, err.Errors...)
+		verr.Expressions = append(verr.Expressions, err.Expressions...)
+	}
+}
+
+// Add adds a validation error to the target, formatting the message a la
+// fmt.Printf.
+func (verr *ValidationErrors) Add(def Expression, format string, vals ...interface{}) {
+	err := fmt.Errorf(format, vals...)
+	verr.AddError(def, err)
+}
+
+// AddError adds a validation error to the target. It "flattens" validation
+// errors so that the recorded errors are never ValidationErrors themselves.
+func (verr *ValidationErrors) AddError(def Expression, err error) {
+	if nested, ok := err.(*ValidationErrors); ok {
+		verr.Errors = append(verr.Errors, nested.Errors...)
+		verr.Expressions = append(verr.Expressions, nested.Expressions...)
+	} else {
+		verr.Errors = append(verr.Errors, err)
+		verr.Expressions = append(verr.Expressions, def)
+	}
+}
+
+// caller returns the name of calling function.
+func caller() string {
+	pc, file, _, ok := runtime.Caller(2)
+	// NOTE(review): this special case skips one extra frame when the
+	// immediate caller lives in "current.go"; no such file exists in this
+	// package - confirm whether the check is stale.
+	if ok && filepath.Base(file) == "current.go" {
+		pc, _, _, ok = runtime.Caller(3)
+	}
+	if !ok {
+		return ""
+	}
+
+	return runtime.FuncForPC(pc).Name()
+}
diff --git a/vendor/goa.design/goa/eval/expression.go b/vendor/goa.design/goa/eval/expression.go
new file mode 100644
index 000000000..b30218384
--- /dev/null
+++ b/vendor/goa.design/goa/eval/expression.go
@@ -0,0 +1,101 @@
+package eval
+
+type (
+	// Expression built by the engine through the DSL functions.
+	Expression interface {
+		// EvalName is the qualified name of the DSL expression e.g.
+		// "service bottle".
+		EvalName() string
+	}
+
+	// A Root expression represents an entry point to the executed DSL: upon
+	// execution the DSL engine iterates over all root expressions and calls
+	// their WalkSets methods to iterate over the sub-expressions.
+	Root interface {
+		Expression
+		// WalkSets implements the visitor pattern: it is called by
+		// the engine so the DSL can control the order of execution.
+		// WalkSets calls back the engine via the given iterator as
+		// many times as needed providing the expression sets on each
+		// callback.
+		WalkSets(SetWalker)
+		// DependsOn returns the list of other DSL roots this root
+		// depends on. The engine uses this function to order the
+		// execution of the DSL roots.
+		DependsOn() []Root
+		// Packages returns the import path to the Go packages that make
+		// up the DSL. This is used to skip frames that point to files
+		// in these packages when computing the location of errors.
+		Packages() []string
+	}
+
+	// A Source expression embeds DSL to be executed after the process is
+	// loaded.
+	Source interface {
+		// DSL returns the DSL used to initialize the expression in a
+		// second pass.
+		DSL() func()
+	}
+
+	// A Preparer expression requires an additional pass after the DSL has
+	// executed and BEFORE it is validated (e.g. to flatten inheritance).
+	Preparer interface {
+		// Prepare is run by the engine right after the DSL has run.
+		// Prepare cannot fail, any potential failure should be returned
+		// by implementing Validator instead.
+		Prepare()
+	}
+
+	// A Validator expression can be validated.
+	Validator interface {
+		// Validate runs after Prepare if the expression is a Preparer.
+		// It returns nil if the expression contains no validation
+		// error. The Validate implementation may take advantage of
+		// ValidationErrors to report more than one error at a time.
+		Validate() error
+	}
+
+	// A Finalizer expression requires an additional pass after the DSL has
+	// executed and has been validated (e.g. to merge generated expressions
+	// or initialize default values).
+	Finalizer interface {
+		// Finalize is run by the engine as the last step. Finalize
+		// cannot fail, any potential failure should be returned by
+		// implementing Validator instead.
+		Finalize()
+	}
+
+	// DSLFunc is a type that DSL expressions may embed to store DSL. It
+	// implements Source.
+	DSLFunc func()
+
+	// TopExpr is the type of Top.
+	TopExpr string
+
+	// ExpressionSet is a sequence of expressions processed in order. Each
+	// DSL implementation provides an arbitrary number of expression sets to
+	// the engine via iterators (see the Root interface WalkSets method).
+	//
+	// The items in the set may implement the Source, Validator and/or
+	// Finalizer interfaces to enable the corresponding behaviors during DSL
+	// execution. The engine first runs the expression DSLs (for the ones
+	// that implement Source) then validates them (for the ones that
+	// implement Validator) and finalizes them (for the ones that implement
+	// Finalizer).
+	ExpressionSet []Expression
+
+	// SetWalker is the function signature used to iterate over expression
+	// sets with WalkSets.
+	SetWalker func(s ExpressionSet) error
+)
+
+// Top is the expression returned by Current when the execution stack is empty.
+const Top TopExpr = "top-level"
+
+// DSL returns the DSL function.
+func (f DSLFunc) DSL() func() {
+	return f
+}
+
+// EvalName is the qualified name of the expression.
+func (t TopExpr) EvalName() string { return string(t) }
diff --git a/vendor/goa.design/goa/expr/api.go b/vendor/goa.design/goa/expr/api.go
new file mode 100644
index 000000000..80c29f05e
--- /dev/null
+++ b/vendor/goa.design/goa/expr/api.go
@@ -0,0 +1,157 @@
+package expr
+
+import (
+ "sort"
+
+ "goa.design/goa/eval"
+)
+
+type (
+ // APIExpr contains the global properties for a API expression.
+ APIExpr struct {
+ // DSLFunc contains the DSL used to initialize the expression.
+ eval.DSLFunc
+ // Name of API
+ Name string
+ // Title of API
+ Title string
+ // Description of API
+ Description string
+ // Version is the version of the API described by this DSL.
+ Version string
+ // Servers lists the API hosts.
+ Servers []*ServerExpr
+ // TermsOfService describes or links to the service terms of API.
+ TermsOfService string
+ // Contact provides the API users with contact information.
+ Contact *ContactExpr
+ // License describes the API license.
+ License *LicenseExpr
+ // Docs points to the API external documentation.
+ Docs *DocsExpr
+ // Meta is a list of key/value pairs.
+ Meta MetaExpr
+ // Requirements contains the security requirements that apply to
+ // all the API service methods. One requirement is composed of
+ // potentially multiple schemes. Incoming requests must validate
+ // at least one requirement to be authorized.
+ Requirements []*SecurityExpr
+ // HTTP contains the HTTP specific API level expressions.
+ HTTP *HTTPExpr
+ // GRPC contains the gRPC specific API level expressions.
+ GRPC *GRPCExpr
+
+ // random generator used to build examples for the API types.
+ random *Random
+ }
+
+ // ContactExpr contains the API contact information.
+ ContactExpr struct {
+ // Name of the contact person/organization
+ Name string `json:"name,omitempty"`
+ // Email address of the contact person/organization
+ Email string `json:"email,omitempty"`
+ // URL pointing to the contact information
+ URL string `json:"url,omitempty"`
+ }
+
+ // LicenseExpr contains the license information for the API.
+ LicenseExpr struct {
+ // Name of license used for the API
+ Name string `json:"name,omitempty"`
+ // URL to the license used for the API
+ URL string `json:"url,omitempty"`
+ }
+
+ // DocsExpr points to external documentation.
+ DocsExpr struct {
+ // Description of documentation.
+ Description string `json:"description,omitempty"`
+ // URL to documentation.
+ URL string `json:"url,omitempty"`
+ }
+)
+
+// NewAPIExpr initializes an API expression.
+func NewAPIExpr(name string, dsl func()) *APIExpr {
+ return &APIExpr{
+ Name: name,
+ HTTP: new(HTTPExpr),
+ GRPC: new(GRPCExpr),
+ DSLFunc: dsl,
+ }
+}
+
+// Schemes returns the list of transport schemes used by all the API servers.
+// The possible values for the elements of the returned slice are "http",
+// "https", "grpc" and "grpcs".
+func (a *APIExpr) Schemes() []string {
+ schemes := make(map[string]struct{})
+ for _, s := range a.Servers {
+ for _, sch := range s.Schemes() {
+ schemes[sch] = struct{}{}
+ }
+ }
+ ss := make([]string, len(schemes))
+ i := 0
+ for s := range schemes {
+ ss[i] = s
+ i++
+ }
+ sort.Strings(ss)
+ return ss
+}
+
+// Random returns the random generator associated with a. APIs with identical
+// names return generators that return the same sequence of pseudo random values.
+func (a *APIExpr) Random() *Random {
+ if a.random == nil {
+ a.random = NewRandom(a.Name)
+ }
+ return a.random
+}
+
+// DefaultServer returns a server expression that describes a server which
+// exposes all the services in the design and listens on localhost port 80 for
+// HTTP requests and port 8080 for gRPC requests.
+func (a *APIExpr) DefaultServer() *ServerExpr {
+ svcs := make([]string, len(Root.Services))
+ for i, svc := range Root.Services {
+ svcs[i] = svc.Name
+ }
+ return &ServerExpr{
+ Name: a.Name,
+ Description: "Default server for " + a.Name,
+ Services: svcs,
+ Hosts: []*HostExpr{{
+ Name: "localhost",
+ ServerName: a.Name,
+ URIs: []URIExpr{URIExpr("http://localhost:80"), URIExpr("grpc://localhost:8080")},
+ }},
+ }
+}
+
+// EvalName is the qualified name of the expression.
+func (a *APIExpr) EvalName() string { return "API " + a.Name }
+
+// Hash returns a unique hash value for a.
+func (a *APIExpr) Hash() string { return "_api_+" + a.Name }
+
+// Finalize makes sure there's one server definition.
+func (a *APIExpr) Finalize() {
+ if a.Name == "" {
+ a.Name = "api"
+ }
+ if len(a.Servers) == 0 {
+ a.Servers = []*ServerExpr{a.DefaultServer()}
+ }
+}
+
+// EvalName is the qualified name of the expression.
+func (l *LicenseExpr) EvalName() string { return "License " + l.Name }
+
+// EvalName is the qualified name of the expression.
+func (d *DocsExpr) EvalName() string { return "Documentation " + d.URL }
+
+// EvalName is the qualified name of the expression.
+func (c *ContactExpr) EvalName() string { return "Contact " + c.Name }
diff --git a/vendor/goa.design/goa/expr/attribute.go b/vendor/goa.design/goa/expr/attribute.go
new file mode 100644
index 000000000..67ec8eed0
--- /dev/null
+++ b/vendor/goa.design/goa/expr/attribute.go
@@ -0,0 +1,679 @@
+package expr
+
+import (
+ "fmt"
+
+ "goa.design/goa/eval"
+)
+
+type (
+ // AttributeExpr defines an object field with optional description,
+ // default value and validations.
+ AttributeExpr struct {
+ // DSLFunc contains the DSL used to initialize the expression.
+ eval.DSLFunc
+ // Attribute type
+ Type DataType
+ // Base types if any
+ Bases []DataType
+ // Attribute reference types if any
+ References []DataType
+ // Optional description
+ Description string
+ // Docs points to external documentation
+ Docs *DocsExpr
+ // Optional validations
+ Validation *ValidationExpr
+ // Meta is a list of key/value pairs
+ Meta MetaExpr
+ // Optional member default value
+ DefaultValue interface{}
+ // UserExample set in DSL or computed in Finalize
+ UserExamples []*ExampleExpr
+ }
+
+ // ExampleExpr represents an example.
+ ExampleExpr struct {
+ // Summary is the example short summary.
+ Summary string
+ // Description is an optional long description.
+ Description string
+ // Value is the example value.
+ Value interface{}
+ }
+
+ // Val is the type used to provide the value of examples for attributes that are
+ // objects.
+ Val map[string]interface{}
+
+ // CompositeExpr defines a generic composite expression that contains an
+ // attribute. This makes it possible for plugins to use attributes in
+ // their own data structures.
+ CompositeExpr interface {
+ // Attribute returns the composite expression embedded attribute.
+ Attribute() *AttributeExpr
+ }
+
+ // ValidationExpr contains validation rules for an attribute.
+ ValidationExpr struct {
+ // Values represents an enum validation as described at
+ // http://json-schema.org/latest/json-schema-validation.html#anchor76.
+ Values []interface{}
+ // Format represents a format validation as described at
+ // http://json-schema.org/latest/json-schema-validation.html#anchor104.
+ Format ValidationFormat
+ // PatternValidationExpr represents a pattern validation as
+ // described at
+ // http://json-schema.org/latest/json-schema-validation.html#anchor33
+ Pattern string
+ // Minimum represents an minimum value validation as described
+ // at
+ // http://json-schema.org/latest/json-schema-validation.html#anchor21.
+ Minimum *float64
+ // Maximum represents a maximum value validation as described at
+ // http://json-schema.org/latest/json-schema-validation.html#anchor17.
+ Maximum *float64
+ // MinLength represents an minimum length validation as
+ // described at
+ // http://json-schema.org/latest/json-schema-validation.html#anchor29.
+ MinLength *int
+ // MaxLength represents an maximum length validation as
+ // described at
+ // http://json-schema.org/latest/json-schema-validation.html#anchor26.
+ MaxLength *int
+ // Required list the required fields of object attributes as
+ // described at
+ // http://json-schema.org/latest/json-schema-validation.html#anchor61.
+ Required []string
+ }
+
+ // ValidationFormat is the type used to enumerate the possible string
+ // formats.
+ ValidationFormat string
+)
+
+const (
+ // FormatDate describes RFC3339 date values.
+ FormatDate ValidationFormat = "date"
+
+ // FormatDateTime describes RFC3339 date time values.
+ FormatDateTime ValidationFormat = "date-time"
+
+ // FormatUUID describes RFC4122 UUID values.
+ FormatUUID = "uuid"
+
+ // FormatEmail describes RFC5322 email addresses.
+ FormatEmail = "email"
+
+ // FormatHostname describes RFC1035 Internet hostnames.
+ FormatHostname = "hostname"
+
+ // FormatIPv4 describes RFC2373 IPv4 address values.
+ FormatIPv4 = "ipv4"
+
+ // FormatIPv6 describes RFC2373 IPv6 address values.
+ FormatIPv6 = "ipv6"
+
+ // FormatIP describes RFC2373 IPv4 or IPv6 address values.
+ FormatIP = "ip"
+
+ // FormatURI describes RFC3986 URI values.
+ FormatURI = "uri"
+
+ // FormatMAC describes IEEE 802 MAC-48, EUI-48 or EUI-64 MAC address values.
+ FormatMAC = "mac"
+
+ // FormatCIDR describes RFC4632 and RFC4291 CIDR notation IP address values.
+ FormatCIDR = "cidr"
+
+ // FormatRegexp describes regular expression syntax accepted by RE2.
+ FormatRegexp = "regexp"
+
+ // FormatJSON describes JSON text.
+ FormatJSON = "json"
+
+ // FormatRFC1123 describes RFC1123 date time values.
+ FormatRFC1123 = "rfc1123"
+)
+
+// EvalName returns the name used by the DSL evaluation.
+func (a *AttributeExpr) EvalName() string {
+ return "attribute"
+}
+
+// validated keeps track of validated attributes to handle cyclical definitions.
+var validated = make(map[*AttributeExpr]bool)
+
+// TaggedAttribute returns the name of the child attribute of a with the given
+// tag if a is an object.
+func TaggedAttribute(a *AttributeExpr, tag string) string {
+ obj := AsObject(a.Type)
+ if obj == nil {
+ return ""
+ }
+ for _, at := range *obj {
+ if _, ok := at.Attribute.Meta[tag]; ok {
+ return at.Name
+ }
+ }
+ return ""
+}
+
+// Validate tests whether the attribute required fields exist. Since attributes
+// are unaware of their context, additional context information can be provided
+// to be used in error messages. The parent definition context is automatically
+// added to error messages.
+func (a *AttributeExpr) Validate(ctx string, parent eval.Expression) *eval.ValidationErrors {
+ if validated[a] {
+ return nil
+ }
+ validated[a] = true
+ verr := new(eval.ValidationErrors)
+ if a.Type == nil {
+ verr.Add(parent, "attribute type is nil")
+ return verr
+ }
+ if ctx != "" {
+ ctx += " - "
+ }
+ verr.Merge(a.validateEnumDefault(ctx, parent))
+ if o := AsObject(a.Type); o != nil {
+ for _, n := range a.AllRequired() {
+ if a.Find(n) == nil {
+ verr.Add(parent, `%srequired field %q does not exist`, ctx, n)
+ }
+ }
+ for _, nat := range *o {
+ ctx = fmt.Sprintf("field %s", nat.Name)
+ verr.Merge(nat.Attribute.Validate(ctx, parent))
+ }
+ } else {
+ if ar := AsArray(a.Type); ar != nil {
+ elemType := ar.ElemType
+ verr.Merge(elemType.Validate(ctx, a))
+ }
+ }
+
+ if views, ok := a.Meta["view"]; ok {
+ rt, ok := a.Type.(*ResultTypeExpr)
+ if !ok {
+ verr.Add(parent, "%sdefines a view %v but is not a result type", ctx, views)
+ }
+ if name := views[0]; name != "default" && rt != nil {
+ found := false
+ for _, v := range rt.Views {
+ if v.Name == name {
+ found = true
+ break
+ }
+ }
+ if !found {
+ verr.Add(parent, "%stype does not define view %q", ctx, name)
+ }
+ }
+ }
+
+ return verr
+}
+
+// Finalize merges base type attributes and finalize the Type attribute.
+func (a *AttributeExpr) Finalize() {
+ if ut, ok := a.Type.(UserType); ok {
+ ut.Attribute().Finalize()
+ }
+ if IsObject(a.Type) {
+ for _, ref := range a.References {
+ ru, ok := ref.(UserType)
+ if !ok {
+ continue
+ }
+ a.Inherit(ru.Attribute())
+ }
+ for _, base := range a.Bases {
+ ru, ok := base.(UserType)
+ if !ok {
+ continue
+ }
+ a.Merge(ru.Attribute())
+ }
+ }
+}
+
+// Merge merges other's attributes into a overriding attributes of a with
+// attributes of other with identical names.
+//
+// This only applies to attributes of type Object and Merge panics if the
+// argument or the target is not of type Object.
+func (a *AttributeExpr) Merge(other *AttributeExpr) {
+ if other == nil {
+ return
+ }
+ left := AsObject(a.Type)
+ right := AsObject(other.Type)
+ if left == nil || right == nil {
+ panic("cannot merge non object attributes") // bug
+ }
+ if a.Type == Empty && len(*right) > 0 {
+ a.Type = &Object{}
+ left = AsObject(a.Type)
+ }
+ if other.Validation != nil {
+ if a.Validation == nil {
+ a.Validation = other.Validation.Dup()
+ } else {
+ a.Validation.Merge(other.Validation)
+ }
+ }
+ for _, nat := range *right {
+ left.Set(nat.Name, nat.Attribute)
+ }
+}
+
+// Inherit merges the properties of existing target type attributes with the
+// argument's. The algorithm is recursive so that child attributes are also
+// merged.
+func (a *AttributeExpr) Inherit(parent *AttributeExpr) {
+ if !a.shouldInherit(parent) {
+ return
+ }
+ pobj := AsObject(parent.Type)
+ if a.Type == Empty && len(*pobj) > 0 {
+ a.Type = &Object{}
+ }
+ a.inheritValidations(parent)
+ a.inheritRecursive(parent, make(map[*AttributeExpr]struct{}))
+}
+
+// AllRequired returns the list of all required fields from the underlying
+// object. This method recurses if the type is itself an attribute (i.e. a
+// UserType, this happens with the Reference DSL for example).
+func (a *AttributeExpr) AllRequired() []string {
+ if u, ok := a.Type.(UserType); ok {
+ return u.Attribute().AllRequired()
+ }
+ if a.Validation != nil {
+ return a.Validation.Required
+ }
+ return nil
+}
+
+// IsRequired returns true if the given string matches the name of a required
+// attribute, false otherwise. This method only applies to attributes of type
+// Object.
+func (a *AttributeExpr) IsRequired(attName string) bool {
+ for _, name := range a.AllRequired() {
+ if name == attName {
+ return true
+ }
+ }
+ return false
+}
+
+// IsRequiredNoDefault returns true if the given string matches the name of a
+// required attribute and the attribute has no default value, false otherwise.
+// This method only applies to attributes of type Object.
+func (a *AttributeExpr) IsRequiredNoDefault(attName string) bool {
+ for _, name := range a.AllRequired() {
+ if name == attName {
+ return a.GetDefault(name) == nil
+ }
+ }
+ return false
+}
+
+// IsPrimitivePointer returns true if the field generated for the given
+// attribute should be a pointer to a primitive type. The receiver attribute must
+// be an object.
+//
+// If useDefault is true and the attribute has a default value then
+// IsPrimitivePointer returns false. This makes it possible to differentiate
+// between request types where attributes with default values should not be
+// generated using a pointer value and response types where they should.
+//
+// DefaultValue UseDefault Pointer (assuming all other conditions are true)
+// Yes True False
+// Yes False True
+// No True True
+// No False True
+//
+func (a *AttributeExpr) IsPrimitivePointer(attName string, useDefault bool) bool {
+ o := AsObject(a.Type)
+ if o == nil {
+ panic("checking pointer field on non-object") // bug
+ }
+ att := o.Attribute(attName)
+ if att == nil {
+ return false
+ }
+ if IsPrimitive(att.Type) {
+ return att.Type.Kind() != BytesKind && att.Type.Kind() != AnyKind &&
+ !a.IsRequired(attName) && (!a.HasDefaultValue(attName) || !useDefault)
+ }
+ return false
+}
+
+// HasTag returns true if the attribute is an object that has an attribute with
+// the given tag.
+func (a *AttributeExpr) HasTag(tag string) bool {
+ if a == nil {
+ return false
+ }
+ obj := AsObject(a.Type)
+ if obj == nil {
+ return false
+ }
+ for _, at := range *obj {
+ if _, ok := at.Attribute.Meta[tag]; ok {
+ return true
+ }
+ }
+ return false
+}
+
+// HasDefaultValue returns true if the attribute with the given name has a
+// default value.
+func (a *AttributeExpr) HasDefaultValue(attName string) bool {
+ return a.GetDefault(attName) != nil
+}
+
+// GetDefault gets the default for the attribute.
+func (a *AttributeExpr) GetDefault(attName string) interface{} {
+ if o := AsObject(a.Type); o != nil {
+ return o.Attribute(attName).DefaultValue
+ }
+ return nil
+}
+
+// SetDefault sets the default for the attribute. It also converts HashVal
+// and ArrayVal to map and slice respectively.
+func (a *AttributeExpr) SetDefault(def interface{}) {
+ switch actual := def.(type) {
+ case MapVal:
+ a.DefaultValue = actual.ToMap()
+ case ArrayVal:
+ a.DefaultValue = actual.ToSlice()
+ default:
+ a.DefaultValue = actual
+ }
+}
+
+// Find finds an attribute with the given name in the object and any
+// extended attribute expr. If the attribute is not a user
+// type or object, Find returns nil.
+func (a *AttributeExpr) Find(name string) *AttributeExpr {
+ findAttrFn := func(typ DataType) *AttributeExpr {
+ switch t := typ.(type) {
+ case UserType:
+ return t.Attribute().Find(name)
+ case *Object:
+ if att := t.Attribute(name); att != nil {
+ return att
+ }
+ }
+ return nil
+ }
+
+ if att := findAttrFn(a.Type); att != nil {
+ return att
+ }
+ for _, b := range a.Bases {
+ if att := findAttrFn(b); att != nil {
+ return att
+ }
+ }
+ return nil
+}
+
+// Delete removes an attribute with the given name. It does nothing if the
+// attribute expression is not a user type or object.
+func (a *AttributeExpr) Delete(name string) {
+ switch t := a.Type.(type) {
+ case UserType:
+ t.Attribute().Delete(name)
+ case *Object:
+ AsObject(t).Delete(name)
+ if a.Validation != nil {
+ a.Validation.RemoveRequired(name)
+ }
+ for _, ex := range a.UserExamples {
+ if m, ok := ex.Value.(map[string]interface{}); ok {
+ delete(m, name)
+ }
+ }
+ }
+}
+
+// Debug dumps the attribute to STDOUT in a goa developer friendly way.
+func (a *AttributeExpr) Debug(prefix string) { a.debug(prefix, make(map[*AttributeExpr]int), 0) }
+func (a *AttributeExpr) debug(prefix string, seen map[*AttributeExpr]int, indent int) {
+ for i := 0; i < indent; i++ {
+ prefix = " " + prefix
+ }
+ if c, ok := seen[a]; ok && c > 1 {
+ fmt.Printf("%s: ...\n", prefix)
+ return
+ }
+ seen[a]++
+ fmt.Printf("%s: %q\n", prefix, a.Type.Name())
+ if o := AsObject(a.Type); o != nil {
+ for _, att := range *o {
+ att.Attribute.debug(" - "+att.Name, seen, indent+1)
+ }
+ }
+ if a := AsArray(a.Type); a != nil {
+ a.ElemType.debug(" Elem", seen, indent+2)
+ }
+ if m := AsMap(a.Type); m != nil {
+ m.KeyType.debug(" Key", seen, indent+2)
+ m.ElemType.debug(" Elem", seen, indent+2)
+ }
+}
+
+// validateEnumDefault makes sure that the attribute default value is one of the
+// enum values.
+func (a *AttributeExpr) validateEnumDefault(ctx string, parent eval.Expression) *eval.ValidationErrors {
+ //TODO: We only do the default value and enum check just for primitive types.
+ if _, ok := a.Type.(Primitive); !ok {
+ return nil
+ }
+ verr := new(eval.ValidationErrors)
+ if a.DefaultValue != nil && a.Validation != nil && a.Validation.Values != nil {
+ var found bool
+ for _, e := range a.Validation.Values {
+ if e == a.DefaultValue {
+ found = true
+ break
+ }
+ }
+ if !found {
+ verr.Add(
+ parent,
+ "%sdefault value %#v is not one of the accepted values: %#v",
+ ctx,
+ a.DefaultValue,
+ a.Validation.Values,
+ )
+ }
+ }
+ return verr
+}
+
+func (a *AttributeExpr) inheritRecursive(parent *AttributeExpr, seen map[*AttributeExpr]struct{}) {
+ if !a.shouldInherit(parent) {
+ return
+ }
+ for _, nat := range *AsObject(a.Type) {
+ if patt := AsObject(parent.Type).Attribute(nat.Name); patt != nil {
+ att := nat.Attribute
+ if att.Description == "" {
+ att.Description = patt.Description
+ }
+ att.inheritValidations(patt)
+ if att.DefaultValue == nil {
+ att.DefaultValue = patt.DefaultValue
+ }
+ if att.Type == nil {
+ att.Type = patt.Type
+ } else if att.shouldInherit(patt) {
+ if _, ok := seen[att]; ok {
+ continue
+ }
+ seen[att] = struct{}{}
+ for _, nat := range *AsObject(att.Type) {
+ child := nat.Attribute
+ parent := AsObject(patt.Type).Attribute(nat.Name)
+ child.inheritValidations(parent)
+ child.inheritRecursive(parent, seen)
+ }
+ }
+ }
+ }
+}
+
+func (a *AttributeExpr) inheritValidations(parent *AttributeExpr) {
+ if parent.Validation == nil {
+ return
+ }
+ if a.Validation == nil {
+ a.Validation = &ValidationExpr{}
+ }
+ a.Validation.AddRequired(parent.Validation.Required...)
+}
+
+func (a *AttributeExpr) shouldInherit(parent *AttributeExpr) bool {
+ return a != nil && AsObject(a.Type) != nil &&
+ parent != nil && AsObject(parent.Type) != nil
+}
+
+// EvalName returns the name used by the DSL evaluation.
+func (a *ExampleExpr) EvalName() string {
+ return `example "` + a.Summary + `"`
+}
+
+// Context returns the generic definition name used in error messages.
+func (v *ValidationExpr) Context() string {
+ return "validation"
+}
+
+// Merge merges other into v.
+func (v *ValidationExpr) Merge(other *ValidationExpr) {
+ if v.Values == nil {
+ v.Values = other.Values
+ }
+ if v.Format == "" {
+ v.Format = other.Format
+ }
+ if v.Pattern == "" {
+ v.Pattern = other.Pattern
+ }
+ if v.Minimum == nil || (other.Minimum != nil && *v.Minimum > *other.Minimum) {
+ v.Minimum = other.Minimum
+ }
+ if v.Maximum == nil || (other.Maximum != nil && *v.Maximum < *other.Maximum) {
+ v.Maximum = other.Maximum
+ }
+ if v.MinLength == nil || (other.MinLength != nil && *v.MinLength > *other.MinLength) {
+ v.MinLength = other.MinLength
+ }
+ if v.MaxLength == nil || (other.MaxLength != nil && *v.MaxLength < *other.MaxLength) {
+ v.MaxLength = other.MaxLength
+ }
+ v.AddRequired(other.Required...)
+}
+
+// AddRequired merges the required fields from other into v
+func (v *ValidationExpr) AddRequired(required ...string) {
+ for _, r := range required {
+ found := false
+ for _, rr := range v.Required {
+ if r == rr {
+ found = true
+ break
+ }
+ }
+ if !found {
+ v.Required = append(v.Required, r)
+ }
+ }
+}
+
+// RemoveRequired removes the given field from the list of required fields
+func (v *ValidationExpr) RemoveRequired(required string) {
+ for i, r := range v.Required {
+ if required == r {
+ v.Required = append(v.Required[:i], v.Required[i+1:]...)
+ break
+ }
+ }
+}
+
+// HasRequiredOnly returns true if the validation only has the Required field
+// with a non-zero value.
+func (v *ValidationExpr) HasRequiredOnly() bool {
+ if len(v.Values) > 0 {
+ return false
+ }
+ if v.Format != "" || v.Pattern != "" {
+ return false
+ }
+ if (v.Minimum != nil) || (v.Maximum != nil) || (v.MinLength != nil) || (v.MaxLength != nil) {
+ return false
+ }
+ return true
+}
+
+// Dup makes a shallow dup of the validation.
+func (v *ValidationExpr) Dup() *ValidationExpr {
+ var req []string
+ if len(v.Required) > 0 {
+ req = make([]string, len(v.Required))
+ for i, r := range v.Required {
+ req[i] = r
+ }
+ }
+ return &ValidationExpr{
+ Values: v.Values,
+ Format: v.Format,
+ Pattern: v.Pattern,
+ Minimum: v.Minimum,
+ Maximum: v.Maximum,
+ MinLength: v.MinLength,
+ MaxLength: v.MaxLength,
+ Required: req,
+ }
+}
+
+// IsSupportedValidationFormat checks if the validation format is supported by goa.
+func (a *AttributeExpr) IsSupportedValidationFormat(vf ValidationFormat) bool {
+ switch vf {
+ case FormatDate:
+ return true
+ case FormatDateTime:
+ return true
+ case FormatUUID:
+ return true
+ case FormatEmail:
+ return true
+ case FormatHostname:
+ return true
+ case FormatIPv4:
+ return true
+ case FormatIPv6:
+ return true
+ case FormatIP:
+ return true
+ case FormatURI:
+ return true
+ case FormatMAC:
+ return true
+ case FormatCIDR:
+ return true
+ case FormatRegexp:
+ return true
+ case FormatJSON:
+ return true
+ case FormatRFC1123:
+ return true
+ }
+ return false
+}
diff --git a/vendor/goa.design/goa/expr/dup.go b/vendor/goa.design/goa/expr/dup.go
new file mode 100644
index 000000000..53bedb1fe
--- /dev/null
+++ b/vendor/goa.design/goa/expr/dup.go
@@ -0,0 +1,92 @@
+package expr
+
+import "fmt"
+
+// Dup creates a copy the given data type.
+func Dup(d DataType) DataType {
+ res := newDupper().DupType(d)
+ if rt, ok := d.(*ResultTypeExpr); ok {
+ if Root.GeneratedResultType(rt.Identifier) != nil {
+ *Root.GeneratedTypes = append(*Root.GeneratedTypes, res.(*ResultTypeExpr))
+ }
+ }
+ return res
+}
+
+// DupAtt creates a copy of the given attribute.
+func DupAtt(att *AttributeExpr) *AttributeExpr {
+ return newDupper().DupAttribute(att)
+}
+
+// dupper implements recursive and cycle safe copy of data types.
+type dupper struct {
+ uts map[string]UserType
+ ats map[*AttributeExpr]struct{}
+}
+
+// newDupper returns a new initialized dupper.
+func newDupper() *dupper {
+ return &dupper{
+ uts: make(map[string]UserType),
+ ats: make(map[*AttributeExpr]struct{}),
+ }
+}
+
+// DupAttribute creates a copy of the given attribute.
+func (d *dupper) DupAttribute(att *AttributeExpr) *AttributeExpr {
+ if _, ok := d.ats[att]; ok {
+ return att
+ }
+ var valDup *ValidationExpr
+ if att.Validation != nil {
+ valDup = att.Validation.Dup()
+ }
+ dup := AttributeExpr{
+ Type: d.DupType(att.Type),
+ Description: att.Description,
+ References: att.References,
+ Bases: att.Bases,
+ Validation: valDup,
+ Meta: att.Meta,
+ DefaultValue: att.DefaultValue,
+ DSLFunc: att.DSLFunc,
+ UserExamples: att.UserExamples,
+ }
+ d.ats[&dup] = struct{}{}
+ return &dup
+}
+
+// DupType creates a copy of the given data type.
+func (d *dupper) DupType(t DataType) DataType {
+ if t == Empty {
+ // Don't dup Empty so that code may check against it.
+ return t
+ }
+ switch actual := t.(type) {
+ case Primitive:
+ return t
+ case *Array:
+ return &Array{ElemType: d.DupAttribute(actual.ElemType)}
+ case *Object:
+ res := &Object{}
+ for _, nat := range *actual {
+ res.Set(nat.Name, d.DupAttribute(nat.Attribute))
+ }
+ return res
+ case *Map:
+ return &Map{
+ KeyType: d.DupAttribute(actual.KeyType),
+ ElemType: d.DupAttribute(actual.ElemType),
+ }
+ case UserType:
+ if u, ok := d.uts[actual.ID()]; ok {
+ return u
+ }
+ dp := actual.Dup(nil)
+ d.uts[actual.ID()] = dp
+ dupAtt := d.DupAttribute(actual.Attribute())
+ dp.SetAttribute(dupAtt)
+ return dp
+ }
+ panic("unknown type " + fmt.Sprintf("%T", t))
+}
diff --git a/vendor/goa.design/goa/expr/example.go b/vendor/goa.design/goa/expr/example.go
new file mode 100644
index 000000000..74c21e216
--- /dev/null
+++ b/vendor/goa.design/goa/expr/example.go
@@ -0,0 +1,316 @@
+package expr
+
+import (
+ "fmt"
+ "math"
+ "regexp"
+ "time"
+
+ regen "github.com/zach-klippenstein/goregen"
+)
+
+const (
+ maxAttempts = 500 // Max number of retries to generate valid example.
+ maxLength = 3 // Max length for array and map examples.
+ maxValue = 1000 // Max value for integer and float examples.
+)
+
+// Example returns the example set on the attribute at design time. If there
+// isn't such a value then Example computes a random value for the attribute
+// using the given random value producer.
+func (a *AttributeExpr) Example(r *Random) interface{} {
+ if len(a.UserExamples) > 0 {
+ return a.UserExamples[0].Value
+ }
+ // randomize array length first, since that's from higher level
+ if hasLengthValidation(a) {
+ return byLength(a, r)
+ }
+ // enum should dominate, because the potential "examples" are fixed
+ if hasEnumValidation(a) {
+ return byEnum(a, r)
+ }
+ // loop until a satisfying example is generated
+ var (
+ hasFormat = hasFormatValidation(a)
+ hasPattern = hasPatternValidation(a)
+ hasMinMax = hasMinMaxValidation(a)
+ attempts = 0
+ )
+ for attempts < maxAttempts {
+ attempts++
+ var example interface{}
+ // Format comes first, since it initiates the example
+ if hasFormat {
+ example = byFormat(a, r)
+ }
+ // now validate with rest of matchers; redo if not satisified
+ if hasPattern {
+ if example == nil {
+ example = byPattern(a, r)
+ } else if !checkPattern(a, example) {
+ continue
+ }
+ }
+ if hasMinMax {
+ if example == nil {
+ example = byMinMax(a, r)
+ } else if !checkMinMaxValue(a, example) {
+ continue
+ }
+ }
+ if example == nil {
+ example = a.Type.Example(r)
+ }
+ return example
+ }
+ return a.Type.Example(r)
+}
+
+// NewLength returns an int that validates the generator attribute length validations if any.
+func NewLength(a *AttributeExpr, r *Random) int {
+ if hasLengthValidation(a) {
+ minlength, maxlength := math.Inf(1), math.Inf(-1)
+ if a.Validation.MinLength != nil {
+ minlength = float64(*a.Validation.MinLength)
+ }
+ if a.Validation.MaxLength != nil {
+ maxlength = float64(*a.Validation.MaxLength)
+ }
+ count := 0
+ if math.IsInf(minlength, 1) {
+ count = int(maxlength) - (r.Int() % 3)
+ } else if math.IsInf(maxlength, -1) {
+ count = int(minlength) + (r.Int() % 3)
+ } else if minlength < maxlength {
+ diff := int(maxlength - minlength)
+ if diff > maxLength {
+ diff = maxLength
+ }
+ count = int(minlength) + (r.Int() % diff)
+ } else if minlength == maxlength {
+ count = int(minlength)
+ } else {
+ panic("Validation: MinLength > MaxLength")
+ }
+ if count > maxLength {
+ count = maxLength
+ }
+ return count
+ }
+ return r.Int()%3 + 2
+}
+
+func hasLengthValidation(a *AttributeExpr) bool {
+ if a.Validation == nil {
+ return false
+ }
+ return a.Validation.MinLength != nil || a.Validation.MaxLength != nil
+}
+
+func hasEnumValidation(a *AttributeExpr) bool {
+ return a.Validation != nil && len(a.Validation.Values) > 0
+}
+
+func hasFormatValidation(a *AttributeExpr) bool {
+ return a.Validation != nil && a.Validation.Format != ""
+}
+
+func hasPatternValidation(a *AttributeExpr) bool {
+ return a.Validation != nil && a.Validation.Pattern != ""
+}
+
+func hasMinMaxValidation(a *AttributeExpr) bool {
+ if a.Validation == nil {
+ return false
+ }
+ return a.Validation.Minimum != nil || a.Validation.Maximum != nil
+}
+
+// byLength generates a random size array of examples based on what's given.
+func byLength(a *AttributeExpr, r *Random) interface{} {
+ count := NewLength(a, r)
+ switch a.Type.Kind() {
+ case StringKind:
+ return r.faker.Characters(count)
+ case BytesKind:
+ return []byte(r.faker.Characters(count))
+ case MapKind:
+ raw := make(map[interface{}]interface{})
+ m := a.Type.(*Map)
+ for i := 0; i < count; i++ {
+ raw[m.KeyType.Example(r)] = m.ElemType.Example(r)
+ }
+ return m.MakeMap(raw)
+ case ArrayKind:
+ raw := make([]interface{}, count)
+ ar := a.Type.(*Array)
+ for i := 0; i < count; i++ {
+ raw[i] = ar.ElemType.Example(r)
+ }
+ return ar.MakeSlice(raw)
+ default:
+ panic("invalid type for length validation: " + a.Type.Name())
+ }
+}
+
+// byEnum returns a random selected enum value.
+func byEnum(a *AttributeExpr, r *Random) interface{} {
+ if !hasEnumValidation(a) {
+ return nil
+ }
+ values := a.Validation.Values
+ count := len(values)
+ i := r.Int() % count
+ return values[i]
+}
+
+// byFormat returns a random example based on the format the user asks.
+func byFormat(a *AttributeExpr, r *Random) interface{} {
+ if !hasFormatValidation(a) {
+ return nil
+ }
+ format := a.Validation.Format
+ if res, ok := map[ValidationFormat]interface{}{
+ FormatEmail: r.faker.Email(),
+ FormatHostname: r.faker.DomainName() + "." + r.faker.DomainSuffix(),
+ FormatDate: time.Unix(int64(r.Int())%1454957045, 0).UTC().Format("2006-01-02"), // to obtain a "fixed" rand
+ FormatDateTime: time.Unix(int64(r.Int())%1454957045, 0).UTC().Format(time.RFC3339), // to obtain a "fixed" rand
+ FormatIPv4: r.faker.IPv4Address().String(),
+ FormatIPv6: r.faker.IPv6Address().String(),
+ FormatIP: r.faker.IPv4Address().String(),
+ FormatURI: r.faker.URL(),
+ FormatMAC: func() string {
+ res, err := regen.Generate(`([0-9A-F]{2}-){5}[0-9A-F]{2}`)
+ if err != nil {
+ return "12-34-56-78-9A-BC"
+ }
+ return res
+ }(),
+ FormatCIDR: "192.168.100.14/24",
+ FormatRegexp: r.faker.Characters(3) + ".*",
+ FormatRFC1123: time.Unix(int64(r.Int())%1454957045, 0).UTC().Format(time.RFC1123), // to obtain a "fixed" rand
+ }[format]; ok {
+ return res
+ }
+ panic("Validation: unknown format '" + format + "'") // bug
+}
+
+// byPattern generates a random value that satisfies the pattern.
+//
+// Note: if
+// multiple patterns are given, only one of them is used.
+func byPattern(a *AttributeExpr, r *Random) interface{} {
+ if !hasPatternValidation(a) {
+ return false
+ }
+ pattern := a.Validation.Pattern
+ gen, err := regen.NewGenerator(pattern, ®en.GeneratorArgs{MaxUnboundedRepeatCount: 6})
+ if err != nil {
+ return r.faker.Name()
+ }
+ return gen.Generate()
+}
+
+func byMinMax(a *AttributeExpr, r *Random) interface{} {
+ if !hasMinMaxValidation(a) {
+ return nil
+ }
+ var (
+ i = a.Type.Kind() == IntKind || a.Type.Kind() == UIntKind
+ i32 = a.Type.Kind() == Int32Kind || a.Type.Kind() == UInt32Kind
+ i64 = a.Type.Kind() == Int64Kind || a.Type.Kind() == UInt64Kind
+ f32 = a.Type.Kind() == Float32Kind
+ min = math.Inf(-1)
+ max = math.Inf(1)
+ sign = 1
+ )
+ if a.Validation.Maximum != nil {
+ max = *a.Validation.Maximum
+ }
+ if a.Validation.Minimum != nil {
+ min = *a.Validation.Minimum
+ } else {
+ sign = -1
+ min = max
+ max = math.Inf(1)
+ }
+
+ if math.IsInf(max, 1) {
+ switch {
+ case i:
+ return sign * (r.Int() + int(min))
+ case i32:
+ return int32(sign) * (r.Int32() + int32(min))
+ case i64:
+ return int64(sign) * (r.Int64() + int64(min))
+ case f32:
+ return float32(sign) * (r.Float32() + float32(min))
+ default:
+ return float64(sign) * (r.Float64() + min)
+ }
+ }
+ if min < max {
+ delta := max - min
+ switch {
+ case i:
+ return r.Int()%int(delta) + int(min)
+ case i32:
+ return r.Int32()%int32(delta) + int32(min)
+ case i64:
+ return r.Int64()%int64(delta) + int64(min)
+ case f32:
+ return r.Float32()*float32(delta) + float32(min)
+ default:
+ return r.Float64()*delta + min
+ }
+ }
+ switch {
+ case i:
+ return int(min)
+ case i32:
+ return int32(min)
+ case i64:
+ return int64(min)
+ case f32:
+ return float32(min)
+ default:
+ return min
+ }
+}
+
+func checkPattern(a *AttributeExpr, example interface{}) bool {
+ if !hasPatternValidation(a) {
+ return true
+ }
+ pattern := a.Validation.Pattern
+ re, err := regexp.Compile(pattern)
+ if err != nil {
+ panic("Validation: invalid pattern '" + pattern + "'")
+ }
+ if !re.MatchString(fmt.Sprint(example)) {
+ return false
+ }
+ return true
+}
+
+func checkMinMaxValue(a *AttributeExpr, example interface{}) bool {
+ if !hasMinMaxValidation(a) {
+ return true
+ }
+ if min := a.Validation.Minimum; min != nil {
+ if v, ok := example.(int); ok && float64(v) < *min {
+ return false
+ } else if v, ok := example.(float64); ok && v < *min {
+ return false
+ }
+ }
+ if max := a.Validation.Maximum; max != nil {
+ if v, ok := example.(int); ok && float64(v) > *max {
+ return false
+ } else if v, ok := example.(float64); ok && v > *max {
+ return false
+ }
+ }
+ return true
+}
diff --git a/vendor/goa.design/goa/expr/generated.go b/vendor/goa.design/goa/expr/generated.go
new file mode 100644
index 000000000..f845adf51
--- /dev/null
+++ b/vendor/goa.design/goa/expr/generated.go
@@ -0,0 +1,41 @@
+package expr
+
+import "goa.design/goa/eval"
+
+type (
+ // GeneratedRoot records the generated result types and is a DSL root
+ // evaluated after Root.
+ GeneratedRoot []UserType
+)
+
+// EvalName is the name of the expression used by eval.
+func (r *GeneratedRoot) EvalName() string {
+ return "generated result types"
+}
+
+// WalkSets returns the generated result types for evaluation.
+func (r *GeneratedRoot) WalkSets(w eval.SetWalker) {
+ if r == nil {
+ return
+ }
+ set := make(eval.ExpressionSet, len(*r))
+ for i, t := range *r {
+ rt := t.(*ResultTypeExpr)
+ Root.ResultTypes = append(Root.ResultTypes, rt)
+ set[i] = rt
+ }
+ w(set)
+}
+
+// DependsOn ensures that Root executes first.
+func (r *GeneratedRoot) DependsOn() []eval.Root {
+ return []eval.Root{Root}
+}
+
+// Packages returns the Go import path to this and the dsl packages.
+func (r *GeneratedRoot) Packages() []string {
+ return []string{
+ "goa.design/goa/expr",
+ "goa.design/goa/dsl",
+ }
+}
diff --git a/vendor/goa.design/goa/expr/grpc.go b/vendor/goa.design/goa/expr/grpc.go
new file mode 100644
index 000000000..ad7bc299e
--- /dev/null
+++ b/vendor/goa.design/goa/expr/grpc.go
@@ -0,0 +1,39 @@
+package expr
+
+type (
+ // GRPCExpr contains the API level gRPC specific expressions.
+ GRPCExpr struct {
+ // Services contains the gRPC services created by the DSL.
+ Services []*GRPCServiceExpr
+ // Errors lists the error gRPC error responses defined globally.
+ Errors []*GRPCErrorExpr
+ }
+)
+
+// Service returns the service with the given name if any.
+func (g *GRPCExpr) Service(name string) *GRPCServiceExpr {
+ for _, res := range g.Services {
+ if res.Name() == name {
+ return res
+ }
+ }
+ return nil
+}
+
+// ServiceFor creates a new or returns the existing service definition for
+// the given service.
+func (g *GRPCExpr) ServiceFor(s *ServiceExpr) *GRPCServiceExpr {
+ if res := g.Service(s.Name); res != nil {
+ return res
+ }
+ res := &GRPCServiceExpr{
+ ServiceExpr: s,
+ }
+ g.Services = append(g.Services, res)
+ return res
+}
+
+// EvalName returns the name printed in case of evaluation error.
+func (g *GRPCExpr) EvalName() string {
+ return "API GRPC"
+}
diff --git a/vendor/goa.design/goa/expr/grpc_endpoint.go b/vendor/goa.design/goa/expr/grpc_endpoint.go
new file mode 100644
index 000000000..66360c656
--- /dev/null
+++ b/vendor/goa.design/goa/expr/grpc_endpoint.go
@@ -0,0 +1,466 @@
+package expr
+
+import (
+ "fmt"
+
+ "goa.design/goa/eval"
+)
+
+type (
+ // GRPCEndpointExpr describes a service endpoint. It embeds a MethodExpr
+ // and adds gRPC specific properties.
+ GRPCEndpointExpr struct {
+ eval.DSLFunc
+ // MethodExpr is the underlying method expression.
+ MethodExpr *MethodExpr
+ // Service is the parent service.
+ Service *GRPCServiceExpr
+ // Request is the message passed to the gRPC method.
+ Request *AttributeExpr
+ // StreamingRequest is the message passed to the gRPC method through a
+ // stream.
+ StreamingRequest *AttributeExpr
+ // Responses is the success gRPC response from the method.
+ Response *GRPCResponseExpr
+ // GRPCErrors is the list of all the possible error gRPC responses.
+ GRPCErrors []*GRPCErrorExpr
+ // Metadata is the metadata to be sent in a gRPC request.
+ Metadata *MappedAttributeExpr
+ // Requirements is the list of security requirements for the gRPC endpoint.
+ Requirements []*SecurityExpr
+ // Meta is a set of key/value pairs with semantic that is
+ // specific to each generator, see dsl.Meta.
+ Meta MetaExpr
+ }
+)
+
+// Name of gRPC endpoint
+func (e *GRPCEndpointExpr) Name() string {
+ return e.MethodExpr.Name
+}
+
+// Description of gRPC endpoint
+func (e *GRPCEndpointExpr) Description() string {
+ return e.MethodExpr.Description
+}
+
+// EvalName returns the generic expression name used in error messages.
+func (e *GRPCEndpointExpr) EvalName() string {
+ var prefix, suffix string
+ if e.Name() != "" {
+ suffix = fmt.Sprintf("gRPC endpoint %#v", e.Name())
+ } else {
+ suffix = "unnamed gRPC endpoint"
+ }
+ if e.Service != nil {
+ prefix = e.Service.EvalName() + " "
+ }
+ return prefix + suffix
+}
+
+// Prepare initializes the Request and Response if nil.
+func (e *GRPCEndpointExpr) Prepare() {
+ if e.Request == nil {
+ e.Request = &AttributeExpr{Type: Empty}
+ }
+ if e.Request.Validation == nil {
+ e.Request.Validation = &ValidationExpr{}
+ }
+ if e.StreamingRequest == nil {
+ e.StreamingRequest = &AttributeExpr{Type: Empty}
+ }
+ if e.StreamingRequest.Validation == nil {
+ e.StreamingRequest.Validation = &ValidationExpr{}
+ }
+ if e.Metadata == nil {
+ e.Metadata = NewEmptyMappedAttributeExpr()
+ }
+ if e.Metadata.Validation == nil {
+ e.Metadata.Validation = &ValidationExpr{}
+ }
+
+	// Make sure there's a default response if none is defined explicitly
+ if e.Response == nil {
+ e.Response = &GRPCResponseExpr{StatusCode: 0}
+ }
+ e.Response.Prepare()
+
+ // Inherit gRPC errors from service and root
+ for _, r := range e.Service.GRPCErrors {
+ e.GRPCErrors = append(e.GRPCErrors, r.Dup())
+ }
+ for _, r := range Root.API.GRPC.Errors {
+ e.GRPCErrors = append(e.GRPCErrors, r.Dup())
+ }
+
+ // Prepare error response
+ for _, er := range e.GRPCErrors {
+ er.Response.Prepare()
+ }
+}
+
+// Validate validates the endpoint expression by checking if the request
+// and responses contains the "rpc:tag" in the meta. It also makes sure
+// that there is only one response per status code.
+func (e *GRPCEndpointExpr) Validate() error {
+	verr := new(eval.ValidationErrors)
+	if e.Name() == "" {
+		verr.Add(e, "Endpoint name cannot be empty")
+	}
+
+	var hasMessage, hasMetadata bool
+	// Validate request
+	if e.Request.Type != Empty {
+		hasMessage = true
+		verr.Merge(e.Request.Validate("gRPC request message", e))
+		verr.Merge(validateMessage(e.Request, e.MethodExpr.Payload, e, true))
+	}
+	if !e.Metadata.IsEmpty() {
+		hasMetadata = true
+		verr.Merge(e.Metadata.Validate("gRPC request metadata", e))
+		verr.Merge(validateMetadata(e.Metadata, e.MethodExpr.Payload, e, true))
+	}
+
+	if pobj := AsObject(e.MethodExpr.Payload.Type); pobj != nil {
+		secAttrs := getSecurityAttributes(e.MethodExpr)
+		switch {
+		case hasMessage && hasMetadata:
+			// ensure the attributes defined in message are not defined in metadata.
+			msgObj := AsObject(e.Request.Type)
+			metObj := AsObject(e.Metadata.Type)
+			for _, msgnat := range *msgObj {
+				for _, metnat := range *metObj {
+					if metnat.Name == msgnat.Name {
+						verr.Add(e, "Attribute %q defined in both request message and metadata. Define the attribute in either message or metadata.", metnat.Name)
+						break
+					}
+				}
+			}
+		case !hasMessage && !hasMetadata:
+			// no request message or metadata is defined. Ensure that the method
+			// payload attributes have "rpc:tag" set (except for security attributes
+			// as they are added to request metadata by default)
+			msgFields := &Object{}
+			if len(secAttrs) > 0 {
+				// add attributes to msgFields from the payload that are not
+				// security attributes
+				var found bool
+				for _, nat := range *pobj {
+					found = false
+					for _, n := range secAttrs {
+						if n == nat.Name {
+							found = true
+							break
+						}
+					}
+					if !found {
+						msgFields.Set(nat.Name, nat.Attribute)
+					}
+				}
+			} else {
+				msgFields = pobj
+			}
+			if len(*msgFields) > 0 {
+				verr.Merge(validateRPCTags(msgFields, e))
+			}
+		}
+	} else {
+		if hasMessage && hasMetadata {
+			verr.Add(e, "Both request message and metadata are defined, but payload is not an object. Define either metadata or message or make payload an object type.")
+		}
+	}
+
+	// Validate response
+	verr.Merge(e.Response.Validate(e))
+
+	// Validate errors
+	for _, er := range e.GRPCErrors {
+		verr.Merge(er.Validate())
+	}
+	return verr
+}
+
+// Finalize ensures the request and response attributes are initialized.
+func (e *GRPCEndpointExpr) Finalize() {
+ if pobj := AsObject(e.MethodExpr.Payload.Type); pobj != nil {
+ // addToMetadata adds the given field to metadata. tName maps the attribute
+ // name to the given transport name.
+ addToMetadata := func(field string, tName string) {
+ attr := pobj.Attribute(field)
+ e.Metadata.Type.(*Object).Set(field, attr)
+ if tName != "" {
+ e.Metadata.Map(tName, field)
+ }
+ if e.MethodExpr.Payload.IsRequired(field) {
+ e.Metadata.Validation.AddRequired(field)
+ }
+ }
+
+ // Initialize any security attributes in request metadata unless it is
+ // specified explicitly in the request message via the DSL.
+ if reqLen := len(e.MethodExpr.Requirements); reqLen > 0 {
+ e.Requirements = make([]*SecurityExpr, 0, reqLen)
+ for _, req := range e.MethodExpr.Requirements {
+ dupReq := DupRequirement(req)
+ for _, sch := range dupReq.Schemes {
+ var field string
+ switch sch.Kind {
+ case NoKind:
+ continue
+ case BasicAuthKind:
+ field = TaggedAttribute(e.MethodExpr.Payload, "security:username")
+ sch.Name, sch.In = findKey(e, field)
+ if sch.Name == "" {
+ addToMetadata(field, "")
+ }
+ field = TaggedAttribute(e.MethodExpr.Payload, "security:password")
+ sch.Name, sch.In = findKey(e, field)
+ if sch.Name == "" {
+ addToMetadata(field, "")
+ }
+ continue
+ case APIKeyKind:
+ field = TaggedAttribute(e.MethodExpr.Payload, "security:apikey:"+sch.SchemeName)
+ case JWTKind:
+ field = TaggedAttribute(e.MethodExpr.Payload, "security:token")
+ case OAuth2Kind:
+ field = TaggedAttribute(e.MethodExpr.Payload, "security:accesstoken")
+ }
+ sch.Name, sch.In = findKey(e, field)
+ if sch.Name == "" {
+ sch.Name = "authorization"
+ addToMetadata(field, sch.Name)
+ }
+ }
+ e.Requirements = append(e.Requirements, dupReq)
+ }
+ }
+
+ // If endpoint defines streaming payload, then add the attributes in method
+ // payload type to request metadata.
+ if e.MethodExpr.StreamingPayload.Type != Empty {
+ for _, nat := range *pobj {
+ addToMetadata(nat.Name, "")
+ }
+ }
+
+ // msgObj contains only the attributes in the method payload that must
+ // be added to the request message type after removing attributes
+ // specified in the request metadata.
+ msgObj := Dup(pobj).(*Object)
+ for _, nat := range *AsObject(e.Metadata.Type) {
+ // initialize metadata attribute from method payload
+ initAttrFromDesign(nat.Attribute, pobj.Attribute(nat.Name))
+ if e.MethodExpr.Payload.IsRequired(nat.Name) {
+ e.Metadata.Validation.AddRequired(nat.Name)
+ }
+ // remove metadata attributes from the message attributes
+ msgObj.Delete(nat.Name)
+ }
+
+ // add any message attributes to request message if not added already
+ if len(*msgObj) > 0 {
+ if e.Request.Type == Empty {
+ e.Request.Type = &Object{}
+ }
+ reqObj := AsObject(e.Request.Type)
+ for _, nat := range *msgObj {
+ if reqObj.Attribute(nat.Name) == nil {
+ reqObj.Set(nat.Name, nat.Attribute)
+ }
+ if e.MethodExpr.Payload.IsRequired(nat.Name) {
+ e.Request.Validation.AddRequired(nat.Name)
+ }
+ }
+ }
+ for _, nat := range *AsObject(e.Request.Type) {
+ // initialize message attribute
+ patt := pobj.Attribute(nat.Name)
+ initAttrFromDesign(nat.Attribute, patt)
+ if nat.Attribute.Meta == nil {
+ nat.Attribute.Meta = patt.Meta
+ } else {
+ nat.Attribute.Meta.Merge(patt.Meta)
+ }
+ }
+ } else {
+ // method payload is not an object type.
+ if e.MethodExpr.StreamingPayload.Type != Empty {
+ // endpoint defines streaming payload. So add the method payload to
+			// request metadata under "goa_payload" field
+ e.Metadata.Type.(*Object).Set("goa_payload", e.MethodExpr.Payload)
+ e.Metadata.Validation.AddRequired("goa_payload")
+ } else {
+ initAttrFromDesign(e.Request, e.MethodExpr.Payload)
+ }
+ }
+
+ // Finalize streaming payload type if defined
+ if e.MethodExpr.StreamingPayload.Type != Empty {
+ initAttrFromDesign(e.StreamingRequest, e.MethodExpr.StreamingPayload)
+ }
+
+ // Finalize response
+ e.Response.Finalize(e, e.MethodExpr.Result)
+
+ // Finalize errors
+ for _, gerr := range e.GRPCErrors {
+ gerr.Finalize(e)
+ }
+}
+
+// validateMessage validates the gRPC message. It compares the given message
+// with the service type (Payload or Result) and ensures all the attributes
+// defined in the message type are found in the service type and the attributes
+// are set with unique "rpc:tag" numbers.
+//
+// msgAtt is the Request/Response message attribute. validateMessage assumes
+// that the msgAtt is not Empty.
+// serviceAtt is the Payload/Result attribute.
+// e is the endpoint expression.
+// req if true indicates the Request message is being validated.
+func validateMessage(msgAtt, serviceAtt *AttributeExpr, e *GRPCEndpointExpr, req bool) *eval.ValidationErrors {
+ verr := new(eval.ValidationErrors)
+ msgKind := "Response"
+ serviceKind := "Result"
+ if req {
+ msgKind = "Request"
+ serviceKind = "Payload"
+ }
+ if serviceAtt.Type == Empty {
+ verr.Add(e, "%s message is defined but %s is not defined in method", msgKind, serviceKind)
+ return verr
+ }
+
+ if srvcObj := AsObject(serviceAtt.Type); srvcObj == nil {
+ // service type (payload or result) is a primitive, array, or map
+ // The message type must have at most one field and that field must be
+ // of the same type as the service type.
+ msgObj := AsObject(msgAtt.Type)
+ if flen := len(*msgObj); flen != 1 {
+ verr.Add(e, "%s is not an object type. %s message should have at most 1 field. Got %d.", serviceKind, msgKind, flen)
+ } else {
+ for _, f := range *msgObj {
+ if f.Attribute.Type != serviceAtt.Type {
+ verr.Add(e, "%s message field %q is %q type but the %s type is %q.", msgKind, f.Name, f.Attribute.Type.Name(), serviceKind, serviceAtt.Type.Name())
+ }
+ }
+ }
+ } else {
+ // service type is an object. Verify the attributes defined in the
+ // message are found in the service type.
+ // msgFields will contain the attributes from the service type that has the
+ // same name as the message attributes so that we can validate the
+ // rpc:tag in the meta.
+ msgFields := &Object{}
+ var found bool
+ for _, nat := range *AsObject(msgAtt.Type) {
+ found = false
+ for _, snat := range *srvcObj {
+ if nat.Name == snat.Name {
+ msgFields.Set(snat.Name, snat.Attribute)
+ found = true
+ break
+ }
+ }
+ if !found {
+ verr.Add(e, "%s message attribute %q is not found in %s", msgKind, nat.Name, serviceKind)
+ }
+ }
+ // validate rpc:tag in meta for the message fields
+ verr.Merge(validateRPCTags(msgFields, e))
+ }
+ return verr
+}
+
+// validateRPCTags verifies whether every attribute in the object type has
+// "rpc:tag" set in the meta and the tag numbers are unique.
+func validateRPCTags(fields *Object, e *GRPCEndpointExpr) *eval.ValidationErrors {
+ verr := new(eval.ValidationErrors)
+ foundRPC := make(map[string]string)
+ for _, nat := range *fields {
+ if tag, ok := nat.Attribute.Meta["rpc:tag"]; !ok {
+ verr.Add(e, "attribute %q does not have \"rpc:tag\" defined in the meta", nat.Name)
+ } else if a, ok := foundRPC[tag[0]]; ok {
+ verr.Add(e, "field number %s in attribute %q already exists for attribute %q", tag[0], nat.Name, a)
+ } else {
+ foundRPC[tag[0]] = nat.Name
+ }
+ }
+ return verr
+}
+
+// validateMetadata validates the gRPC metadata. It compares the given metadata
+// with the service type (Payload or Result) and ensures all the attributes
+// defined in the metadata type are found in the service type.
+//
+// metAtt is the Request/Response metadata attribute. validateMetadata assumes
+// that the metAtt is not Empty.
+// serviceAtt is the Payload/Result attribute.
+// e is the endpoint expression.
+// req if true indicates the Request metadata is being validated.
+func validateMetadata(metAtt *MappedAttributeExpr, serviceAtt *AttributeExpr, e *GRPCEndpointExpr, req bool) *eval.ValidationErrors {
+ verr := new(eval.ValidationErrors)
+ metKind := "Response"
+ serviceKind := "Result"
+ if req {
+ metKind = "Request"
+ serviceKind = "Payload"
+ }
+ if serviceAtt.Type == Empty {
+ verr.Add(e, "%s metadata is defined but %s is not defined in method", metKind, serviceKind)
+ return verr
+ }
+ if svcObj := AsObject(serviceAtt.Type); svcObj != nil {
+ // service type is an object type. Ensure the attributes defined in
+ // the metadata are found in the service type.
+ var found bool
+ for _, nat := range *AsObject(metAtt.Type) {
+ found = false
+ for _, tnat := range *svcObj {
+ if nat.Name == tnat.Name {
+ found = true
+ break
+ }
+ }
+ if !found {
+ verr.Add(e, "%s metadata attribute %q is not found in %s", metKind, nat.Name, serviceKind)
+ }
+ }
+ } else {
+ verr.Add(e, "%s metadata is defined but method %s is not an object type", metKind, serviceKind)
+ }
+ return verr
+}
+
+// getSecurityAttributes returns the attributes that describes a security
+// scheme from a method expression.
+func getSecurityAttributes(m *MethodExpr) []string {
+ secAttrs := []string{}
+ for _, req := range m.Requirements {
+ for _, sch := range req.Schemes {
+ switch sch.Kind {
+ case BasicAuthKind:
+ if field := TaggedAttribute(m.Payload, "security:username"); field != "" {
+ secAttrs = append(secAttrs, field)
+ }
+ if field := TaggedAttribute(m.Payload, "security:password"); field != "" {
+ secAttrs = append(secAttrs, field)
+ }
+ case APIKeyKind:
+ if field := TaggedAttribute(m.Payload, "security:apikey:"+sch.SchemeName); field != "" {
+ secAttrs = append(secAttrs, field)
+ }
+ case JWTKind:
+ if field := TaggedAttribute(m.Payload, "security:token"); field != "" {
+ secAttrs = append(secAttrs, field)
+ }
+ case OAuth2Kind:
+ if field := TaggedAttribute(m.Payload, "security:accesstoken"); field != "" {
+ secAttrs = append(secAttrs, field)
+ }
+ }
+ }
+ }
+ return secAttrs
+}
diff --git a/vendor/goa.design/goa/expr/grpc_error.go b/vendor/goa.design/goa/expr/grpc_error.go
new file mode 100644
index 000000000..2d38568f5
--- /dev/null
+++ b/vendor/goa.design/goa/expr/grpc_error.go
@@ -0,0 +1,69 @@
+package expr
+
+import (
+ "goa.design/goa/eval"
+)
+
+type (
+ // GRPCErrorExpr defines a gRPC error response including its name,
+ // status, and result type.
+ GRPCErrorExpr struct {
+ // ErrorExpr is the underlying goa design error expression.
+ *ErrorExpr
+ // Name of error, we need a separate copy of the name to match it
+ // up with the appropriate ErrorExpr.
+ Name string
+ // Response is the corresponding gRPC response.
+ Response *GRPCResponseExpr
+ }
+)
+
+// EvalName returns the generic definition name used in error messages.
+func (e *GRPCErrorExpr) EvalName() string {
+ return "gRPC error " + e.Name
+}
+
+// Validate makes sure there is a error expression that matches the gRPC error
+// expression.
+func (e *GRPCErrorExpr) Validate() *eval.ValidationErrors {
+ verr := new(eval.ValidationErrors)
+ switch p := e.Response.Parent.(type) {
+ case *GRPCEndpointExpr:
+ if p.MethodExpr.Error(e.Name) == nil {
+ verr.Add(e, "Error %#v does not match an error defined in the method", e.Name)
+ }
+ case *GRPCServiceExpr:
+ if p.Error(e.Name) == nil {
+ verr.Add(e, "Error %#v does not match an error defined in the service", e.Name)
+ }
+ case *RootExpr:
+ if Root.Error(e.Name) == nil {
+ verr.Add(e, "Error %#v does not match an error defined in the API", e.Name)
+ }
+ }
+ return verr
+}
+
+// Finalize looks up the corresponding method error expression.
+func (e *GRPCErrorExpr) Finalize(a *GRPCEndpointExpr) {
+ var ee *ErrorExpr
+ switch p := e.Response.Parent.(type) {
+ case *GRPCEndpointExpr:
+ ee = p.MethodExpr.Error(e.Name)
+ case *GRPCServiceExpr:
+ ee = p.Error(e.Name)
+ case *RootExpr:
+ ee = Root.Error(e.Name)
+ }
+ e.ErrorExpr = ee
+ e.Response.Finalize(a, e.AttributeExpr)
+}
+
+// Dup creates a copy of the error expression.
+func (e *GRPCErrorExpr) Dup() *GRPCErrorExpr {
+ return &GRPCErrorExpr{
+ ErrorExpr: e.ErrorExpr,
+ Name: e.Name,
+ Response: e.Response.Dup(),
+ }
+}
diff --git a/vendor/goa.design/goa/expr/grpc_response.go b/vendor/goa.design/goa/expr/grpc_response.go
new file mode 100644
index 000000000..c72a18bd8
--- /dev/null
+++ b/vendor/goa.design/goa/expr/grpc_response.go
@@ -0,0 +1,211 @@
+package expr
+
+import (
+ "fmt"
+
+ "goa.design/goa/eval"
+)
+
+type (
+ // GRPCResponseExpr defines a gRPC response including its status code
+ // and result type.
+ GRPCResponseExpr struct {
+ // gRPC status code
+ StatusCode int
+ // Response description
+ Description string
+ // Response Message if any
+ Message *AttributeExpr
+ // Parent expression, one of EndpointExpr, ServiceExpr or
+ // RootExpr.
+ Parent eval.Expression
+ // Headers is the header metadata to be sent in the gRPC response.
+ Headers *MappedAttributeExpr
+ // Trailers is the trailer metadata to be sent in the gRPC response.
+ Trailers *MappedAttributeExpr
+ // Meta is a list of key/value pairs.
+ Meta MetaExpr
+ }
+)
+
+// EvalName returns the generic definition name used in error messages.
+func (r *GRPCResponseExpr) EvalName() string {
+ var suffix string
+ if r.Parent != nil {
+ suffix = fmt.Sprintf(" of %s", r.Parent.EvalName())
+ }
+ return "gRPC response" + suffix
+}
+
+// Prepare makes sure the response is initialized even if not done explicitly
+// by the DSL.
+func (r *GRPCResponseExpr) Prepare() {
+ if r.Message == nil {
+ r.Message = &AttributeExpr{Type: Empty}
+ }
+ if r.Message.Validation == nil {
+ r.Message.Validation = &ValidationExpr{}
+ }
+ if r.Headers == nil {
+ r.Headers = NewEmptyMappedAttributeExpr()
+ }
+ if r.Headers.Validation == nil {
+ r.Headers.Validation = &ValidationExpr{}
+ }
+ if r.Trailers == nil {
+ r.Trailers = NewEmptyMappedAttributeExpr()
+ }
+ if r.Trailers.Validation == nil {
+ r.Trailers.Validation = &ValidationExpr{}
+ }
+}
+
+// Validate checks that the response definition is consistent: its status is set
+// and the result type definition if any is valid.
+func (r *GRPCResponseExpr) Validate(e *GRPCEndpointExpr) *eval.ValidationErrors {
+	verr := new(eval.ValidationErrors)
+
+	var hasMessage, hasHeaders, hasTrailers bool
+	if r.Message.Type != Empty {
+		hasMessage = true
+		verr.Merge(r.Message.Validate("gRPC response message", r))
+		verr.Merge(validateMessage(r.Message, e.MethodExpr.Result, e, false))
+	}
+	if !r.Headers.IsEmpty() {
+		hasHeaders = true
+		verr.Merge(r.Headers.Validate("gRPC response header metadata", r))
+		verr.Merge(validateMetadata(r.Headers, e.MethodExpr.Result, e, false))
+	}
+	if !r.Trailers.IsEmpty() {
+		hasTrailers = true
+		verr.Merge(r.Trailers.Validate("gRPC response trailer metadata", r))
+		verr.Merge(validateMetadata(r.Trailers, e.MethodExpr.Result, e, false))
+	}
+
+	if robj := AsObject(e.MethodExpr.Result.Type); robj != nil {
+		switch {
+		case hasMessage && hasHeaders:
+			// ensure the attributes defined in message are not defined in
+			// header metadata.
+			metObj := AsObject(r.Headers.Type)
+			for _, nat := range *AsObject(r.Message.Type) {
+				if metObj.Attribute(nat.Name) != nil {
+					verr.Add(e, "Attribute %q defined in both response message and header metadata. Define the attribute in either message or header metadata.", nat.Name)
+				}
+			}
+		case hasMessage && hasTrailers:
+			// ensure the attributes defined in message are not defined in
+			// trailer metadata.
+			metObj := AsObject(r.Trailers.Type)
+			for _, nat := range *AsObject(r.Message.Type) {
+				if metObj.Attribute(nat.Name) != nil {
+					verr.Add(e, "Attribute %q defined in both response message and trailer metadata. Define the attribute in either message or trailer metadata.", nat.Name)
+				}
+			}
+		case hasHeaders && hasTrailers:
+			// ensure the attributes defined in header metadata are not defined in
+			// trailer metadata
+			hdrObj := AsObject(r.Headers.Type)
+			for _, nat := range *AsObject(r.Trailers.Type) {
+				if hdrObj.Attribute(nat.Name) != nil {
+					verr.Add(e, "Attribute %q defined in both response header and trailer metadata. Define the attribute in either header or trailer metadata.", nat.Name)
+				}
+			}
+		case !hasMessage && !hasHeaders && !hasTrailers:
+			// no response message or metadata is defined. Ensure that the method
+			// result attributes have "rpc:tag" set
+			verr.Merge(validateRPCTags(robj, e))
+		}
+	} else {
+		switch {
+		case hasMessage && hasHeaders:
+			verr.Add(e, "Both response message and header metadata are defined, but result is not an object. Define either header metadata or message or make result an object type.")
+		case hasMessage && hasTrailers:
+			verr.Add(e, "Both response message and trailer metadata are defined, but result is not an object. Define either trailer metadata or message or make result an object type.")
+		case hasHeaders && hasTrailers:
+			verr.Add(e, "Both response header and trailer metadata are defined, but result is not an object. Define either trailer or header metadata or make result an object type.")
+		}
+	}
+
+	return verr
+}
+
+// Finalize ensures that the response message type is set. If Message DSL is
+// used to set the response message then the message type is set by mapping
+// the attributes to the method Result expression. If no response message set
+// explicitly, the message is set from the method Result expression.
+func (r *GRPCResponseExpr) Finalize(a *GRPCEndpointExpr, svcAtt *AttributeExpr) {
+ r.Parent = a
+
+ if svcObj := AsObject(svcAtt.Type); svcObj != nil {
+ // msgObj contains only the attributes in the method result that must
+ // be added to the response message type after removing attributes
+ // specified in the response metadata.
+ msgObj := Dup(svcObj).(*Object)
+ // Initialize response header metadata if present
+ for _, nat := range *AsObject(r.Headers.Type) {
+ // initialize metadata attribute from method result
+ initAttrFromDesign(nat.Attribute, svcObj.Attribute(nat.Name))
+ if svcAtt.IsRequired(nat.Name) {
+ r.Headers.Validation.AddRequired(nat.Name)
+ }
+ // remove metadata attributes from the message attributes
+ msgObj.Delete(nat.Name)
+ }
+ // Initialize response trailer metadata if present
+ for _, nat := range *AsObject(r.Trailers.Type) {
+ // initialize metadata attribute from method result
+ initAttrFromDesign(nat.Attribute, svcObj.Attribute(nat.Name))
+ if svcAtt.IsRequired(nat.Name) {
+ r.Trailers.Validation.AddRequired(nat.Name)
+ }
+ // remove metadata attributes from the message attributes
+ msgObj.Delete(nat.Name)
+ }
+ // add any message attributes to response message if not added already
+ if len(*msgObj) > 0 {
+ if r.Message.Type == Empty {
+ r.Message.Type = &Object{}
+ }
+ resObj := AsObject(r.Message.Type)
+ for _, nat := range *msgObj {
+ if resObj.Attribute(nat.Name) == nil {
+ resObj.Set(nat.Name, nat.Attribute)
+ }
+ }
+ }
+ for _, nat := range *AsObject(r.Message.Type) {
+ // initialize message attribute from method result
+ svcAtt := svcObj.Attribute(nat.Name)
+ initAttrFromDesign(nat.Attribute, svcAtt)
+ if nat.Attribute.Meta == nil {
+ nat.Attribute.Meta = svcAtt.Meta
+ } else {
+ nat.Attribute.Meta.Merge(svcAtt.Meta)
+ }
+ }
+ } else {
+ // method result is not an object type. Initialize response header or
+ // trailer metadata if defined or else initialize response message.
+ if !r.Headers.IsEmpty() {
+ initAttrFromDesign(r.Headers.AttributeExpr, svcAtt)
+ } else if !r.Trailers.IsEmpty() {
+ initAttrFromDesign(r.Trailers.AttributeExpr, svcAtt)
+ } else {
+ initAttrFromDesign(r.Message, svcAtt)
+ }
+ }
+}
+
+// Dup creates a copy of the response expression.
+func (r *GRPCResponseExpr) Dup() *GRPCResponseExpr {
+ return &GRPCResponseExpr{
+ StatusCode: r.StatusCode,
+ Description: r.Description,
+ Parent: r.Parent,
+ Meta: r.Meta,
+ Message: DupAtt(r.Message),
+ Headers: NewMappedAttributeExpr(r.Headers.Attribute()),
+ Trailers: NewMappedAttributeExpr(r.Trailers.Attribute()),
+ }
+}
diff --git a/vendor/goa.design/goa/expr/grpc_service.go b/vendor/goa.design/goa/expr/grpc_service.go
new file mode 100644
index 000000000..f23e3eb08
--- /dev/null
+++ b/vendor/goa.design/goa/expr/grpc_service.go
@@ -0,0 +1,113 @@
+package expr
+
+import (
+ "fmt"
+
+ "goa.design/goa/eval"
+)
+
+type (
+ // GRPCServiceExpr describes a gRPC service.
+ GRPCServiceExpr struct {
+ eval.DSLFunc
+ // ServiceExpr is the service expression that backs this service.
+ ServiceExpr *ServiceExpr
+ // Name of parent service if any
+ ParentName string
+ // GRPCEndpoints is the list of service endpoints.
+ GRPCEndpoints []*GRPCEndpointExpr
+ // GRPCErrors lists gRPC errors that apply to all endpoints.
+ GRPCErrors []*GRPCErrorExpr
+ // Meta is a set of key/value pairs with semantic that is
+ // specific to each generator.
+ Meta MetaExpr
+ }
+)
+
+// Name of service (service)
+func (svc *GRPCServiceExpr) Name() string {
+ return svc.ServiceExpr.Name
+}
+
+// Description of service (service)
+func (svc *GRPCServiceExpr) Description() string {
+ return svc.ServiceExpr.Description
+}
+
+// Endpoint returns the service endpoint with the given name or nil if there
+// isn't one.
+func (svc *GRPCServiceExpr) Endpoint(name string) *GRPCEndpointExpr {
+ for _, a := range svc.GRPCEndpoints {
+ if a.Name() == name {
+ return a
+ }
+ }
+ return nil
+}
+
+// EndpointFor builds the endpoint for the given method.
+func (svc *GRPCServiceExpr) EndpointFor(name string, m *MethodExpr) *GRPCEndpointExpr {
+ if a := svc.Endpoint(name); a != nil {
+ return a
+ }
+ a := &GRPCEndpointExpr{
+ MethodExpr: m,
+ Service: svc,
+ }
+ svc.GRPCEndpoints = append(svc.GRPCEndpoints, a)
+ return a
+}
+
+// Error returns the error with the given name.
+func (svc *GRPCServiceExpr) Error(name string) *ErrorExpr {
+ for _, erro := range svc.ServiceExpr.Errors {
+ if erro.Name == name {
+ return erro
+ }
+ }
+ return Root.Error(name)
+}
+
+// GRPCError returns the service gRPC error with given name if any.
+func (svc *GRPCServiceExpr) GRPCError(name string) *GRPCErrorExpr {
+ for _, erro := range svc.GRPCErrors {
+ if erro.Name == name {
+ return erro
+ }
+ }
+ return nil
+}
+
+// EvalName returns the generic definition name used in error messages.
+func (svc *GRPCServiceExpr) EvalName() string {
+ if svc.Name() == "" {
+ return "unnamed service"
+ }
+ return fmt.Sprintf("service %#v", svc.Name())
+}
+
+// Prepare initializes the error responses.
+func (svc *GRPCServiceExpr) Prepare() {
+ for _, er := range svc.GRPCErrors {
+ er.Response.Prepare()
+ }
+}
+
+// Validate makes sure the service is valid.
+func (svc *GRPCServiceExpr) Validate() error {
+ verr := new(eval.ValidationErrors)
+ // Validate errors
+ for _, er := range svc.GRPCErrors {
+ verr.Merge(er.Validate())
+ }
+ for _, er := range Root.API.GRPC.Errors {
+ // This may result in the same error being validated multiple
+ // times however service is the top level expression being
+ // walked and errors cannot be walked until all expressions have
+ // run. Another solution could be to append a new dynamically
+ // generated root that the eval engine would process after. Keep
+ // things simple for now.
+ verr.Merge(er.Validate())
+ }
+ return verr
+}
diff --git a/vendor/goa.design/goa/expr/helpers.go b/vendor/goa.design/goa/expr/helpers.go
new file mode 100644
index 000000000..500e2eb14
--- /dev/null
+++ b/vendor/goa.design/goa/expr/helpers.go
@@ -0,0 +1,38 @@
+package expr
+
+import (
+ "goa.design/goa/eval"
+)
+
+// findKey finds the given key in the endpoint expression and returns the
+// transport element name and the position (header, query, or body for HTTP or
+// message, metadata for gRPC endpoint).
+func findKey(exp eval.Expression, keyAtt string) (string, string) {
+ switch e := exp.(type) {
+ case *HTTPEndpointExpr:
+ if n, exists := e.Params.FindKey(keyAtt); exists {
+ return n, "query"
+ } else if n, exists := e.Headers.FindKey(keyAtt); exists {
+ return n, "header"
+ } else if e.Body == nil {
+ return "", "header"
+ }
+ if _, ok := e.Body.Meta["http:body"]; ok {
+ if e.Body.Find(keyAtt) != nil {
+ return keyAtt, "body"
+ }
+ if m, ok := e.Body.Meta["origin:attribute"]; ok && m[0] == keyAtt {
+ return keyAtt, "body"
+ }
+ }
+ return "", "header"
+ case *GRPCEndpointExpr:
+ if e.Request.Find(keyAtt) != nil {
+ return keyAtt, "message"
+ } else if n, exists := e.Metadata.FindKey(keyAtt); exists {
+ return n, "metadata"
+ }
+ return "", "metadata"
+ }
+ return "", ""
+}
diff --git a/vendor/goa.design/goa/expr/http.go b/vendor/goa.design/goa/expr/http.go
new file mode 100644
index 000000000..5534bbace
--- /dev/null
+++ b/vendor/goa.design/goa/expr/http.go
@@ -0,0 +1,83 @@
+package expr
+
+import (
+ "regexp"
+)
+
+type (
+ // HTTPExpr contains the API level HTTP specific expressions.
+ HTTPExpr struct {
+ // Path is the common request path prefix to all the service
+ // HTTP endpoints.
+ Path string
+ // Params defines the HTTP request path and query parameters
+ // common to all the API endpoints.
+ Params *MappedAttributeExpr
+ // Headers defines the HTTP request headers common to to all
+ // the API endpoints.
+ Headers *MappedAttributeExpr
+ // Consumes lists the mime types supported by the API
+ // controllers.
+ Consumes []string
+ // Produces lists the mime types generated by the API
+ // controllers.
+ Produces []string
+ // Services contains the services created by the DSL.
+ Services []*HTTPServiceExpr
+ // Errors lists the error HTTP responses.
+ Errors []*HTTPErrorExpr
+ }
+)
+
+// HTTPWildcardRegex is the regular expression used to capture path
+// parameters.
+var HTTPWildcardRegex = regexp.MustCompile(`/{\*?([a-zA-Z0-9_]+)}`)
+
+// ExtractHTTPWildcards returns the names of the wildcards that appear in
+// a HTTP path.
+func ExtractHTTPWildcards(path string) []string {
+ matches := HTTPWildcardRegex.FindAllStringSubmatch(path, -1)
+ wcs := make([]string, len(matches))
+ for i, m := range matches {
+ wcs[i] = m[1]
+ }
+ return wcs
+}
+
+// Service returns the service with the given name if any.
+func (h *HTTPExpr) Service(name string) *HTTPServiceExpr {
+ for _, res := range h.Services {
+ if res.Name() == name {
+ return res
+ }
+ }
+ return nil
+}
+
+// ServiceFor creates a new or returns the existing service definition for the
+// given service.
+func (h *HTTPExpr) ServiceFor(s *ServiceExpr) *HTTPServiceExpr {
+ if res := h.Service(s.Name); res != nil {
+ return res
+ }
+ res := &HTTPServiceExpr{
+ ServiceExpr: s,
+ }
+ h.Services = append(h.Services, res)
+ return res
+}
+
+// EvalName returns the name printed in case of evaluation error.
+func (h *HTTPExpr) EvalName() string {
+ return "API HTTP"
+}
+
+// Finalize initializes Consumes and Produces with defaults if not set.
+func (h *HTTPExpr) Finalize() {
+ if len(h.Consumes) == 0 {
+ h.Consumes = []string{"application/json", "application/xml", "application/gob"}
+ }
+ if len(h.Produces) == 0 {
+ h.Produces = []string{"application/json", "application/xml", "application/gob"}
+ }
+}
diff --git a/vendor/goa.design/goa/expr/http_body_types.go b/vendor/goa.design/goa/expr/http_body_types.go
new file mode 100644
index 000000000..baea9c62a
--- /dev/null
+++ b/vendor/goa.design/goa/expr/http_body_types.go
@@ -0,0 +1,329 @@
+package expr
+
+import (
+ "net/http"
+ "strings"
+ "unicode"
+)
+
+// httpRequestBody returns an attribute describing the HTTP request body of the
+// given endpoint. If the DSL defines a body explicitly via the Body function
+// then the corresponding attribute is used. Otherwise the attribute is computed
+// by removing the attributes of the method payload used to define headers and
+// parameters.
+func httpRequestBody(a *HTTPEndpointExpr) *AttributeExpr {
+ if a.Body != nil {
+ return a.Body
+ }
+
+ const suffix = "RequestBody"
+ var (
+ payload = a.MethodExpr.Payload
+ headers = a.Headers
+ params = a.Params
+ name = concat(a.Name(), "Request", "Body")
+ userField string
+ passField string
+ )
+ {
+ obj := AsObject(payload.Type)
+ if obj != nil {
+ for _, at := range *obj {
+ if _, ok := at.Attribute.Meta["security:username"]; ok {
+ userField = at.Name
+ }
+ if _, ok := at.Attribute.Meta["security:password"]; ok {
+ passField = at.Name
+ }
+ if userField != "" && passField != "" {
+ break
+ }
+ }
+ }
+ }
+
+ bodyOnly := headers.IsEmpty() && params.IsEmpty() && a.MapQueryParams == nil
+
+ // 1. If Payload is not an object then check whether there are params or
+ // headers defined and if so return empty type (payload encoded in
+ // request params or headers) otherwise return payload type (payload
+ // encoded in request body).
+ if !IsObject(payload.Type) {
+ if bodyOnly {
+ payload = DupAtt(payload)
+ renameType(payload, name, suffix)
+ return payload
+ }
+ return &AttributeExpr{Type: Empty}
+ }
+
+ // 2. Remove header and param attributes
+ body := NewMappedAttributeExpr(payload)
+ removeAttributes(body, headers)
+ removeAttributes(body, params)
+ if a.MapQueryParams != nil && *a.MapQueryParams != "" {
+ removeAttribute(body, *a.MapQueryParams)
+ }
+ if userField != "" {
+ removeAttribute(body, userField)
+ }
+ if passField != "" {
+ removeAttribute(body, passField)
+ }
+
+ // 3. Return empty type if no attribute left
+ if len(*AsObject(body.Type)) == 0 {
+ return &AttributeExpr{Type: Empty}
+ }
+
+ // 4. Build computed user type
+ att := body.Attribute()
+ ut := &UserTypeExpr{
+ AttributeExpr: att,
+ TypeName: name,
+ }
+ appendSuffix(ut.Attribute().Type, suffix)
+
+ return &AttributeExpr{
+ Type: ut,
+ Validation: att.Validation,
+ UserExamples: att.UserExamples,
+ }
+}
+
+// httpStreamingBody returns an attribute representing the structs being
+// streamed via websocket.
+func httpStreamingBody(e *HTTPEndpointExpr) *AttributeExpr {
+ if !e.MethodExpr.IsStreaming() || e.MethodExpr.Stream == ServerStreamKind {
+ return nil
+ }
+ att := e.MethodExpr.StreamingPayload
+ if !IsObject(att.Type) {
+ return DupAtt(att)
+ }
+ const suffix = "StreamingBody"
+ ut := &UserTypeExpr{
+ AttributeExpr: DupAtt(att),
+ TypeName: concat(e.Name(), "Streaming", "Body"),
+ }
+ appendSuffix(ut.Attribute().Type, suffix)
+
+ return &AttributeExpr{
+ Type: ut,
+ Validation: att.Validation,
+ UserExamples: att.UserExamples,
+ }
+}
+
+// httpResponseBody returns an attribute representing the HTTP response body for
+// the given endpoint and response. If the DSL defines a body explicitly via the
+// Body function then the corresponding attribute is used. Otherwise the
+// attribute is computed by removing the attributes of the method payload used
+// to define headers.
+func httpResponseBody(a *HTTPEndpointExpr, resp *HTTPResponseExpr) *AttributeExpr {
+ var name, suffix string
+ if len(a.Responses) > 1 {
+ suffix = http.StatusText(resp.StatusCode)
+ }
+ name = a.Name() + suffix
+ return buildHTTPResponseBody(name, a.MethodExpr.Result, resp)
+}
+
+// httpErrorResponseBody returns an attribute describing the response body of a
+// given error. If the DSL defines a body explicitly via the Body function then
+// the corresponding attribute is returned. Otherwise the attribute is computed
+// by removing the attributes of the error used to define headers and
+// parameters.
+func httpErrorResponseBody(a *HTTPEndpointExpr, v *HTTPErrorExpr) *AttributeExpr {
+ name := a.Name() + "_" + v.ErrorExpr.Name
+ return buildHTTPResponseBody(name, v.ErrorExpr.AttributeExpr, v.Response)
+}
+
+func buildHTTPResponseBody(name string, attr *AttributeExpr, resp *HTTPResponseExpr) *AttributeExpr {
+ const suffix = "ResponseBody"
+ name = concat(name, "Response", "Body")
+ if attr == nil || attr.Type == Empty {
+ return &AttributeExpr{Type: Empty}
+ }
+ if resp.Body != nil {
+ return resp.Body
+ }
+
+ // 1. If attribute is not an object then check whether there are headers
+ // defined and if so return empty type (attr encoded in response
+ // headers) otherwise return renamed attr type (attr encoded in
+ // response body).
+ if !IsObject(attr.Type) {
+ if resp.Headers.IsEmpty() {
+ attr = DupAtt(attr)
+ renameType(attr, name, "Response") // Do not use ResponseBody as it could clash with name of element
+ return attr
+ }
+ return &AttributeExpr{Type: Empty}
+ }
+
+ // 2. Remove header attributes
+ body := NewMappedAttributeExpr(attr)
+ removeAttributes(body, resp.Headers)
+
+ // 3. Return empty type if no attribute left
+ if len(*AsObject(body.Type)) == 0 {
+ return &AttributeExpr{Type: Empty}
+ }
+
+ // 4. Build computed user type
+ userType := &UserTypeExpr{
+ AttributeExpr: body.Attribute(),
+ TypeName: name,
+ }
+ appendSuffix(userType.Attribute().Type, suffix)
+ rt, isrt := attr.Type.(*ResultTypeExpr)
+ if !isrt {
+ return &AttributeExpr{
+ Type: userType,
+ Validation: userType.Validation,
+ Meta: attr.Meta,
+ }
+ }
+ views := make([]*ViewExpr, len(rt.Views))
+ for i, v := range rt.Views {
+ mv := NewMappedAttributeExpr(v.AttributeExpr)
+ removeAttributes(mv, resp.Headers)
+ nv := &ViewExpr{
+ AttributeExpr: mv.Attribute(),
+ Name: v.Name,
+ }
+ views[i] = nv
+ }
+ nmt := &ResultTypeExpr{
+ UserTypeExpr: userType,
+ Identifier: rt.Identifier,
+ ContentType: rt.ContentType,
+ Views: views,
+ }
+ for _, v := range views {
+ v.Parent = nmt
+ }
+ return &AttributeExpr{
+ Type: nmt,
+ Validation: userType.Validation,
+ Meta: attr.Meta,
+ }
+}
+
+// buildBodyTypeName concatenates the given strings to generate the
+// endpoint's body type name.
+//
+// The concatenation algorithm is:
+// 1) If the first string contains underscores and starts with a lower case,
+// the rest of the strings are converted to lower case and concatenated with
+// underscores.
+// e.g. concat("my_endpoint", "Request", "BODY") => "my_endpoint_request_body"
+// 2) If the first string contains underscores and starts with a upper case,
+// the rest of the strings are converted to title case and concatenated with
+// underscores.
+// e.g. concat("My_endpoint", "response", "body") => "My_endpoint_Response_Body"
+// 3) If the first string is a single word or camelcased, the rest of the
+// strings are concatenated to form a valid upper camelcase.
+// e.g. concat("myEndpoint", "streaming", "Body") => "MyEndpointStreamingBody"
+//
+func concat(strs ...string) string {
+ if len(strs) == 1 {
+ return strs[0]
+ }
+
+ // hasUnderscore returns true if the string has at least one underscore.
+ hasUnderscore := func(str string) bool {
+ for i := 0; i < len(str); i++ {
+ if rune(str[i]) == '_' {
+ return true
+ }
+ }
+ return false
+ }
+
+ // isLower returns true if the first letter in the screen is lower-case.
+ isLower := func(str string) bool {
+ return unicode.IsLower(rune(str[0]))
+ }
+
+ name := strs[0]
+ switch {
+ case isLower(name) && hasUnderscore(name):
+ for i := 1; i < len(strs); i++ {
+ name += "_" + strings.ToLower(strs[i])
+ }
+ case !isLower(name) && hasUnderscore(name):
+ for i := 1; i < len(strs); i++ {
+ name += "_" + strings.Title(strs[i])
+ }
+ default:
+ name = strings.Title(name)
+ for i := 1; i < len(strs); i++ {
+ name += strings.Title(strs[i])
+ }
+ }
+ return name
+}
+
+func renameType(att *AttributeExpr, name, suffix string) {
+ rt := att.Type
+ switch rt.(type) {
+ case UserType:
+ rt.(UserType).Rename(name)
+ appendSuffix(rt.(UserType).Attribute().Type, suffix)
+ case *Object:
+ appendSuffix(rt, suffix)
+ case *Array:
+ appendSuffix(rt, suffix)
+ case *Map:
+ appendSuffix(rt, suffix)
+ }
+}
+
+func appendSuffix(dt DataType, suffix string, seen ...map[string]struct{}) {
+ var s map[string]struct{}
+ if len(seen) > 0 {
+ s = seen[0]
+ } else {
+ s = make(map[string]struct{})
+ seen = append(seen, s)
+ }
+ switch actual := dt.(type) {
+ case UserType:
+ if _, ok := s[actual.ID()]; ok {
+ return
+ }
+ actual.Rename(actual.Name() + suffix)
+ s[actual.ID()] = struct{}{}
+ appendSuffix(actual.Attribute().Type, suffix, seen...)
+ case *Object:
+ for _, nat := range *actual {
+ appendSuffix(nat.Attribute.Type, suffix, seen...)
+ }
+ case *Array:
+ appendSuffix(actual.ElemType.Type, suffix, seen...)
+ case *Map:
+ appendSuffix(actual.KeyType.Type, suffix, seen...)
+ appendSuffix(actual.ElemType.Type, suffix, seen...)
+ }
+}
+
+func removeAttributes(attr, sub *MappedAttributeExpr) {
+ o := AsObject(sub.Type)
+ for _, nat := range *o {
+ removeAttribute(attr, nat.Name)
+ }
+}
+
+func removeAttribute(attr *MappedAttributeExpr, name string) {
+ attr.Delete(name)
+ if attr.Validation != nil {
+ attr.Validation.RemoveRequired(name)
+ }
+ for _, ex := range attr.UserExamples {
+ if m, ok := ex.Value.(map[string]interface{}); ok {
+ delete(m, name)
+ }
+ }
+}
diff --git a/vendor/goa.design/goa/expr/http_endpoint.go b/vendor/goa.design/goa/expr/http_endpoint.go
new file mode 100644
index 000000000..44b88fd61
--- /dev/null
+++ b/vendor/goa.design/goa/expr/http_endpoint.go
@@ -0,0 +1,826 @@
+package expr
+
+import (
+ "fmt"
+ "path"
+ "strings"
+
+ "github.com/dimfeld/httppath"
+ "goa.design/goa/eval"
+)
+
+type (
+ // HTTPEndpointExpr describes a service endpoint. It embeds a
+ // MethodExpr and adds HTTP specific properties.
+ //
+ // It defines both an HTTP endpoint and the shape of HTTP requests and
+ // responses made to that endpoint. The shape of requests is defined via
+ // "parameters", there are path parameters (i.e. portions of the URL
+ // that define parameter values), query string parameters and a payload
+ // parameter (request body).
+ HTTPEndpointExpr struct {
+ eval.DSLFunc
+ // MethodExpr is the underlying method expression.
+ MethodExpr *MethodExpr
+ // Service is the parent service.
+ Service *HTTPServiceExpr
+ // Endpoint routes
+ Routes []*RouteExpr
+ // MapQueryParams - when not nil - indicates that the HTTP
+ // request query string parameters are used to build a map.
+ // - If the value is the empty string then the map is stored
+ // in the method payload (which must be of type Map)
+ // - If the value is a non-empty string then the map is
+ // stored in the payload attribute with the corresponding
+ // name (which must of be of type Map)
+ MapQueryParams *string
+ // Params defines the HTTP request path and query parameters.
+ Params *MappedAttributeExpr
+ // Headers defines the HTTP request headers.
+ Headers *MappedAttributeExpr
+ // Body describes the HTTP request body.
+ Body *AttributeExpr
+ // StreamingBody describes the body transferred through the websocket
+ // stream.
+ StreamingBody *AttributeExpr
+ // Responses is the list of all the possible success HTTP
+ // responses.
+ Responses []*HTTPResponseExpr
+ // HTTPErrors is the list of all the possible error HTTP
+ // responses.
+ HTTPErrors []*HTTPErrorExpr
+ // Requirements contains the security requirements for the HTTP endpoint.
+ Requirements []*SecurityExpr
+ // MultipartRequest indicates that the request content type for
+ // the endpoint is a multipart type.
+ MultipartRequest bool
+ // Meta is a set of key/value pairs with semantic that is
+ // specific to each generator, see dsl.Meta.
+ Meta MetaExpr
+ }
+
+ // RouteExpr represents an endpoint route (HTTP endpoint).
+ RouteExpr struct {
+ // Method is the HTTP method, e.g. "GET", "POST", etc.
+ Method string
+ // Path is the URL path e.g. "/tasks/{id}"
+ Path string
+ // Endpoint is the endpoint this route applies to.
+ Endpoint *HTTPEndpointExpr
+ // Meta is an arbitrary set of key/value pairs, see
+ // dsl.Meta
+ Meta MetaExpr
+ }
+)
+
+// ExtractRouteWildcards returns the names of the wildcards that appear in path.
+func ExtractRouteWildcards(path string) []string {
+ matches := HTTPWildcardRegex.FindAllStringSubmatch(path, -1)
+ wcs := make([]string, len(matches))
+ for i, m := range matches {
+ wcs[i] = m[1]
+ }
+ return wcs
+}
+
+// Name of HTTP endpoint
+func (e *HTTPEndpointExpr) Name() string {
+ return e.MethodExpr.Name
+}
+
+// Description of HTTP endpoint
+func (e *HTTPEndpointExpr) Description() string {
+ return e.MethodExpr.Description
+}
+
+// EvalName returns the generic expression name used in error messages.
+func (e *HTTPEndpointExpr) EvalName() string {
+ var prefix, suffix string
+ if e.Name() != "" {
+ suffix = fmt.Sprintf("HTTP endpoint %#v", e.Name())
+ } else {
+ suffix = "unnamed HTTP endpoint"
+ }
+ if e.Service != nil {
+ prefix = e.Service.EvalName() + " "
+ }
+ return prefix + suffix
+}
+
+// HasAbsoluteRoutes returns true if all the endpoint routes are absolute.
+func (e *HTTPEndpointExpr) HasAbsoluteRoutes() bool {
+ for _, r := range e.Routes {
+ if !r.IsAbsolute() {
+ return false
+ }
+ }
+ return true
+}
+
+// PathParams computes a mapped attribute containing the subset of e.Params that
+// describe path parameters.
+func (e *HTTPEndpointExpr) PathParams() *MappedAttributeExpr {
+ obj := Object{}
+ v := &ValidationExpr{}
+ pat := e.Params.Attribute() // need "attribute:name" style keys
+ for _, r := range e.Routes {
+ for _, p := range r.Params() {
+ att := pat.Find(p)
+ obj.Set(p, att)
+ if e.Params.IsRequired(p) {
+ v.AddRequired(p)
+ }
+ }
+ }
+ at := &AttributeExpr{Type: &obj, Validation: v}
+ return NewMappedAttributeExpr(at)
+}
+
+// QueryParams computes a mapped attribute containing the subset of e.Params
+// that describe query parameters.
+func (e *HTTPEndpointExpr) QueryParams() *MappedAttributeExpr {
+ obj := Object{}
+ v := &ValidationExpr{}
+ pp := make(map[string]struct{})
+ for _, r := range e.Routes {
+ for _, p := range r.Params() {
+ pp[p] = struct{}{}
+ }
+ }
+ pat := e.Params.Attribute() // need "attribute:name" style keys
+ for _, at := range *(pat.Type.(*Object)) {
+ found := false
+ for n := range pp {
+ if n == at.Name {
+ found = true
+ break
+ }
+ }
+ if !found {
+ obj.Set(at.Name, at.Attribute)
+ // when looking for required attributes we need the unmapped keys
+ // (i.e. without the "attribute:name" syntax)
+ attName := strings.Split(at.Name, ":")[0]
+ if e.Params.IsRequired(attName) {
+ v.AddRequired(attName)
+ }
+ }
+ }
+ at := &AttributeExpr{Type: &obj, Validation: v}
+ return NewMappedAttributeExpr(at)
+}
+
+// Prepare computes the request path and query string parameters as well as the
+// headers and body taking into account the inherited values from the service.
+func (e *HTTPEndpointExpr) Prepare() {
+ // Inherit headers and params from parent service and API
+ headers := NewEmptyMappedAttributeExpr()
+ headers.Merge(Root.API.HTTP.Headers)
+ headers.Merge(e.Service.Headers)
+
+ params := NewEmptyMappedAttributeExpr()
+ params.Merge(Root.API.HTTP.Params)
+ params.Merge(e.Service.Params)
+
+ if p := e.Service.Parent(); p != nil {
+ if c := p.CanonicalEndpoint(); c != nil {
+ if !e.HasAbsoluteRoutes() {
+ headers.Merge(c.Headers)
+ params.Merge(c.Params)
+ }
+ }
+ }
+ headers.Merge(e.Headers)
+ params.Merge(e.Params)
+
+ e.Headers = headers
+ e.Params = params
+
+ // Initialize path params that are not defined explicitly in
+ for _, r := range e.Routes {
+ for _, p := range r.Params() {
+ if a := params.Find(p); a == nil {
+ params.Merge(NewMappedAttributeExpr(&AttributeExpr{
+ Type: &Object{
+ &NamedAttributeExpr{
+ Name: p,
+ Attribute: &AttributeExpr{Type: String},
+ },
+ },
+ }))
+ }
+ }
+ }
+
+ // Make sure there's a default response if none define explicitly
+ if len(e.Responses) == 0 {
+ status := StatusOK
+ if e.MethodExpr.Payload.Type == Empty {
+ status = StatusNoContent
+ }
+ e.Responses = []*HTTPResponseExpr{{StatusCode: status}}
+ }
+
+ // Inherit HTTP errors from service
+ for _, r := range e.Service.HTTPErrors {
+ e.HTTPErrors = append(e.HTTPErrors, r.Dup())
+ }
+
+ // Prepare responses
+ for _, r := range e.Responses {
+ r.Prepare()
+ }
+ for _, er := range e.HTTPErrors {
+ er.Response.Prepare()
+ }
+}
+
+// Validate validates the endpoint expression.
+func (e *HTTPEndpointExpr) Validate() error {
+ verr := new(eval.ValidationErrors)
+
+ // Name cannot be empty
+ if e.Name() == "" {
+ verr.Add(e, "Endpoint name cannot be empty")
+ }
+
+ // Validate routes
+
+ // Routes cannot be empty
+ if len(e.Routes) == 0 {
+ verr.Add(e, "No route defined for HTTP endpoint")
+ } else {
+ for _, r := range e.Routes {
+ verr.Merge(r.Validate())
+ }
+ // Make sure that the same parameters are used in all routes
+ params := e.Routes[0].Params()
+ for _, r := range e.Routes[1:] {
+ for _, p := range params {
+ found := false
+ for _, p2 := range r.Params() {
+ if p == p2 {
+ found = true
+ break
+ }
+ }
+ if !found {
+ verr.Add(e, "Param %q does not appear in all routes", p)
+ }
+ }
+ for _, p2 := range r.Params() {
+ found := false
+ for _, p := range params {
+ if p == p2 {
+ found = true
+ break
+ }
+ }
+ if !found {
+ verr.Add(e, "Param %q does not appear in all routes", p2)
+ }
+ }
+ }
+ }
+
+ // Validate responses
+
+ // All responses but one must have tags for the same status code
+ hasTags := false
+ allTagged := true
+ successResp := false
+ for i, r := range e.Responses {
+ verr.Merge(r.Validate(e))
+ for j, r2 := range e.Responses {
+ if i != j && r.StatusCode == r2.StatusCode {
+ verr.Add(r, "Multiple response definitions with status code %d", r.StatusCode)
+ }
+ }
+ if r.Tag[0] == "" {
+ allTagged = false
+ } else {
+ hasTags = true
+ }
+ if r.StatusCode < 400 && e.MethodExpr.Stream == ServerStreamKind {
+ if successResp {
+ verr.Add(r, "Multiple success response defined for a streaming endpoint. At most one success response can be defined for a streaming endpoint.")
+ } else {
+ successResp = true
+ }
+ if r.Body != nil && r.Body.Type == Empty {
+ verr.Add(r, "Response body is empty but the endpoint uses streaming result. Response body cannot be empty for a success response if endpoint defines streaming result.")
+ }
+ }
+ }
+ if hasTags && allTagged {
+ verr.Add(e, "All responses define a Tag, at least one response must define no Tag.")
+ }
+ if hasTags && !IsObject(e.MethodExpr.Result.Type) {
+ verr.Add(e, "Some responses define a Tag but the method Result type is not an object.")
+ }
+
+ // Make sure parameters and headers use compatible types
+ verr.Merge(e.validateParams())
+ verr.Merge(e.validateHeaders())
+
+ // Validate body attribute (required fields exist etc.)
+ if e.Body != nil {
+ verr.Merge(e.Body.Validate("HTTP endpoint payload", e))
+ }
+
+ // Validate errors
+ for _, er := range e.HTTPErrors {
+ verr.Merge(er.Validate())
+ }
+
+ // Validate definitions of params, headers and bodies against definition of payload
+ if isEmpty(e.MethodExpr.Payload) {
+ if e.MapQueryParams != nil {
+ verr.Add(e, "MapParams is set but Payload is not defined")
+ }
+ if e.MultipartRequest {
+ verr.Add(e, "MultipartRequest is set but Payload is not defined")
+ }
+ if !e.Params.IsEmpty() {
+ verr.Add(e, "Params are set but Payload is not defined.")
+ }
+ if !e.Headers.IsEmpty() {
+ verr.Add(e, "Headers are set but Payload is not defined.")
+ }
+ return verr
+ }
+ if IsArray(e.MethodExpr.Payload.Type) {
+ if e.MapQueryParams != nil {
+ verr.Add(e, "MapParams is set but Payload type is array. Payload type must be map or an object with a map attribute")
+ }
+ var hasParams, hasHeaders bool
+ if !e.Params.IsEmpty() {
+ if e.MultipartRequest {
+ verr.Add(e, "Payload type is array but HTTP endpoint defines MultipartRequest and route/query string parameters. At most one of these must be defined.")
+ }
+ hasParams = true
+ }
+ if !e.Headers.IsEmpty() {
+ if e.MultipartRequest {
+ verr.Add(e, "Payload type is array but HTTP endpoint defines MultipartRequest and headers. At most one of these must be defined.")
+ }
+ hasHeaders = true
+ if hasParams {
+ verr.Add(e, "Payload type is array but HTTP endpoint defines both route or query string parameters and headers. At most one parameter or header must be defined and it must be of type array.")
+ }
+ }
+ if e.Body != nil && e.Body.Type != Empty {
+ if e.MultipartRequest {
+ verr.Add(e, "Payload type is array but HTTP endpoint defines MultipartRequest and body. At most one of these must be defined.")
+ }
+ if !IsArray(e.Body.Type) {
+ verr.Add(e, "Payload type is array but HTTP endpoint body is not.")
+ }
+ if hasParams {
+ verr.Add(e, "Payload type is array but HTTP endpoint defines both a body and route or query string parameters. At most one of these must be defined and it must be an array.")
+ }
+ if hasHeaders {
+ verr.Add(e, "Payload type is array but HTTP endpoint defines both a body and headers. At most one of these must be defined and it must be an array.")
+ }
+ }
+ }
+
+ if pMap := AsMap(e.MethodExpr.Payload.Type); pMap != nil {
+ if e.MapQueryParams != nil {
+ if e.MultipartRequest {
+ verr.Add(e, "Payload type is map but HTTP endpoint defines MultipartRequest and MapParams. At most one of these must be defined.")
+ }
+ if *e.MapQueryParams != "" {
+ verr.Add(e, "MapParams is set to an attribute in the Payload but Payload is a map. Payload must be an object with an attribute of map type")
+ }
+ if !IsPrimitive(pMap.KeyType.Type) {
+ verr.Add(e, "MapParams is set and Payload type is map. But payload key type must be a primitive")
+ }
+ if !IsPrimitive(pMap.ElemType.Type) && !IsArray(pMap.ElemType.Type) {
+ verr.Add(e, "MapParams is set and Payload type is map. But payload element type must be a primitive or array")
+ }
+ if IsArray(pMap.ElemType.Type) && !IsPrimitive(AsArray(pMap.ElemType.Type).ElemType.Type) {
+ verr.Add(e, "MapParams is set and Payload type is map. But array elements in payload element type must be primitive")
+ }
+ }
+ var hasParams bool
+ if !e.Params.IsEmpty() {
+ if e.MultipartRequest {
+ verr.Add(e, "Payload type is map but HTTP endpoint defines MultipartRequest and route/query string parameters. At most one of these must be defined.")
+ }
+ hasParams = true
+ }
+ if e.Body != nil && e.Body.Type != Empty {
+ if e.MultipartRequest {
+ verr.Add(e, "Payload type is map but HTTP endpoint defines MultipartRequest and body. At most one of these must be defined.")
+ }
+ if !IsMap(e.Body.Type) {
+ verr.Add(e, "Payload type is map but HTTP endpoint body is not.")
+ }
+ if hasParams {
+ verr.Add(e, "Payload type is map but HTTP endpoint defines both a body and route or query string parameters. At most one of these must be defined and it must be a map.")
+ }
+ }
+ }
+
+ if IsObject(e.MethodExpr.Payload.Type) {
+ if e.MapQueryParams != nil {
+ if pAttr := *e.MapQueryParams; pAttr == "" {
+ verr.Add(e, "MapParams is set to map entire payload but payload is an object. Payload must be a map.")
+ } else if e.MethodExpr.Payload.Find(pAttr) == nil {
+ verr.Add(e, "MapParams is set to an attribute in Payload. But payload has no attribute with type map and name %s", pAttr)
+ }
+ }
+ if e.Body != nil {
+ if e.MultipartRequest {
+ verr.Add(e, "HTTP endpoint defines MultipartRequest and body. At most one of these must be defined.")
+ }
+ if bObj := AsObject(e.Body.Type); bObj != nil {
+ var props []string
+ props, ok := e.Body.Meta["origin:attribute"]
+ if !ok {
+ for _, nat := range *bObj {
+ name := strings.Split(nat.Name, ":")[0]
+ props = append(props, name)
+ }
+ }
+ for _, prop := range props {
+ if e.MethodExpr.Payload.Find(prop) == nil {
+ verr.Add(e, "Body %q is not found in Payload.", prop)
+ }
+ }
+ }
+ }
+ }
+
+ return verr
+}
+
+// Finalize is run post DSL execution. It merges response definitions, creates
+// implicit endpoint parameters and initializes querystring parameters. It also
+// flattens the error responses and makes sure the error types are all user
+// types so that the response encoding code can properly use the type to infer
+// the response that it needs to build.
+func (e *HTTPEndpointExpr) Finalize() {
+ payload := AsObject(e.MethodExpr.Payload.Type)
+
+ // Initialize Authorization header implicitly defined via security DSL
+ // prior to computing headers and body.
+ if reqLen := len(e.MethodExpr.Requirements); reqLen > 0 {
+ e.Requirements = make([]*SecurityExpr, 0, reqLen)
+ for _, req := range e.MethodExpr.Requirements {
+ dupReq := DupRequirement(req)
+ for _, sch := range dupReq.Schemes {
+ var field string
+ switch sch.Kind {
+ case BasicAuthKind, NoKind:
+ continue
+ case APIKeyKind:
+ field = TaggedAttribute(e.MethodExpr.Payload, "security:apikey:"+sch.SchemeName)
+ case JWTKind:
+ field = TaggedAttribute(e.MethodExpr.Payload, "security:token")
+ case OAuth2Kind:
+ field = TaggedAttribute(e.MethodExpr.Payload, "security:accesstoken")
+ }
+ sch.Name, sch.In = findKey(e, field)
+ if sch.Name == "" {
+ sch.Name = "Authorization"
+ attr := e.MethodExpr.Payload.Find(field)
+ e.Headers.Type.(*Object).Set(field, attr)
+ e.Headers.Map(sch.Name, field)
+ if e.MethodExpr.Payload.IsRequired(field) {
+ if e.Headers.Validation == nil {
+ e.Headers.Validation = &ValidationExpr{}
+ }
+ e.Headers.Validation.AddRequired(field)
+ }
+ }
+ }
+ e.Requirements = append(e.Requirements, dupReq)
+ }
+ }
+
+ // Initialize the HTTP specific attributes with the corresponding
+ // payload attributes.
+ init := func(ma *MappedAttributeExpr) {
+ for _, nat := range *AsObject(ma.Type) {
+ var patt *AttributeExpr
+ var required bool
+ if payload != nil {
+ patt = payload.Attribute(nat.Name)
+ required = e.MethodExpr.Payload.IsRequired(nat.Name)
+ } else {
+ patt = e.MethodExpr.Payload
+ required = true
+ }
+ initAttrFromDesign(nat.Attribute, patt)
+ if required {
+ if ma.Validation == nil {
+ ma.Validation = &ValidationExpr{}
+ }
+ ma.Validation.AddRequired(nat.Name)
+ }
+ }
+ }
+ init(e.Params)
+ init(e.Headers)
+
+ if e.Body != nil {
+ e.Body.Finalize()
+ }
+
+ if e.Body == nil {
+ // No explicit body, compute it
+ e.Body = httpRequestBody(e)
+ }
+
+ e.StreamingBody = httpStreamingBody(e)
+
+ // Initialize responses parent, headers and body
+ for _, r := range e.Responses {
+ r.Finalize(e, e.MethodExpr.Result)
+ if r.Body == nil {
+ r.Body = httpResponseBody(e, r)
+ }
+ }
+
+ // Lookup undefined HTTP errors in API.
+ for _, err := range e.MethodExpr.Errors {
+ found := false
+ for _, herr := range e.HTTPErrors {
+ if err.Name == herr.Name {
+ found = true
+ break
+ }
+ }
+ if !found {
+ for _, herr := range Root.API.HTTP.Errors {
+ if herr.Name == err.Name {
+ e.HTTPErrors = append(e.HTTPErrors, herr.Dup())
+ }
+ }
+ }
+ }
+
+ // Make sure all error types are user types and have a body.
+ for _, herr := range e.HTTPErrors {
+ herr.Finalize(e)
+ }
+}
+
+// validateParams checks the endpoint parameters are of an allowed type and the
+// method payload contains the parameters.
+func (e *HTTPEndpointExpr) validateParams() *eval.ValidationErrors {
+ if e.Params.IsEmpty() {
+ return nil
+ }
+
+ var (
+ pparams = *AsObject(e.PathParams().Type)
+ qparams = *AsObject(e.QueryParams().Type)
+ )
+ verr := new(eval.ValidationErrors)
+ for _, nat := range pparams {
+ if IsObject(nat.Attribute.Type) {
+ verr.Add(e, "path parameter %s cannot be an object, path parameter types must be primitive, array or map (query string only)", nat.Name)
+ } else if IsMap(nat.Attribute.Type) {
+ verr.Add(e, "path parameter %s cannot be a map, path parameter types must be primitive or array", nat.Name)
+ } else if arr := AsArray(nat.Attribute.Type); arr != nil {
+ if !IsPrimitive(arr.ElemType.Type) {
+ verr.Add(e, "elements of array path parameter %s must be primitive", nat.Name)
+ }
+ } else {
+ ctx := fmt.Sprintf("path parameter %s", nat.Name)
+ verr.Merge(nat.Attribute.Validate(ctx, e))
+ }
+ }
+ for _, nat := range qparams {
+ if IsObject(nat.Attribute.Type) {
+ verr.Add(e, "query parameter %s cannot be an object, query parameter types must be primitive, array or map (query string only)", nat.Name)
+ } else if arr := AsArray(nat.Attribute.Type); arr != nil {
+ if !IsPrimitive(arr.ElemType.Type) {
+ verr.Add(e, "elements of array query parameter %s must be primitive", nat.Name)
+ }
+ } else {
+ ctx := fmt.Sprintf("query parameter %s", nat.Name)
+ verr.Merge(nat.Attribute.Validate(ctx, e))
+ }
+ }
+ if e.MethodExpr.Payload != nil {
+ switch e.MethodExpr.Payload.Type.(type) {
+ case *Object:
+ for _, nat := range pparams {
+ name := strings.Split(nat.Name, ":")[0]
+ if e.MethodExpr.Payload.Find(name) == nil {
+ verr.Add(e, "Path parameter %q not found in payload.", nat.Name)
+ }
+ }
+ for _, nat := range qparams {
+ name := strings.Split(nat.Name, ":")[0]
+ if e.MethodExpr.Payload.Find(name) == nil {
+ verr.Add(e, "Querys string parameter %q not found in payload.", nat.Name)
+ }
+ }
+ case *Array:
+ if len(pparams)+len(qparams) > 1 {
+ verr.Add(e, "Payload type is array but HTTP endpoint defines multiple parameters. At most one parameter must be defined and it must be an array.")
+ }
+ case *Map:
+ if len(pparams)+len(qparams) > 1 {
+ verr.Add(e, "Payload type is map but HTTP endpoint defines multiple parameters. At most one query string parameter must be defined and it must be a map.")
+ }
+ }
+ }
+ return verr
+}
+
+// validateHeaders makes sure headers are of an allowed type and the method
+// payload contains the headers.
+func (e *HTTPEndpointExpr) validateHeaders() *eval.ValidationErrors {
+ headers := AsObject(e.Headers.Type)
+ if len(*headers) == 0 {
+ return nil
+ }
+ verr := new(eval.ValidationErrors)
+ for _, nat := range *headers {
+ if IsObject(nat.Attribute.Type) {
+ verr.Add(e, "header %s cannot be an object, header type must be primitive or array", nat.Name)
+ } else if arr := AsArray(nat.Attribute.Type); arr != nil {
+ if !IsPrimitive(arr.ElemType.Type) {
+ verr.Add(e, "elements of array header %s must be primitive", nat.Name)
+ }
+ } else {
+ ctx := fmt.Sprintf("header %s", nat.Name)
+ verr.Merge(nat.Attribute.Validate(ctx, e))
+ }
+ }
+ if e.MethodExpr.Payload == nil {
+ if len(*headers) > 0 {
+ verr.Add(e, "Headers are defined but Payload is not defined")
+ }
+ } else {
+ switch e.MethodExpr.Payload.Type.(type) {
+ case *Object:
+ for _, nat := range *headers {
+ name := strings.Split(nat.Name, ":")[0]
+ if e.MethodExpr.Payload.Find(name) == nil {
+ verr.Add(e, "header %q is not found in payload.", nat.Name)
+ }
+ }
+ case *Array:
+ if len(*headers) > 1 {
+ verr.Add(e, "Payload type is array but HTTP endpoint defines multiple headers. At most one header must be defined and it must be an array.")
+ }
+ case *Map:
+ if len(*headers) > 0 {
+ verr.Add(e, "Payload type is map but HTTP endpoint defines headers. Map payloads can only be decoded from HTTP request bodies or query strings.")
+ }
+ }
+ }
+ return verr
+}
+
+// EvalName returns the generic definition name used in error messages.
+func (r *RouteExpr) EvalName() string {
+ return fmt.Sprintf(`route %s "%s" of %s`, r.Method, r.Path, r.Endpoint.EvalName())
+}
+
+// Validate validates a route expression by ensuring that the route parameters
+// can be inferred from the method payload and there is no duplicate parameters
+// in an absolute route.
+func (r *RouteExpr) Validate() *eval.ValidationErrors {
+ verr := new(eval.ValidationErrors)
+
+ // Make sure route params are defined in the method payload
+ if rparams := r.Params(); len(rparams) > 0 {
+ if r.Endpoint.MethodExpr.Payload == nil {
+ verr.Add(r, "Route parameters are defined, but method payload is not defined.")
+ } else {
+ switch r.Endpoint.MethodExpr.Payload.Type.(type) {
+ case *Map:
+ verr.Add(r, "Route parameters are defined, but method payload is a map. Method payload must be a primitive or an object.")
+ case *Object:
+ for _, p := range rparams {
+ if r.Endpoint.MethodExpr.Payload.Find(p) == nil {
+ verr.Add(r, "Route param %q not found in method payload", p)
+ }
+ }
+ }
+ if len(rparams) > 1 && IsPrimitive(r.Endpoint.MethodExpr.Payload.Type) {
+ verr.Add(r, "Multiple route parameters are defined, but method payload is a primitive. Only one router parameter can be defined if payload is primitive.")
+ }
+ }
+ }
+
+ // Make sure there's no duplicate params in absolute route
+ paths := r.FullPaths()
+ for _, path := range paths {
+ matches := HTTPWildcardRegex.FindAllStringSubmatch(path, -1)
+ wcs := make(map[string]struct{}, len(matches))
+ for _, match := range matches {
+ if _, ok := wcs[match[1]]; ok {
+ verr.Add(r, "Wildcard %q appears multiple times in full path %q", match[1], path)
+ }
+ wcs[match[1]] = struct{}{}
+ }
+ }
+
+ // For streaming endpoints, websockets does not support verbs other than GET
+ if r.Endpoint.MethodExpr.IsStreaming() {
+ if r.Method != "GET" {
+ verr.Add(r, "Streaming endpoint supports only \"GET\" method. Got %q.", r.Method)
+ }
+ }
+ return verr
+}
+
+// Params returns all the route parameters across all the base paths. For
+// example for the route "GET /foo/{fooID:foo_id}" Params returns
+// []string{"fooID:foo_id"}.
+func (r *RouteExpr) Params() []string {
+ paths := r.FullPaths()
+ var res []string
+ for _, p := range paths {
+ ws := ExtractWildcards(p)
+ for _, w := range ws {
+ found := false
+ for _, r := range res {
+ if r == w {
+ found = true
+ break
+ }
+ }
+ if !found {
+ res = append(res, w)
+ }
+ }
+ }
+ return res
+}
+
+// FullPaths returns the endpoint full paths computed by concatenating the
+// service base paths with the route specific path.
+func (r *RouteExpr) FullPaths() []string {
+ if r.IsAbsolute() {
+ return []string{httppath.Clean(r.Path[1:])}
+ }
+ bases := r.Endpoint.Service.FullPaths()
+ res := make([]string, len(bases))
+ for i, b := range bases {
+ res[i] = httppath.Clean(path.Join(b, r.Path))
+ }
+ return res
+}
+
+// IsAbsolute returns true if the endpoint path should not be concatenated to
+// the service and API base paths.
+func (r *RouteExpr) IsAbsolute() bool {
+ return strings.HasPrefix(r.Path, "//")
+}
+
+// initAttrFromDesign overrides the type of att with the one of patt and
+// initializes other non-initialized fields of att with the one of patt except
+// Meta.
+func initAttrFromDesign(att, patt *AttributeExpr) {
+ if patt == nil || patt.Type == Empty {
+ return
+ }
+ att.Type = patt.Type
+ if att.Description == "" {
+ att.Description = patt.Description
+ }
+ if att.Docs == nil {
+ att.Docs = patt.Docs
+ }
+ if att.Validation == nil {
+ att.Validation = patt.Validation
+ }
+ if att.DefaultValue == nil {
+ att.DefaultValue = patt.DefaultValue
+ }
+ if att.UserExamples == nil {
+ att.UserExamples = patt.UserExamples
+ }
+ if att.DefaultValue == nil {
+ att.DefaultValue = patt.DefaultValue
+ }
+ if att.Meta == nil {
+ att.Meta = patt.Meta
+ }
+}
+
+// isEmpty returns true if an attribute is Empty type and it has no bases and
+// references, or if an attribute is an empty object.
+func isEmpty(a *AttributeExpr) bool {
+ if a.Type == Empty {
+ if len(a.Bases) != 0 || len(a.References) != 0 {
+ return false
+ }
+ return true
+ }
+ obj := AsObject(a.Type)
+ if obj != nil {
+ return len(*obj) == 0
+ }
+ return false
+}
diff --git a/vendor/goa.design/goa/expr/http_error.go b/vendor/goa.design/goa/expr/http_error.go
new file mode 100644
index 000000000..65c9da431
--- /dev/null
+++ b/vendor/goa.design/goa/expr/http_error.go
@@ -0,0 +1,85 @@
+package expr
+
+import (
+ "goa.design/goa/eval"
+)
+
+type (
+ // HTTPErrorExpr defines a HTTP error response including its name,
+ // status, headers and result type.
+ HTTPErrorExpr struct {
+ // ErrorExpr is the underlying goa design error expression.
+ *ErrorExpr
+ // Name of error, we need a separate copy of the name to match it
+ // up with the appropriate ErrorExpr.
+ Name string
+ // Response is the corresponding HTTP response.
+ Response *HTTPResponseExpr
+ }
+)
+
+// EvalName returns the generic definition name used in error messages.
+func (e *HTTPErrorExpr) EvalName() string {
+ return "HTTP error " + e.Name
+}
+
+// Validate makes sure there is a error expression that matches the HTTP error
+// expression.
+func (e *HTTPErrorExpr) Validate() *eval.ValidationErrors {
+ verr := new(eval.ValidationErrors)
+ switch p := e.Response.Parent.(type) {
+ case *HTTPEndpointExpr:
+ if p.MethodExpr.Error(e.Name) == nil {
+ verr.Add(e, "Error %#v does not match an error defined in the method", e.Name)
+ }
+ case *HTTPServiceExpr:
+ if p.Error(e.Name) == nil {
+ verr.Add(e, "Error %#v does not match an error defined in the service", e.Name)
+ }
+ case *RootExpr:
+ if Root.Error(e.Name) == nil {
+ verr.Add(e, "Error %#v does not match an error defined in the API", e.Name)
+ }
+ }
+ return verr
+}
+
+// Finalize looks up the corresponding method error expression.
+func (e *HTTPErrorExpr) Finalize(a *HTTPEndpointExpr) {
+ var ee *ErrorExpr
+ switch p := e.Response.Parent.(type) {
+ case *HTTPEndpointExpr:
+ ee = p.MethodExpr.Error(e.Name)
+ case *HTTPServiceExpr:
+ ee = p.Error(e.Name)
+ case *RootExpr:
+ ee = Root.Error(e.Name)
+ }
+ e.ErrorExpr = ee
+ e.Response.Finalize(a, e.AttributeExpr)
+ if e.Response.Body == nil {
+ e.Response.Body = httpErrorResponseBody(a, e)
+ }
+
+ // Initialize response content type if result is media type.
+ if e.Response.Body.Type == Empty {
+ return
+ }
+ if e.Response.ContentType != "" {
+ return
+ }
+ mt, ok := e.Response.Body.Type.(*ResultTypeExpr)
+ if !ok {
+ return
+ }
+ e.Response.ContentType = mt.Identifier
+}
+
+// Dup creates a copy of the error expression.
+func (e *HTTPErrorExpr) Dup() *HTTPErrorExpr {
+ return &HTTPErrorExpr{
+ ErrorExpr: e.ErrorExpr,
+ Name: e.Name,
+ Response: e.Response.Dup(),
+ }
+}
diff --git a/vendor/goa.design/goa/expr/http_file_server.go b/vendor/goa.design/goa/expr/http_file_server.go
new file mode 100644
index 000000000..ba234ce8c
--- /dev/null
+++ b/vendor/goa.design/goa/expr/http_file_server.go
@@ -0,0 +1,59 @@
+package expr
+
+import (
+ "fmt"
+ "path"
+ "strings"
+)
+
+type (
+ // HTTPFileServerExpr defines an endpoint that serves static assets
+ // through HTTP.
+ HTTPFileServerExpr struct {
+ // Service is the parent service.
+ Service *HTTPServiceExpr
+ // Description for docs
+ Description string
+ // Docs points to the service external documentation
+ Docs *DocsExpr
+ // FilePath is the file path to the static asset(s)
+ FilePath string
+ // RequestPaths is the list of HTTP paths that serve the assets.
+ RequestPaths []string
+ // Meta is a list of key/value pairs
+ Meta MetaExpr
+ }
+)
+
+// EvalName returns the generic definition name used in error messages.
+func (f *HTTPFileServerExpr) EvalName() string {
+ suffix := fmt.Sprintf("file server %s", f.FilePath)
+ var prefix string
+ if f.Service != nil {
+ prefix = f.Service.EvalName() + " "
+ }
+ return prefix + suffix
+}
+
+// Finalize normalizes the request path.
+func (f *HTTPFileServerExpr) Finalize() {
+ current := f.RequestPaths[0]
+ paths := f.Service.Paths
+ if len(paths) == 0 {
+ paths = []string{"/"}
+ }
+ f.RequestPaths = make([]string, len(paths))
+ for i, sp := range paths {
+ p := path.Join(Root.API.HTTP.Path, sp, current)
+ // Make sure request path starts with a "/" so codegen can rely on it.
+ if !strings.HasPrefix(p, "/") {
+ p = "/" + p
+ }
+ f.RequestPaths[i] = p
+ }
+}
+
+// IsDir returns true if the file server serves a directory, false otherwise.
+func (f *HTTPFileServerExpr) IsDir() bool {
+ return HTTPWildcardRegex.MatchString(f.RequestPaths[0])
+}
diff --git a/vendor/goa.design/goa/expr/http_response.go b/vendor/goa.design/goa/expr/http_response.go
new file mode 100644
index 000000000..f6d5ebf48
--- /dev/null
+++ b/vendor/goa.design/goa/expr/http_response.go
@@ -0,0 +1,271 @@
+package expr
+
+import (
+ "fmt"
+ "strings"
+
+ "goa.design/goa/eval"
+)
+
+const (
+ StatusContinue = 100 // RFC 7231, 6.2.1
+ StatusSwitchingProtocols = 101 // RFC 7231, 6.2.2
+ StatusProcessing = 102 // RFC 2518, 10.1
+
+ StatusOK = 200 // RFC 7231, 6.3.1
+ StatusCreated = 201 // RFC 7231, 6.3.2
+ StatusAccepted = 202 // RFC 7231, 6.3.3
+ StatusNonAuthoritativeInfo = 203 // RFC 7231, 6.3.4
+ StatusNoContent = 204 // RFC 7231, 6.3.5
+ StatusResetContent = 205 // RFC 7231, 6.3.6
+ StatusPartialContent = 206 // RFC 7233, 4.1
+ StatusMultiStatus = 207 // RFC 4918, 11.1
+ StatusAlreadyReported = 208 // RFC 5842, 7.1
+ StatusIMUsed = 226 // RFC 3229, 10.4.1
+
+ StatusMultipleChoices = 300 // RFC 7231, 6.4.1
+ StatusMovedPermanently = 301 // RFC 7231, 6.4.2
+ StatusFound = 302 // RFC 7231, 6.4.3
+ StatusSeeOther = 303 // RFC 7231, 6.4.4
+ StatusNotModified = 304 // RFC 7232, 4.1
+ StatusUseProxy = 305 // RFC 7231, 6.4.5
+
+ StatusTemporaryRedirect = 307 // RFC 7231, 6.4.7
+ StatusPermanentRedirect = 308 // RFC 7538, 3
+
+ StatusBadRequest = 400 // RFC 7231, 6.5.1
+ StatusUnauthorized = 401 // RFC 7235, 3.1
+ StatusPaymentRequired = 402 // RFC 7231, 6.5.2
+ StatusForbidden = 403 // RFC 7231, 6.5.3
+ StatusNotFound = 404 // RFC 7231, 6.5.4
+ StatusMethodNotAllowed = 405 // RFC 7231, 6.5.5
+ StatusNotAcceptable = 406 // RFC 7231, 6.5.6
+ StatusProxyAuthRequired = 407 // RFC 7235, 3.2
+ StatusRequestTimeout = 408 // RFC 7231, 6.5.7
+ StatusConflict = 409 // RFC 7231, 6.5.8
+ StatusGone = 410 // RFC 7231, 6.5.9
+ StatusLengthRequired = 411 // RFC 7231, 6.5.10
+ StatusPreconditionFailed = 412 // RFC 7232, 4.2
+ StatusRequestEntityTooLarge = 413 // RFC 7231, 6.5.11
+ StatusRequestURITooLong = 414 // RFC 7231, 6.5.12
+ StatusUnsupportedResultType = 415 // RFC 7231, 6.5.13
+ StatusRequestedRangeNotSatisfiable = 416 // RFC 7233, 4.4
+ StatusExpectationFailed = 417 // RFC 7231, 6.5.14
+ StatusTeapot = 418 // RFC 7168, 2.3.3
+ StatusUnprocessableEntity = 422 // RFC 4918, 11.2
+ StatusLocked = 423 // RFC 4918, 11.3
+ StatusFailedDependency = 424 // RFC 4918, 11.4
+ StatusUpgradeRequired = 426 // RFC 7231, 6.5.15
+ StatusPreconditionRequired = 428 // RFC 6585, 3
+ StatusTooManyRequests = 429 // RFC 6585, 4
+ StatusRequestHeaderFieldsTooLarge = 431 // RFC 6585, 5
+ StatusUnavailableForLegalReasons = 451 // RFC 7725, 3
+
+ StatusInternalServerError = 500 // RFC 7231, 6.6.1
+ StatusNotImplemented = 501 // RFC 7231, 6.6.2
+ StatusBadGateway = 502 // RFC 7231, 6.6.3
+ StatusServiceUnavailable = 503 // RFC 7231, 6.6.4
+ StatusGatewayTimeout = 504 // RFC 7231, 6.6.5
+ StatusHTTPVersionNotSupported = 505 // RFC 7231, 6.6.6
+ StatusVariantAlsoNegotiates = 506 // RFC 2295, 8.1
+ StatusInsufficientStorage = 507 // RFC 4918, 11.5
+ StatusLoopDetected = 508 // RFC 5842, 7.2
+ StatusNotExtended = 510 // RFC 2774, 7
+ StatusNetworkAuthenticationRequired = 511 // RFC 6585, 6
+)
+
+type (
+ // HTTPResponseExpr defines a HTTP response including its status code,
+ // headers and result type.
+ HTTPResponseExpr struct {
+ // HTTP status
+ StatusCode int
+ // Response description
+ Description string
+ // Headers describe the HTTP response headers.
+ Headers *MappedAttributeExpr
+ // Response body if any
+ Body *AttributeExpr
+ // Response Content-Type header value
+ ContentType string
+ // Tag the value a field of the result must have for this
+ // response to be used.
+ Tag [2]string
+ // Parent expression, one of EndpointExpr, ServiceExpr or
+ // RootExpr.
+ Parent eval.Expression
+ // Meta is a list of key/value pairs
+ Meta MetaExpr
+ }
+)
+
+// EvalName returns the generic definition name used in error messages.
+func (r *HTTPResponseExpr) EvalName() string {
+ var suffix string
+ if r.Parent != nil {
+ suffix = fmt.Sprintf(" of %s", r.Parent.EvalName())
+ }
+ return "HTTP response" + suffix
+}
+
+// Prepare makes sure the response is initialized even if not done explicitly
+// by
+func (r *HTTPResponseExpr) Prepare() {
+ if r.Headers == nil {
+ r.Headers = NewEmptyMappedAttributeExpr()
+ }
+}
+
+// Validate checks that the response definition is consistent: its status is set
+// and the result type definition if any is valid.
+func (r *HTTPResponseExpr) Validate(e *HTTPEndpointExpr) *eval.ValidationErrors {
+ verr := new(eval.ValidationErrors)
+
+ if r.StatusCode == 0 {
+ verr.Add(r, "HTTP response status not defined")
+ } else if !bodyAllowedForStatus(r.StatusCode) && r.bodyExists() && !e.MethodExpr.IsStreaming() {
+ verr.Add(r, "Response body defined for status code %d which does not allow response body.", r.StatusCode)
+ }
+
+ if e.MethodExpr.Result.Type == Empty {
+ if !r.Headers.IsEmpty() {
+ verr.Add(r, "response defines headers but result is empty")
+ }
+ return verr
+ }
+
+ rt, isrt := e.MethodExpr.Result.Type.(*ResultTypeExpr)
+ var inview string
+ if isrt {
+ inview = " all views in"
+ }
+ hasAttribute := func(name string) bool {
+ if !IsObject(e.MethodExpr.Result.Type) {
+ return false
+ }
+ if !isrt {
+ return e.MethodExpr.Result.Find(name) != nil
+ }
+ if v, ok := e.MethodExpr.Result.Meta["view"]; ok {
+ return rt.ViewHasAttribute(v[0], name)
+ }
+ for _, v := range rt.Views {
+ if !rt.ViewHasAttribute(v.Name, name) {
+ return false
+ }
+ }
+ return true
+ }
+ if !r.Headers.IsEmpty() {
+ verr.Merge(r.Headers.Validate("HTTP response headers", r))
+ if e.MethodExpr.Result.Type == Empty {
+ verr.Add(r, "response defines headers but result is empty")
+ } else if IsObject(e.MethodExpr.Result.Type) {
+ mobj := AsObject(r.Headers.Type)
+ for _, h := range *mobj {
+ if !hasAttribute(h.Name) {
+ verr.Add(r, "header %q has no equivalent attribute in%s result type, use notation 'attribute_name:header_name' to identify corresponding result type attribute.", h.Name, inview)
+ }
+ }
+ } else if len(*AsObject(r.Headers.Type)) > 1 {
+ verr.Add(r, "response defines more than one header but result type is not an object")
+ }
+ }
+ if r.Body != nil {
+ verr.Merge(r.Body.Validate("HTTP response body", r))
+ if att, ok := r.Body.Meta["origin:attribute"]; ok {
+ if !hasAttribute(att[0]) {
+ verr.Add(r, "body %q has no equivalent attribute in%s result type", att[0], inview)
+ }
+ } else if bobj := AsObject(r.Body.Type); bobj != nil {
+ for _, n := range *bobj {
+ if !hasAttribute(n.Name) {
+ verr.Add(r, "body %q has no equivalent attribute in%s result type", n.Name, inview)
+ }
+ }
+ }
+ }
+ return verr
+}
+
+// Finalize sets the response result type from its type if the type is a result
+// type and no result type is already specified.
+func (r *HTTPResponseExpr) Finalize(a *HTTPEndpointExpr, svcAtt *AttributeExpr) {
+ r.Parent = a
+
+ // Initialize the body attributes (if an object) with the corresponding
+ // result attributes.
+ svcObj := AsObject(svcAtt.Type)
+ if r.Body != nil {
+ if body := AsObject(r.Body.Type); body != nil {
+ for _, nat := range *body {
+ n := nat.Name
+ att := nat.Attribute
+ n = strings.Split(n, ":")[0]
+ var patt *AttributeExpr
+ var required bool
+ if svcObj != nil {
+ att = svcObj.Attribute(n)
+ required = svcAtt.IsRequired(n)
+ } else {
+ att = svcAtt
+ required = svcAtt.Type != Empty
+ }
+ initAttrFromDesign(att, patt)
+ if required {
+ if r.Body.Validation == nil {
+ r.Body.Validation = &ValidationExpr{}
+ }
+ r.Body.Validation.Required = append(r.Body.Validation.Required, n)
+ }
+ }
+ }
+ if r.Body.Meta == nil {
+ r.Body.Meta = svcAtt.Meta
+ }
+ }
+ // Set response content type if empty and if set in the result type
+ if r.ContentType == "" {
+ if rt, ok := svcAtt.Type.(*ResultTypeExpr); ok && rt.ContentType != "" {
+ r.ContentType = rt.ContentType
+ }
+ }
+}
+
+// Dup creates a copy of the response expression.
+func (r *HTTPResponseExpr) Dup() *HTTPResponseExpr {
+ res := HTTPResponseExpr{
+ StatusCode: r.StatusCode,
+ Description: r.Description,
+ ContentType: r.ContentType,
+ Parent: r.Parent,
+ Meta: r.Meta,
+ }
+ if r.Body != nil {
+ res.Body = DupAtt(r.Body)
+ }
+ res.Headers = DupMappedAtt(r.Headers)
+ return &res
+}
+
+// bodyAllowedForStatus reports whether a given response status code
+// permits a body. See RFC 2616, section 4.4.
+// See https://golang.org/src/net/http/transfer.go
+func bodyAllowedForStatus(status int) bool {
+ switch {
+ case status >= 100 && status <= 199:
+ return false
+ case status == 204:
+ return false
+ case status == 304:
+ return false
+ }
+ return true
+}
+
+// bodyExists returns true if a response body is defined in the
+// response expression via Body() or Result() in the method expression.
+func (r *HTTPResponseExpr) bodyExists() bool {
+ ep, ok := r.Parent.(*HTTPEndpointExpr)
+ return ok && httpResponseBody(ep, r).Type != Empty
+}
diff --git a/vendor/goa.design/goa/expr/http_service.go b/vendor/goa.design/goa/expr/http_service.go
new file mode 100644
index 000000000..b9422aec7
--- /dev/null
+++ b/vendor/goa.design/goa/expr/http_service.go
@@ -0,0 +1,231 @@
+package expr
+
+import (
+ "fmt"
+ "path"
+ "strings"
+
+ "github.com/dimfeld/httppath"
+ "goa.design/goa/eval"
+)
+
+type (
+ // HTTPServiceExpr describes a HTTP service. It defines both a result
+ // type and a set of endpoints that can be executed through HTTP
+ // requests. HTTPServiceExpr embeds a HTTPServiceExpr and adds HTTP specific
+ // properties.
+ HTTPServiceExpr struct {
+ eval.DSLFunc
+ // ServiceExpr is the service expression that backs this
+ // service.
+ ServiceExpr *ServiceExpr
+ // Common URL prefixes to all service endpoint HTTP requests
+ Paths []string
+ // Params defines the HTTP request path and query parameters
+ // common to all the service endpoints.
+ Params *MappedAttributeExpr
+ // Headers defines the HTTP request headers common to all the
+ // service endpoints.
+ Headers *MappedAttributeExpr
+ // Name of parent service if any
+ ParentName string
+ // Endpoint with canonical service path
+ CanonicalEndpointName string
+ // HTTPEndpoints is the list of service endpoints.
+ HTTPEndpoints []*HTTPEndpointExpr
+ // HTTPErrors lists HTTP errors that apply to all endpoints.
+ HTTPErrors []*HTTPErrorExpr
+ // FileServers is the list of static asset serving endpoints
+ FileServers []*HTTPFileServerExpr
+ // Meta is a set of key/value pairs with semantic that is
+ // specific to each generator.
+ Meta MetaExpr
+ }
+)
+
+// Name of service (service)
+func (svc *HTTPServiceExpr) Name() string {
+ return svc.ServiceExpr.Name
+}
+
+// Description of service (service)
+func (svc *HTTPServiceExpr) Description() string {
+ return svc.ServiceExpr.Description
+}
+
+// Error returns the error with the given name.
+func (svc *HTTPServiceExpr) Error(name string) *ErrorExpr {
+ for _, erro := range svc.ServiceExpr.Errors {
+ if erro.Name == name {
+ return erro
+ }
+ }
+ return Root.Error(name)
+}
+
+// Endpoint returns the service endpoint with the given name or nil if there
+// isn't one.
+func (svc *HTTPServiceExpr) Endpoint(name string) *HTTPEndpointExpr {
+ for _, a := range svc.HTTPEndpoints {
+ if a.Name() == name {
+ return a
+ }
+ }
+ return nil
+}
+
+// EndpointFor builds the endpoint for the given method.
+func (svc *HTTPServiceExpr) EndpointFor(name string, m *MethodExpr) *HTTPEndpointExpr {
+ if a := svc.Endpoint(name); a != nil {
+ return a
+ }
+ a := &HTTPEndpointExpr{
+ MethodExpr: m,
+ Service: svc,
+ }
+ svc.HTTPEndpoints = append(svc.HTTPEndpoints, a)
+ return a
+}
+
+// CanonicalEndpoint returns the canonical endpoint of the service if any.
+// The canonical endpoint is used to compute hrefs to services.
+func (svc *HTTPServiceExpr) CanonicalEndpoint() *HTTPEndpointExpr {
+ name := svc.CanonicalEndpointName
+ if name == "" {
+ name = "show"
+ }
+ return svc.Endpoint(name)
+}
+
+// URITemplate returns a URI template to this service.
+// The result is the empty string if the service does not have a "show" endpoint
+// and does not define a different canonical endpoint.
+func (svc *HTTPServiceExpr) URITemplate() string {
+ ca := svc.CanonicalEndpoint()
+ if ca == nil || len(ca.Routes) == 0 {
+ return ""
+ }
+ return ca.Routes[0].FullPaths()[0]
+}
+
+// FullPaths computes the base paths to the service endpoints concatenating the
+// API and parent service base paths as needed.
+func (svc *HTTPServiceExpr) FullPaths() []string {
+ if len(svc.Paths) == 0 {
+ return []string{path.Join(Root.API.HTTP.Path)}
+ }
+ var paths []string
+ for _, p := range svc.Paths {
+ if strings.HasPrefix(p, "//") {
+ paths = append(paths, httppath.Clean(p))
+ continue
+ }
+ var basePaths []string
+ if p := svc.Parent(); p != nil {
+ if ca := p.CanonicalEndpoint(); ca != nil {
+ if routes := ca.Routes; len(routes) > 0 {
+ // Note: all these tests should be true at code
+ // generation time as DSL validation makes sure
+ // that parent services have a canonical path.
+ fullPaths := routes[0].FullPaths()
+ basePaths = make([]string, len(fullPaths))
+ for i, p := range fullPaths {
+ basePaths[i] = path.Join(p)
+ }
+ }
+ }
+ } else {
+ basePaths = []string{Root.API.HTTP.Path}
+ }
+ for _, base := range basePaths {
+ paths = append(paths, httppath.Clean(path.Join(base, p)))
+ }
+ }
+ return paths
+}
+
+// Parent returns the parent service if any, nil otherwise.
+func (svc *HTTPServiceExpr) Parent() *HTTPServiceExpr {
+ if svc.ParentName != "" {
+ if parent := Root.API.HTTP.Service(svc.ParentName); parent != nil {
+ return parent
+ }
+ }
+ return nil
+}
+
+// HTTPError returns the service HTTP error with given name if any.
+func (svc *HTTPServiceExpr) HTTPError(name string) *HTTPErrorExpr {
+ for _, erro := range svc.HTTPErrors {
+ if erro.Name == name {
+ return erro
+ }
+ }
+ return nil
+}
+
+// EvalName returns the generic definition name used in error messages.
+func (svc *HTTPServiceExpr) EvalName() string {
+ if svc.Name() == "" {
+ return "unnamed service"
+ }
+ return fmt.Sprintf("service %#v", svc.Name())
+}
+
+// Prepare initializes the error responses.
+func (svc *HTTPServiceExpr) Prepare() {
+ for _, er := range svc.HTTPErrors {
+ er.Response.Prepare()
+ }
+}
+
+// Validate makes sure the service is valid.
+func (svc *HTTPServiceExpr) Validate() error {
+ verr := new(eval.ValidationErrors)
+ if svc.Params != nil {
+ verr.Merge(svc.Params.Validate("parameters", svc))
+ }
+ if svc.Headers != nil {
+ verr.Merge(svc.Headers.Validate("headers", svc))
+ }
+ if n := svc.ParentName; n != "" {
+ if p := Root.API.HTTP.Service(n); p == nil {
+ verr.Add(svc, "Parent service %s not found", n)
+ } else {
+ if p.CanonicalEndpoint() == nil {
+ verr.Add(svc, "Parent service %s has no canonical endpoint", n)
+ }
+ if p.ParentName == svc.Name() {
+ verr.Add(svc, "Parent service %s is also child", n)
+ }
+ }
+ }
+ if n := svc.CanonicalEndpointName; n != "" {
+ if a := svc.Endpoint(n); a == nil {
+ verr.Add(svc, "Unknown canonical endpoint %s", n)
+ }
+ }
+
+ // Validate errors (have status codes and bodies are valid)
+ for _, er := range svc.HTTPErrors {
+ verr.Merge(er.Validate())
+ }
+ for _, er := range Root.API.HTTP.Errors {
+ // This may result in the same error being validated multiple
+ // times however service is the top level expression being
+ // walked and errors cannot be walked until all expressions have
+ // run. Another solution could be to append a new dynamically
+ // generated root that the eval engine would process after. Keep
+ // things simple for now.
+ verr.Merge(er.Validate())
+ }
+
+ return verr
+}
+
+// Finalize initializes the path if no path is set in design.
+func (svc *HTTPServiceExpr) Finalize() {
+ if len(svc.Paths) == 0 {
+ svc.Paths = []string{"/"}
+ }
+}
diff --git a/vendor/goa.design/goa/expr/init.go b/vendor/goa.design/goa/expr/init.go
new file mode 100644
index 000000000..29a97abfb
--- /dev/null
+++ b/vendor/goa.design/goa/expr/init.go
@@ -0,0 +1,15 @@
+package expr
+
+import (
+ "goa.design/goa/eval"
+)
+
+// Register DSL roots.
+func init() {
+ if err := eval.Register(Root); err != nil {
+ panic(err) // bug
+ }
+ if err := eval.Register(Root.GeneratedTypes); err != nil {
+ panic(err) // bug
+ }
+}
diff --git a/vendor/goa.design/goa/expr/mapped_attribute.go b/vendor/goa.design/goa/expr/mapped_attribute.go
new file mode 100644
index 000000000..6a33b4e24
--- /dev/null
+++ b/vendor/goa.design/goa/expr/mapped_attribute.go
@@ -0,0 +1,185 @@
+package expr
+
+import (
+ "strings"
+)
+
+// MappedAttributeExpr is an attribute expression of type object that map the
+// object keys to external names (e.g. HTTP header names).
+type MappedAttributeExpr struct {
+ *AttributeExpr
+ nameMap map[string]string
+ reverseMap map[string]string
+}
+
+// NewEmptyMappedAttributeExpr creates an empty mapped attribute expression.
+func NewEmptyMappedAttributeExpr() *MappedAttributeExpr {
+ return NewMappedAttributeExpr(&AttributeExpr{Type: &Object{}})
+}
+
+// NewMappedAttributeExpr instantiates a mapped attribute expression for the
+// given attribute. The type of att must be Object.
+func NewMappedAttributeExpr(att *AttributeExpr) *MappedAttributeExpr {
+ if att == nil {
+ return NewEmptyMappedAttributeExpr()
+ }
+ if !IsObject(att.Type) {
+ panic("cannot create a mapped attribute with a non object attribute") // bug
+ }
+ var (
+ nameMap = make(map[string]string)
+ reverseMap = make(map[string]string)
+ validation *ValidationExpr
+ )
+ if att.Validation != nil {
+ validation = att.Validation.Dup()
+ } else if ut, ok := att.Type.(UserType); ok {
+ if val := ut.Attribute().Validation; val != nil {
+ validation = val.Dup()
+ }
+ }
+ ma := &MappedAttributeExpr{
+ AttributeExpr: &AttributeExpr{
+ Type: Dup(att.Type),
+ References: att.References,
+ Bases: att.Bases,
+ Description: att.Description,
+ Docs: att.Docs,
+ Meta: att.Meta,
+ DefaultValue: att.DefaultValue,
+ UserExamples: att.UserExamples,
+ Validation: validation,
+ },
+ nameMap: nameMap,
+ reverseMap: reverseMap,
+ }
+ ma.Remap()
+ return ma
+}
+
+// Remap recomputes the name mappings from the inner attribute. Use this if
+// the underlying attribute is modified after the mapped attribute has been
+// initially created.
+func (ma *MappedAttributeExpr) Remap() {
+ var (
+ n = &Object{}
+ o = AsObject(ma.Type)
+ )
+ for _, nat := range *o {
+ elems := strings.Split(nat.Name, ":")
+ n.Set(elems[0], nat.Attribute)
+ if len(elems) > 1 {
+ ma.nameMap[elems[0]] = elems[1]
+ ma.reverseMap[elems[1]] = elems[0]
+ }
+ }
+ ma.Type = n
+}
+
+// DupMappedAtt creates a deep copy of ma.
+func DupMappedAtt(ma *MappedAttributeExpr) *MappedAttributeExpr {
+ nameMap := make(map[string]string, len(ma.nameMap))
+ reverseMap := make(map[string]string, len(ma.reverseMap))
+ for k, v := range ma.nameMap {
+ nameMap[k] = v
+ }
+ for k, v := range ma.reverseMap {
+ reverseMap[k] = v
+ }
+ return &MappedAttributeExpr{
+ AttributeExpr: DupAtt(ma.AttributeExpr),
+ nameMap: nameMap,
+ reverseMap: reverseMap,
+ }
+}
+
+// Map records the element name of one of the child attributes.
+// Map panics if attName is not the name of a child attribute.
+func (ma *MappedAttributeExpr) Map(elemName, attName string) {
+ if att := AsObject(ma.Type).Attribute(attName); att == nil {
+ panic(attName + " is not the name of a child of the mapped attribute") // bug
+ }
+ ma.nameMap[attName] = elemName
+ ma.reverseMap[elemName] = attName
+}
+
+// Delete removes a child attribute given its name.
+func (ma *MappedAttributeExpr) Delete(attName string) {
+ delete(ma.nameMap, attName)
+ for k, v := range ma.reverseMap {
+ if v == attName {
+ delete(ma.reverseMap, k)
+ break
+ }
+ }
+ ma.Type.(*Object).Delete(attName)
+ if ma.Validation != nil {
+ ma.Validation.RemoveRequired(attName)
+ }
+}
+
+// Attribute returns the original attribute using "att:elem" format for the keys.
+func (ma *MappedAttributeExpr) Attribute() *AttributeExpr {
+ att := DupAtt(ma.AttributeExpr)
+ obj := AsObject(att.Type)
+ for _, nat := range *obj {
+ if elem := ma.ElemName(nat.Name); elem != nat.Name {
+ obj.Rename(nat.Name, nat.Name+":"+elem)
+ }
+ }
+ return att
+}
+
+// ElemName returns the transport element name of the given object key. It
+// returns keyName if it's a key of the mapped attribute object type. It panics
+// if there is no mapping and keyName is not a key.
+func (ma *MappedAttributeExpr) ElemName(keyName string) string {
+ if n, ok := ma.nameMap[keyName]; ok {
+ return n
+ }
+ if att := AsObject(ma.Type).Attribute(keyName); att != nil {
+ return keyName
+ }
+ panic("Key " + keyName + " is not defined") // bug
+}
+
+// KeyName returns the object key of the given transport element name. It
+// returns elemName if it's a key of the mapped attribute object type. It panics
+// if there is no mapping and elemName is not a key.
+func (ma *MappedAttributeExpr) KeyName(elemName string) string {
+ if n, ok := ma.reverseMap[elemName]; ok {
+ return n
+ }
+ if att := AsObject(ma.Type).Attribute(elemName); att != nil {
+ return elemName
+ }
+ panic("transport element " + elemName + " is not defined and is not a key") // bug
+}
+
+// Merge merges other's attributes into a overriding attributes of a with
+// attributes of other with identical names.
+func (ma *MappedAttributeExpr) Merge(other *MappedAttributeExpr) {
+ if other == nil {
+ return
+ }
+ ma.AttributeExpr.Merge(other.Attribute())
+ ma.Remap()
+}
+
+// FindKey finds the given key in the mapped attribute expression.
+// If key is found, it returns the transport element name of the key and true.
+// Otherwise, it returns an empty string and false.
+func (ma *MappedAttributeExpr) FindKey(keyName string) (string, bool) {
+ obj := AsObject(ma.Type)
+ for _, nat := range *obj {
+ if nat.Name == keyName {
+ return ma.ElemName(keyName), true
+ }
+ }
+ return "", false
+}
+
+// IsEmpty returns true if the mapped attribute contains no key.
+func (ma *MappedAttributeExpr) IsEmpty() bool {
+ return len(*ma.Type.(*Object)) == 0
+}
diff --git a/vendor/goa.design/goa/expr/method.go b/vendor/goa.design/goa/expr/method.go
new file mode 100644
index 000000000..7f2988865
--- /dev/null
+++ b/vendor/goa.design/goa/expr/method.go
@@ -0,0 +1,271 @@
+package expr
+
+import (
+ "fmt"
+
+ "goa.design/goa/eval"
+)
+
+type (
+ // StreamKind is a type denoting the kind of stream.
+ StreamKind int
+
+ // MethodExpr defines a single method.
+ MethodExpr struct {
+ // DSLFunc contains the DSL used to initialize the expression.
+ eval.DSLFunc
+ // Name of method.
+ Name string
+ // Description of method for consumption by humans.
+ Description string
+ // Docs points to the method external documentation if any.
+ Docs *DocsExpr
+ // Payload attribute
+ Payload *AttributeExpr
+ // Result attribute
+ Result *AttributeExpr
+ // Errors lists the error responses.
+ Errors []*ErrorExpr
+ // Requirements contains the security requirements for the
+ // method. One requirement is composed of potentially multiple
+ // schemes. Incoming requests must validate at least one
+ // requirement to be authorized.
+ Requirements []*SecurityExpr
+ // Service that owns method.
+ Service *ServiceExpr
+ // Meta is an arbitrary set of key/value pairs, see dsl.Meta
+ Meta MetaExpr
+ // Stream is the kind of stream (none, payload, result, or both)
+ // the method defines.
+ Stream StreamKind
+ // StreamingPayload is the payload sent across the stream.
+ StreamingPayload *AttributeExpr
+ }
+)
+
+const (
+ // NoStreamKind represents no payload or result stream in method.
+ NoStreamKind StreamKind = iota + 1
+ // ClientStreamKind represents client sends a streaming payload to
+ // method.
+ ClientStreamKind
+ // ServerStreamKind represents server sends a streaming result from
+ // method.
+ ServerStreamKind
+ // BidirectionalStreamKind represents client and server sending payload
+ // and result respectively via a stream.
+ BidirectionalStreamKind
+)
+
+// Error returns the error with the given name. It looks in the method's own
+// errors first and otherwise delegates the lookup to the owning service.
+func (m *MethodExpr) Error(name string) *ErrorExpr {
+ for _, err := range m.Errors {
+ if err.Name == name {
+ return err
+ }
+ }
+ return m.Service.Error(name)
+}
+
+// EvalName returns the generic expression name used in error messages.
+func (m *MethodExpr) EvalName() string {
+ var prefix, suffix string
+ if m.Name != "" {
+ suffix = fmt.Sprintf("method %#v", m.Name)
+ } else {
+ suffix = "unnamed method"
+ }
+ if m.Service != nil {
+ prefix = m.Service.EvalName() + " "
+ }
+ return prefix + suffix
+}
+
+// Prepare makes sure the payload and result types are initialized (to the Empty
+// type if nil).
+func (m *MethodExpr) Prepare() {
+ if m.Payload == nil {
+ m.Payload = &AttributeExpr{Type: Empty}
+ }
+ if m.StreamingPayload == nil {
+ m.StreamingPayload = &AttributeExpr{Type: Empty}
+ }
+ if m.Result == nil {
+ m.Result = &AttributeExpr{Type: Empty}
+ }
+}
+
+// Validate validates the method payloads, results, and errors (if any).
+func (m *MethodExpr) Validate() error {
+ verr := new(eval.ValidationErrors)
+ if m.Payload.Type != Empty {
+ verr.Merge(m.Payload.Validate("payload", m))
+ // validate security scheme requirements
+ var requirements []*SecurityExpr
+ if len(m.Requirements) > 0 {
+ requirements = m.Requirements
+ } else if len(m.Service.Requirements) > 0 {
+ requirements = m.Service.Requirements
+ }
+ for _, r := range requirements {
+ for _, s := range r.Schemes {
+ verr.Merge(s.Validate())
+ switch s.Kind {
+ case BasicAuthKind:
+ if !hasTag(m.Payload, "security:username") {
+ verr.Add(m, "payload of method %q of service %q does not define a username attribute, use Username to define one", m.Name, m.Service.Name)
+ }
+ if !hasTag(m.Payload, "security:password") {
+ verr.Add(m, "payload of method %q of service %q does not define a password attribute, use Password to define one", m.Name, m.Service.Name)
+ }
+ case APIKeyKind:
+ if !hasTag(m.Payload, "security:apikey:"+s.SchemeName) {
+ verr.Add(m, "payload of method %q of service %q does not define an API key attribute, use APIKey to define one", m.Name, m.Service.Name)
+ }
+ case JWTKind:
+ if !hasTag(m.Payload, "security:token") {
+ verr.Add(m, "payload of method %q of service %q does not define a JWT attribute, use Token to define one", m.Name, m.Service.Name)
+ }
+ case OAuth2Kind:
+ if !hasTag(m.Payload, "security:accesstoken") {
+ verr.Add(m, "payload of method %q of service %q does not define a OAuth2 access token attribute, use AccessToken to define one", m.Name, m.Service.Name)
+ }
+ }
+ }
+ for _, scope := range r.Scopes {
+ found := false
+ for _, s := range r.Schemes {
+ if s.Kind == OAuth2Kind || s.Kind == JWTKind {
+ for _, se := range s.Scopes {
+ if se.Name == scope {
+ found = true
+ break
+ }
+ }
+ }
+ }
+ if !found {
+ verr.Add(m, "security scope %q not found in any of the security schemes.", scope)
+ }
+ }
+ }
+ }
+ if m.StreamingPayload.Type != Empty {
+ verr.Merge(m.StreamingPayload.Validate("streaming_payload", m))
+ }
+ if m.Result.Type != Empty {
+ verr.Merge(m.Result.Validate("result", m))
+ }
+ for _, e := range m.Errors {
+ if err := e.Validate(); err != nil {
+ if verrs, ok := err.(*eval.ValidationErrors); ok {
+ verr.Merge(verrs)
+ }
+ }
+ }
+ return verr
+}
+
+// hasTag is a helper function that traverses the given attribute and all its
+// bases recursively looking for an attribute with the given tag meta. This
+// recursion is only needed for attributes that have not been finalized yet.
+func hasTag(p *AttributeExpr, tag string) bool {
+ if p.HasTag(tag) {
+ return true
+ }
+ for _, base := range p.Bases {
+ ut, ok := base.(UserType)
+ if !ok {
+ continue
+ }
+ return hasTag(ut.Attribute(), tag)
+ }
+ if ut, ok := p.Type.(UserType); ok {
+ return hasTag(ut.Attribute(), tag)
+ }
+ return false
+}
+
+// Finalize makes sure the method payload and result types are set. It also
+// projects the result if it is a result type and a view is explicitly set in
+// the design or a result type having at most one view.
+func (m *MethodExpr) Finalize() {
+ if m.Payload == nil {
+ m.Payload = &AttributeExpr{Type: Empty}
+ } else {
+ m.Payload.Finalize()
+ }
+ if m.StreamingPayload == nil {
+ m.StreamingPayload = &AttributeExpr{Type: Empty}
+ } else {
+ m.StreamingPayload.Finalize()
+ }
+ if m.Result == nil {
+ m.Result = &AttributeExpr{Type: Empty}
+ } else {
+ m.Result.Finalize()
+ if rt, ok := m.Result.Type.(*ResultTypeExpr); ok {
+ rt.Finalize()
+ }
+ }
+ for _, e := range m.Errors {
+ e.Finalize()
+ }
+
+ // Inherit security requirements
+ noreq := false
+ for _, r := range m.Requirements {
+ // Handle special case of no security
+ for _, s := range r.Schemes {
+ if s.Kind == NoKind {
+ noreq = true
+ break
+ }
+ }
+ if noreq {
+ break
+ }
+ }
+ if noreq {
+ m.Requirements = nil
+ } else if len(m.Requirements) == 0 && len(m.Service.Requirements) > 0 {
+ m.Requirements = copyReqs(m.Service.Requirements)
+ }
+
+}
+
+// IsStreaming determines whether the method streams payload or result.
+func (m *MethodExpr) IsStreaming() bool {
+ return m.Stream != 0 && m.Stream != NoStreamKind
+}
+
+// IsPayloadStreaming determines whether the method streams payload.
+func (m *MethodExpr) IsPayloadStreaming() bool {
+ return m.Stream == ClientStreamKind || m.Stream == BidirectionalStreamKind
+}
+
+// helper function that duplicates just enough of a security expression so that
+// its scheme names can be overridden without affecting the original.
+func copyReqs(reqs []*SecurityExpr) []*SecurityExpr {
+ reqs2 := make([]*SecurityExpr, len(reqs))
+ for i, req := range reqs {
+ req2 := &SecurityExpr{Scopes: req.Scopes}
+ schs := make([]*SchemeExpr, len(req.Schemes))
+ for j, sch := range req.Schemes {
+ schs[j] = &SchemeExpr{
+ Kind: sch.Kind,
+ SchemeName: sch.SchemeName,
+ Description: sch.Description,
+ In: sch.In,
+ Name: sch.Name,
+ Scopes: sch.Scopes,
+ Flows: sch.Flows,
+ Meta: sch.Meta,
+ }
+ }
+ req2.Schemes = schs
+ reqs2[i] = req2
+ }
+ return reqs2
+}
diff --git a/vendor/goa.design/goa/expr/random.go b/vendor/goa.design/goa/expr/random.go
new file mode 100644
index 000000000..f3e628a72
--- /dev/null
+++ b/vendor/goa.design/goa/expr/random.go
@@ -0,0 +1,77 @@
+package expr
+
+import (
+ "crypto/md5"
+ "encoding/binary"
+ "math/rand"
+
+ "github.com/manveru/faker"
+)
+
+// Random generates consistent random values of different types given a seed.
+// The random values are consistent in that given the same seed the same random values get
+// generated.
+// The generator tracks the user types it has processed to avoid infinite
+// recursion; as a result, a new generator should be created to produce a new
+// random value for a user type.
+type Random struct {
+ Seed string
+ Seen map[string]*interface{}
+ faker *faker.Faker
+ rand *rand.Rand
+}
+
+// NewRandom returns a random value generator seeded from the given string value.
+func NewRandom(seed string) *Random {
+ hasher := md5.New()
+ hasher.Write([]byte(seed))
+ sint := int64(binary.BigEndian.Uint64(hasher.Sum(nil)))
+ source := rand.NewSource(sint)
+ ran := rand.New(source)
+ faker := &faker.Faker{
+ Language: "end",
+ Dict: faker.Dict["en"],
+ Rand: ran,
+ }
+ return &Random{
+ Seed: seed,
+ faker: faker,
+ rand: ran,
+ }
+}
+
+// Int produces a random integer.
+func (r *Random) Int() int {
+ return r.rand.Int()
+}
+
+// Int32 produces a random 32-bit integer.
+func (r *Random) Int32() int32 {
+ return r.rand.Int31()
+}
+
+// Int64 produces a random 64-bit integer.
+func (r *Random) Int64() int64 {
+ return r.rand.Int63()
+}
+
+// String produces a random string.
+func (r *Random) String() string {
+ return r.faker.Sentence(2, false)
+
+}
+
+// Bool produces a random boolean.
+func (r *Random) Bool() bool {
+ return r.rand.Int()%2 == 0
+}
+
+// Float32 produces a random float32 value.
+func (r *Random) Float32() float32 {
+ return r.rand.Float32()
+}
+
+// Float64 produces a random float64 value.
+func (r *Random) Float64() float64 {
+ return r.rand.Float64()
+}
diff --git a/vendor/goa.design/goa/expr/result_type.go b/vendor/goa.design/goa/expr/result_type.go
new file mode 100644
index 000000000..f3b85eacc
--- /dev/null
+++ b/vendor/goa.design/goa/expr/result_type.go
@@ -0,0 +1,454 @@
+package expr
+
+import (
+ "fmt"
+ "mime"
+ "strings"
+
+ "goa.design/goa/eval"
+)
+
+const (
+ // DefaultView is the name of the default result type view.
+ DefaultView = "default"
+)
+
+type (
+ // ResultTypeExpr describes the rendering of a service using field and
+ // link definitions. A field corresponds to a single member of the result
+ // type, it has a name and a type as well as optional validation rules.
+ // A link has a name and a URL that points to a related service. Result
+ // types also define views which describe which fields and links to
+ // render when building the response body for the corresponding view.
+ ResultTypeExpr struct {
+ // A result type is a type
+ *UserTypeExpr
+ // Identifier is the RFC 6838 result type media type identifier.
+ Identifier string
+ // ContentType identifies the value written to the response
+ // "Content-Type" header.
+ ContentType string
+ // Views list the supported views indexed by name.
+ Views []*ViewExpr
+ }
+
+ // ViewExpr defines which fields and links to render when building a
+ // response. The view is an object whose field names must match the
+ // names of the parent result type field names. The field definitions are
+ // inherited from the parent result type but may be overridden.
+ ViewExpr struct {
+ // Set of properties included in view
+ *AttributeExpr
+ // Name of view
+ Name string
+ // Parent result Type
+ Parent *ResultTypeExpr
+ }
+)
+
+var (
+ // ErrorResultIdentifier is the result type identifier used for error
+ // responses.
+ ErrorResultIdentifier = "application/vnd.goa.error"
+
+ // ErrorResult is the built-in result type for error responses.
+ ErrorResult = &ResultTypeExpr{
+ UserTypeExpr: &UserTypeExpr{
+ AttributeExpr: &AttributeExpr{
+ Type: errorResultType,
+ Description: "Error response result type",
+ UserExamples: []*ExampleExpr{{
+ Summary: "BadRequest",
+ Value: Val{
+ "name": "bad_request",
+ "id": "3F1FKVRR",
+ "message": "Value of ID must be an integer",
+ },
+ }},
+ Validation: &ValidationExpr{Required: []string{"name", "id", "message", "temporary", "timeout", "fault"}},
+ },
+ TypeName: "error",
+ },
+ Identifier: ErrorResultIdentifier,
+ Views: []*ViewExpr{errorResultView},
+ }
+
+ errorResultType = &Object{
+ {"name", &AttributeExpr{
+ Type: String,
+ Description: "Name is the name of this class of errors.",
+ Meta: MetaExpr{"struct:error:name": nil},
+ UserExamples: []*ExampleExpr{{Value: "bad_request"}},
+ }},
+ {"id", &AttributeExpr{
+ Type: String,
+ Description: "ID is a unique identifier for this particular occurrence of the problem.",
+ UserExamples: []*ExampleExpr{{Value: "123abc"}},
+ }},
+ {"message", &AttributeExpr{
+ Type: String,
+ Description: "Message is a human-readable explanation specific to this occurrence of the problem.",
+ UserExamples: []*ExampleExpr{{Value: "parameter 'p' must be an integer"}},
+ }},
+ {"temporary", &AttributeExpr{
+ Type: Boolean,
+ Description: "Is the error temporary?",
+ }},
+ {"timeout", &AttributeExpr{
+ Type: Boolean,
+ Description: "Is the error a timeout?",
+ }},
+ {"fault", &AttributeExpr{
+ Type: Boolean,
+ Description: "Is the error a server-side fault?",
+ }},
+ }
+
+ errorResultView = &ViewExpr{
+ AttributeExpr: &AttributeExpr{Type: errorResultType},
+ Name: "default",
+ }
+)
+
+// NewResultTypeExpr creates a result type definition but does not
+// execute the DSL.
+func NewResultTypeExpr(name, identifier string, fn func()) *ResultTypeExpr {
+ return &ResultTypeExpr{
+ UserTypeExpr: &UserTypeExpr{
+ AttributeExpr: &AttributeExpr{Type: &Object{}, DSLFunc: fn},
+ TypeName: name,
+ },
+ Identifier: identifier,
+ }
+}
+
+// CanonicalIdentifier returns the result type identifier sans suffix
+// which is what the DSL uses to store and lookup result types.
+func CanonicalIdentifier(identifier string) string {
+ base, params, err := mime.ParseMediaType(identifier)
+ if err != nil {
+ return identifier
+ }
+ id := base
+ if i := strings.Index(id, "+"); i != -1 {
+ id = id[:i]
+ }
+ return mime.FormatMediaType(id, params)
+}
+
+// Kind implements DataKind.
+func (m *ResultTypeExpr) Kind() Kind { return ResultTypeKind }
+
+// Dup creates a deep copy of the result type given a deep copy of its attribute.
+func (m *ResultTypeExpr) Dup(att *AttributeExpr) UserType {
+ return &ResultTypeExpr{
+ UserTypeExpr: m.UserTypeExpr.Dup(att).(*UserTypeExpr),
+ Identifier: m.Identifier,
+ Views: m.Views,
+ }
+}
+
+// ID returns the identifier of the result type.
+func (m *ResultTypeExpr) ID() string {
+ return m.Identifier
+}
+
+// Name returns the result type name.
+func (m *ResultTypeExpr) Name() string { return m.TypeName }
+
+// View returns the view with the given name.
+func (m *ResultTypeExpr) View(name string) *ViewExpr {
+ for _, v := range m.Views {
+ if v.Name == name {
+ return v
+ }
+ }
+ return nil
+}
+
+// IsError returns true if the result type identifier, ignoring the "view"
+func (m *ResultTypeExpr) IsError() bool {
+ base, params, err := mime.ParseMediaType(m.Identifier)
+ if err != nil {
+ panic("invalid result type identifier " + m.Identifier) // bug
+ }
+ delete(params, "view")
+ return mime.FormatMediaType(base, params) == ErrorResult.Identifier
+}
+
+// ComputeViews returns the result type views recursing as necessary if the result
+// type is a collection.
+func (m *ResultTypeExpr) ComputeViews() []*ViewExpr {
+ if m.Views != nil {
+ return m.Views
+ }
+ if a, ok := m.Type.(*Array); ok {
+ if mt, ok := a.ElemType.Type.(*ResultTypeExpr); ok {
+ return mt.ComputeViews()
+ }
+ }
+ return nil
+}
+
+// HasMultipleViews returns true if the result type has more than one view.
+func (m *ResultTypeExpr) HasMultipleViews() bool {
+ return len(m.Views) > 1
+}
+
+// ViewHasAttribute returns true if the result type view has the given
+// attribute.
+func (m *ResultTypeExpr) ViewHasAttribute(view, attr string) bool {
+ v := m.View(view)
+ if v == nil {
+ return false
+ }
+ return v.AttributeExpr.Find(attr) != nil
+}
+
+// Finalize builds the default view if not explicitly defined and finalizes
+// the underlying UserTypeExpr.
+func (m *ResultTypeExpr) Finalize() {
+ if m.View("default") == nil {
+ att := DupAtt(m.AttributeExpr)
+ if arr := AsArray(att.Type); arr != nil {
+ att.Type = AsObject(arr.ElemType.Type)
+ }
+ v := &ViewExpr{
+ AttributeExpr: att,
+ Name: "default",
+ Parent: m,
+ }
+ m.Views = append(m.Views, v)
+ }
+ m.UserTypeExpr.Finalize()
+}
+
+// Project creates a ResultTypeExpr containing the fields defined in the view
+// expression of m named after the view argument. Project also returns a links
+// object created after the link expression of m if there is one.
+//
+// The resulting result type defines a default view. The result type identifier is
+// computed by adding a parameter called "view" to the original identifier. The
+// value of the "view" parameter is the name of the view.
+func Project(m *ResultTypeExpr, view string, seen ...map[string]*AttributeExpr) (*ResultTypeExpr, error) {
+ _, params, _ := mime.ParseMediaType(m.Identifier)
+ if params["view"] == view {
+ // nothing to do
+ return m, nil
+ }
+ if _, ok := m.Type.(*Array); ok {
+ return projectCollection(m, view, seen...)
+ }
+ return projectSingle(m, view, seen...)
+}
+
+func projectSingle(m *ResultTypeExpr, view string, seen ...map[string]*AttributeExpr) (*ResultTypeExpr, error) {
+ v := m.View(view)
+ if v == nil {
+ return nil, fmt.Errorf("unknown view %#v", view)
+ }
+ viewObj := v.Type.(*Object)
+
+ // Compute validations - view may not have all fields
+ var val *ValidationExpr
+ if m.Validation != nil {
+ var required []string
+ for _, n := range m.Validation.Required {
+ if att := viewObj.Attribute(n); att != nil {
+ required = append(required, n)
+ }
+ }
+ val = m.Validation.Dup()
+ val.Required = required
+ }
+
+ // Compute description
+ desc := m.Description
+ if desc == "" {
+ desc = m.TypeName + " result type"
+ }
+ desc += " (" + view + " view)"
+
+ // Compute type name
+ typeName := m.TypeName
+ if view != "default" {
+ typeName += strings.Title(view)
+ }
+
+ var ut *UserTypeExpr
+ if len(seen) > 0 {
+ s := seen[0]
+ if att, ok := s[m.Identifier]; ok {
+ if rt, ok2 := att.Type.(*ResultTypeExpr); ok2 {
+ ut = &UserTypeExpr{
+ AttributeExpr: DupAtt(rt.Attribute()),
+ TypeName: rt.TypeName,
+ Service: rt.Service,
+ }
+ }
+ }
+ } else {
+ seen = append(seen, make(map[string]*AttributeExpr))
+ }
+ if ut == nil {
+ ut = &UserTypeExpr{
+ AttributeExpr: &AttributeExpr{
+ Description: desc,
+ Validation: val,
+ },
+ }
+ }
+ ut.TypeName = typeName
+ ut.AttributeExpr.Type = Dup(v.Type)
+ projected := &ResultTypeExpr{
+ Identifier: m.projectIdentifier(view),
+ UserTypeExpr: ut,
+ }
+ projected.Views = []*ViewExpr{{
+ Name: "default",
+ AttributeExpr: DupAtt(v.AttributeExpr),
+ Parent: projected,
+ }}
+
+ projectedObj := projected.Type.(*Object)
+ mtObj := m.Type.(*Object)
+ for _, nat := range *viewObj {
+ if at := mtObj.Attribute(nat.Name); at != nil {
+ pat, err := projectRecursive(at, nat, view, seen...)
+ if err != nil {
+ return nil, err
+ }
+ projectedObj.Set(nat.Name, pat)
+ }
+ }
+ return projected, nil
+}
+
+func projectCollection(m *ResultTypeExpr, view string, seen ...map[string]*AttributeExpr) (*ResultTypeExpr, error) {
+ // Project the collection element result type
+ e := m.Type.(*Array).ElemType.Type.(*ResultTypeExpr) // validation checked this cast would work
+ pe, err2 := Project(e, view, seen...)
+ if err2 != nil {
+ return nil, fmt.Errorf("collection element: %s", err2)
+ }
+
+ // Build the projected collection with the results
+ proj := &ResultTypeExpr{
+ Identifier: m.projectIdentifier(view),
+ UserTypeExpr: &UserTypeExpr{
+ AttributeExpr: &AttributeExpr{
+ Description: m.TypeName + " is the result type for an array of " + e.TypeName + " (" + view + " view)",
+ Type: &Array{ElemType: &AttributeExpr{Type: pe}},
+ UserExamples: m.UserExamples,
+ },
+ TypeName: pe.TypeName + "Collection",
+ },
+ Views: []*ViewExpr{{
+ AttributeExpr: DupAtt(pe.View("default").AttributeExpr),
+ Name: "default",
+ Parent: pe,
+ }},
+ }
+
+ // Run the DSL that was created by the CollectionOf function
+ if !eval.Execute(proj.DSL(), proj) {
+ return nil, eval.Context.Errors
+ }
+
+ return proj, nil
+}
+
+func projectRecursive(at *AttributeExpr, vat *NamedAttributeExpr, view string, seen ...map[string]*AttributeExpr) (*AttributeExpr, error) {
+ s := seen[0]
+ ut, isUT := at.Type.(UserType)
+ if isUT {
+ if att, ok := s[ut.ID()]; ok {
+ return att, nil
+ }
+ }
+ at = DupAtt(at)
+ if isUT {
+ s[ut.ID()] = at
+ }
+ if rt, ok := at.Type.(*ResultTypeExpr); ok {
+ vatt := vat.Attribute
+ var view string
+ if len(vatt.Meta["view"]) > 0 {
+ view = vatt.Meta["view"][0]
+ }
+ if view == "" && len(at.Meta["view"]) > 0 {
+ view = at.Meta["view"][0]
+ }
+ if view == "" {
+ view = DefaultView
+ }
+ pr, err := Project(rt, view, seen...)
+ if err != nil {
+ return nil, fmt.Errorf("view %#v on field %#v cannot be computed: %s", view, vat.Name, err)
+ }
+ at.Type = pr
+ return at, nil
+ }
+ if obj := AsObject(at.Type); obj != nil {
+ vobj := AsObject(vat.Attribute.Type)
+ if vobj == nil {
+ return at, nil
+ }
+ for _, cnat := range *obj {
+ var cvnat *NamedAttributeExpr
+ for _, nnat := range *vobj {
+ if nnat.Name == cnat.Name {
+ cvnat = nnat
+ break
+ }
+ }
+ if cvnat == nil {
+ continue
+ }
+ pat, err := projectRecursive(cnat.Attribute, cvnat, view, seen...)
+ if err != nil {
+ return nil, err
+ }
+ cnat.Attribute = pat
+ }
+ return at, nil
+ }
+ if ar := AsArray(at.Type); ar != nil {
+ pat, err := projectRecursive(ar.ElemType, vat, view, seen...)
+ if err != nil {
+ return nil, err
+ }
+ ar.ElemType = pat
+ }
+ return at, nil
+}
+
+// projectIdentifier computes the projected result type identifier by adding the
+// "view" param. We need the projected result type identifier to be different so
+// that looking up projected result types from ProjectedResultTypes works
+// correctly. It's also good for clients.
+func (m *ResultTypeExpr) projectIdentifier(view string) string {
+ base, params, err := mime.ParseMediaType(m.Identifier)
+ if err != nil {
+ base = m.Identifier
+ }
+ if params == nil {
+ params = make(map[string]string)
+ }
+ params["view"] = view
+ return mime.FormatMediaType(base, params)
+}
+
+// EvalName returns the generic definition name used in error messages.
+func (v *ViewExpr) EvalName() string {
+ var prefix, suffix string
+ if v.Name != "" {
+ prefix = fmt.Sprintf("view %#v", v.Name)
+ } else {
+ prefix = "unnamed view"
+ }
+ if v.Parent != nil {
+ suffix = fmt.Sprintf(" of %s", v.Parent.EvalName())
+ }
+ return prefix + suffix
+}
diff --git a/vendor/goa.design/goa/expr/root.go b/vendor/goa.design/goa/expr/root.go
new file mode 100644
index 000000000..0b86acfa0
--- /dev/null
+++ b/vendor/goa.design/goa/expr/root.go
@@ -0,0 +1,294 @@
+package expr
+
+import (
+ "sort"
+ "strings"
+
+ "goa.design/goa/eval"
+)
+
+// Root is the root object built by the DSL.
+var Root = &RootExpr{GeneratedTypes: &GeneratedRoot{}}
+
+type (
+ // RootExpr is the struct built by the DSL on process start.
+ RootExpr struct {
+ // API contains the API expression built by the DSL.
+ API *APIExpr
+ // Services contains the list of services exposed by the API.
+ Services []*ServiceExpr
+ // Errors contains the list of errors returned by all the API
+ // methods.
+ Errors []*ErrorExpr
+ // Types contains the user types described in the DSL.
+ Types []UserType
+ // ResultTypes contains the result types described in the DSL.
+ ResultTypes []UserType
+ // GeneratedTypes contains the types generated during DSL
+ // execution.
+ GeneratedTypes *GeneratedRoot
+ // Conversions list the user type to external type mappings.
+ Conversions []*TypeMap
+ // Creations list the external type to user type mappings.
+ Creations []*TypeMap
+ // Schemes list the registered security schemes.
+ Schemes []*SchemeExpr
+
+ // Meta is a set of key/value pairs with semantic that is
+ // specific to each generator.
+ Meta MetaExpr
+ }
+
+ // MetaExpr is a set of key/value pairs
+ MetaExpr map[string][]string
+
+ // TypeMap defines a user to external type mapping.
+ TypeMap struct {
+ // User is the user type being converted or created.
+ User UserType
+
+ // External is an instance of the type being converted from or to.
+ External interface{}
+ }
+)
+
+// NameMap returns the attribute and transport element name encoded in the given
+// string. The encoding uses a simple "attribute:element" notation which allows
+// to map transport field names (HTTP headers etc.) to underlying attributes.
+// The second element of the encoding is optional in which case both the element
+// and attribute have the same name.
+func NameMap(encoded string) (string, string) {
+ elems := strings.Split(encoded, ":")
+ attName := elems[0]
+ name := attName
+ if len(elems) > 1 {
+ name = elems[1]
+ }
+ return attName, name
+}
+
+// WalkSets returns the expressions in order of evaluation.
+func (r *RootExpr) WalkSets(walk eval.SetWalker) {
+ if r.API == nil {
+ r.API = NewAPIExpr("API", func() {})
+ }
+
+ // Top level API DSL
+ walk(eval.ExpressionSet{r.API})
+
+ // User types
+ types := make(eval.ExpressionSet, len(r.Types))
+ for i, t := range r.Types {
+ types[i] = t.Attribute()
+ }
+ walk(types)
+
+ // Result types
+ mtypes := make(eval.ExpressionSet, len(r.ResultTypes))
+ for i, mt := range r.ResultTypes {
+ mtypes[i] = mt.(*ResultTypeExpr)
+ }
+ walk(mtypes)
+
+ // Services
+ services := make(eval.ExpressionSet, len(r.Services))
+ var methods eval.ExpressionSet
+ for i, s := range r.Services {
+ services[i] = s
+ }
+ walk(services)
+
+ // Methods (must be done after services)
+ for _, s := range r.Services {
+ for _, m := range s.Methods {
+ methods = append(methods, m)
+ }
+ }
+ walk(methods)
+
+ // HTTP services and endpoints
+ httpsvcs := make(eval.ExpressionSet, len(r.API.HTTP.Services))
+ sort.SliceStable(r.API.HTTP.Services, func(i, j int) bool {
+ if r.API.HTTP.Services[j].ParentName == r.API.HTTP.Services[i].Name() {
+ return true
+ }
+ return false
+ })
+ var httpepts eval.ExpressionSet
+ var httpsvrs eval.ExpressionSet
+ for i, svc := range r.API.HTTP.Services {
+ httpsvcs[i] = svc
+ for _, e := range svc.HTTPEndpoints {
+ httpepts = append(httpepts, e)
+ }
+ for _, s := range svc.FileServers {
+ httpsvrs = append(httpsvrs, s)
+ }
+ }
+ walk(eval.ExpressionSet{r.API.HTTP})
+ walk(httpsvcs)
+ walk(httpepts)
+ walk(httpsvrs)
+
+ // GRPC services and endpoints
+ grpcsvcs := make(eval.ExpressionSet, len(r.API.GRPC.Services))
+ sort.SliceStable(r.API.GRPC.Services, func(i, j int) bool {
+ if r.API.GRPC.Services[j].ParentName == r.API.GRPC.Services[i].Name() {
+ return true
+ }
+ return false
+ })
+ var grpcepts eval.ExpressionSet
+ for i, svc := range r.API.GRPC.Services {
+ grpcsvcs[i] = svc
+ for _, e := range svc.GRPCEndpoints {
+ grpcepts = append(grpcepts, e)
+ }
+ }
+ walk(eval.ExpressionSet{r.API.GRPC})
+ walk(grpcsvcs)
+ walk(grpcepts)
+}
+
+// DependsOn returns nil, the core DSL has no dependency.
+func (r *RootExpr) DependsOn() []eval.Root { return nil }
+
+// Packages returns the Go import path to this and the dsl packages.
+func (r *RootExpr) Packages() []string {
+ return []string{
+ "goa.design/goa/expr",
+ "goa.design/goa/dsl",
+ }
+}
+
+// UserType returns the user type expression with the given name if found, nil otherwise.
+func (r *RootExpr) UserType(name string) UserType {
+ for _, t := range r.Types {
+ if t.Name() == name {
+ return t
+ }
+ }
+ for _, t := range r.ResultTypes {
+ if t.Name() == name {
+ return t
+ }
+ }
+ return nil
+}
+
+// GeneratedResultType returns the generated result type expression with the given
+// id, nil if there isn't one.
+func (r *RootExpr) GeneratedResultType(id string) *ResultTypeExpr {
+ for _, t := range *r.GeneratedTypes {
+ mt := t.(*ResultTypeExpr)
+ if mt.Identifier == id {
+ return mt
+ }
+ }
+ return nil
+}
+
+// Service returns the service with the given name.
+func (r *RootExpr) Service(name string) *ServiceExpr {
+ for _, s := range r.Services {
+ if s.Name == name {
+ return s
+ }
+ }
+ return nil
+}
+
+// Error returns the error with the given name.
+func (r *RootExpr) Error(name string) *ErrorExpr {
+ for _, e := range r.Errors {
+ if e.Name == name {
+ return e
+ }
+ }
+ return nil
+}
+
+// HTTPService returns the service with the given name if any.
+func (r *RootExpr) HTTPService(name string) *HTTPServiceExpr {
+ for _, res := range r.API.HTTP.Services {
+ if res.Name() == name {
+ return res
+ }
+ }
+ return nil
+}
+
+// HTTPServiceFor creates a new or returns the existing service definition for the
+// given service.
+func (r *RootExpr) HTTPServiceFor(s *ServiceExpr) *HTTPServiceExpr {
+ if res := r.HTTPService(s.Name); res != nil {
+ return res
+ }
+ res := &HTTPServiceExpr{
+ ServiceExpr: s,
+ }
+ r.API.HTTP.Services = append(r.API.HTTP.Services, res)
+ return res
+}
+
+// EvalName is the name of the DSL.
+func (r *RootExpr) EvalName() string {
+ return "design"
+}
+
+// Validate makes sure the root expression is valid for code generation.
+func (r *RootExpr) Validate() error {
+ var verr eval.ValidationErrors
+ if r.API == nil {
+ verr.Add(r, "Missing API declaration")
+ }
+ return &verr
+}
+
+// Finalize finalizes the server expressions.
+func (r *RootExpr) Finalize() {
+ if r.API == nil {
+ r.API = &APIExpr{}
+ }
+ if len(r.API.Servers) == 0 {
+ r.API.Servers = []*ServerExpr{r.API.DefaultServer()}
+ }
+ for _, s := range r.API.Servers {
+ s.Finalize()
+ }
+}
+
+// Dup creates a new map from the given expression.
+func (m MetaExpr) Dup() MetaExpr {
+ d := make(MetaExpr, len(m))
+ for k, v := range m {
+ d[k] = v
+ }
+ return d
+}
+
+// Merge merges the src meta expression into m. For keys present in both m
+// and src, the values from src are appended to the values in m unless they
+// are already present.
+func (m MetaExpr) Merge(src MetaExpr) {
+ for k, vals := range src {
+ if mvals, ok := m[k]; ok {
+ var found bool
+ for _, v := range vals {
+ found = false
+ for _, mv := range mvals {
+ if mv == v {
+ found = true
+ break
+ }
+ }
+ if !found {
+ mvals = append(mvals, v)
+ }
+ }
+ m[k] = mvals
+ } else {
+ m[k] = vals
+ }
+ }
+}
diff --git a/vendor/goa.design/goa/expr/security.go b/vendor/goa.design/goa/expr/security.go
new file mode 100644
index 000000000..73824a69c
--- /dev/null
+++ b/vendor/goa.design/goa/expr/security.go
@@ -0,0 +1,224 @@
+package expr
+
+import (
+ "fmt"
+ "net/url"
+
+ "goa.design/goa/eval"
+)
+
+// SchemeKind is a type of security scheme.
+type SchemeKind int
+
+const (
+ // OAuth2Kind identifies an "OAuth2" security scheme.
+ OAuth2Kind SchemeKind = iota + 1
+ // BasicAuthKind means "basic" security scheme.
+ BasicAuthKind
+ // APIKeyKind means "apiKey" security scheme.
+ APIKeyKind
+ // JWTKind means an "apiKey" security scheme, with support for
+ // TokenPath and Scopes.
+ JWTKind
+ // NoKind means to have no security for this endpoint.
+ NoKind
+)
+
+// FlowKind is a type of OAuth2 flow.
+type FlowKind int
+
+const (
+ // AuthorizationCodeFlowKind identifies an OAuth2 authorization code
+ // flow.
+ AuthorizationCodeFlowKind FlowKind = iota + 1
+ // ImplicitFlowKind identifies an OAuth2 implicit flow.
+ ImplicitFlowKind
+ // PasswordFlowKind identifies a Resource Owner Password flow.
+ PasswordFlowKind
+ // ClientCredentialsFlowKind identifies an OAuth Client Credentials flow.
+ ClientCredentialsFlowKind
+)
+
+type (
+ // SecurityExpr defines a security requirement.
+ SecurityExpr struct {
+ // Schemes is the list of security schemes used for this
+ // requirement.
+ Schemes []*SchemeExpr
+ // Scopes list the required scopes if any.
+ Scopes []string
+ }
+
+ // SchemeExpr defines a security scheme used to authenticate against the
+ // method being designed.
+ SchemeExpr struct {
+ // Kind is the sort of security scheme this object represents.
+ Kind SchemeKind
+ // SchemeName is the name of the security scheme, e.g. "googAuth",
+ // "my_big_token", "jwt".
+ SchemeName string
+ // Description describes the security scheme e.g. "Google OAuth2"
+ Description string
+ // In determines the location of the API key, one of "header" or
+ // "query".
+ In string
+ // Name refers to a header or parameter name, based on In's
+ // value.
+ Name string
+ // Scopes lists the JWT or OAuth2 scopes.
+ Scopes []*ScopeExpr
+ // Flows determine the oauth2 flows supported by this scheme.
+ Flows []*FlowExpr
+ // Meta is a list of key/value pairs
+ Meta MetaExpr
+ }
+
+ // FlowExpr describes a specific OAuth2 flow.
+ FlowExpr struct {
+ // Kind is the kind of flow.
+ Kind FlowKind
+ // AuthorizationURL to be used for implicit or authorizationCode
+ // flows.
+ AuthorizationURL string
+ // TokenURL to be used for password, clientCredentials or
+ // authorizationCode flows.
+ TokenURL string
+ // RefreshURL to be used for obtaining refresh token.
+ RefreshURL string
+ }
+
+ // A ScopeExpr defines a scope name and description.
+ ScopeExpr struct {
+ // Name of the scope.
+ Name string
+ // Description is the description of the scope.
+ Description string
+ }
+)
+
+// EvalName returns the generic definition name used in error messages.
+func (s *SecurityExpr) EvalName() string {
+ var suffix string
+ if len(s.Schemes) > 0 && len(s.Schemes[0].SchemeName) > 0 {
+ suffix = "scheme " + s.Schemes[0].SchemeName
+ }
+ return "Security" + suffix
+}
+
+// DupRequirement creates a copy of the given security requirement.
+func DupRequirement(req *SecurityExpr) *SecurityExpr {
+ dup := &SecurityExpr{
+ Scopes: req.Scopes,
+ Schemes: make([]*SchemeExpr, 0, len(req.Schemes)),
+ }
+ for _, s := range req.Schemes {
+ dup.Schemes = append(dup.Schemes, DupScheme(s))
+ }
+ return dup
+}
+
+// DupScheme creates a copy of the given scheme expression.
+func DupScheme(sch *SchemeExpr) *SchemeExpr {
+ dup := SchemeExpr{
+ Kind: sch.Kind,
+ SchemeName: sch.SchemeName,
+ Description: sch.Description,
+ In: sch.In,
+ Scopes: sch.Scopes,
+ Flows: sch.Flows,
+ Meta: sch.Meta,
+ }
+ return &dup
+}
+
+// Type returns the type of the scheme.
+func (s *SchemeExpr) Type() string {
+ switch s.Kind {
+ case OAuth2Kind:
+ return "OAuth2"
+ case BasicAuthKind:
+ return "BasicAuth"
+ case APIKeyKind:
+ return "APIKey"
+ case JWTKind:
+ return "JWT"
+ default:
+ panic(fmt.Sprintf("unknown scheme kind: %#v", s.Kind)) // bug
+ }
+}
+
+// EvalName returns the generic definition name used in error messages.
+func (s *SchemeExpr) EvalName() string {
+ return s.Type() + "Security"
+}
+
+// Validate ensures that the method payload contains attributes required
+// by the scheme.
+func (s *SchemeExpr) Validate() *eval.ValidationErrors {
+ verr := new(eval.ValidationErrors)
+ for _, f := range s.Flows {
+ if err := f.Validate(); err != nil {
+ verr.Merge(err)
+ }
+ }
+ return verr
+}
+
+// EvalName returns the name of the expression used in error messages.
+func (f *FlowExpr) EvalName() string {
+ if f.TokenURL != "" {
+ return fmt.Sprintf("flow with token URL %q", f.TokenURL)
+ }
+ return fmt.Sprintf("flow with refresh URL %q", f.RefreshURL)
+}
+
+// Validate ensures that TokenURL and AuthorizationURL are valid URLs.
+func (f *FlowExpr) Validate() *eval.ValidationErrors {
+ verr := new(eval.ValidationErrors)
+ _, err := url.Parse(f.TokenURL)
+ if err != nil {
+ verr.Add(f, "invalid token URL %q: %s", f.TokenURL, err)
+ }
+ _, err = url.Parse(f.AuthorizationURL)
+ if err != nil {
+ verr.Add(f, "invalid authorization URL %q: %s", f.AuthorizationURL, err)
+ }
+ _, err = url.Parse(f.RefreshURL)
+ if err != nil {
+ verr.Add(f, "invalid refresh URL %q: %s", f.RefreshURL, err)
+ }
+ return verr
+}
+
+// Type returns the grant type of the OAuth2 grant.
+func (f *FlowExpr) Type() string {
+ switch f.Kind {
+ case AuthorizationCodeFlowKind:
+ return "authorization_code"
+ case ImplicitFlowKind:
+ return "implicit"
+ case PasswordFlowKind:
+ return "password"
+ case ClientCredentialsFlowKind:
+ return "client_credentials"
+ default:
+ panic(fmt.Sprintf("unknown flow kind: %#v", f.Kind)) // bug
+ }
+}
+
+func (k SchemeKind) String() string {
+ switch k {
+ case BasicAuthKind:
+ return "Basic"
+ case APIKeyKind:
+ return "APIKey"
+ case JWTKind:
+ return "JWT"
+ case OAuth2Kind:
+ return "OAuth2"
+ case NoKind:
+ return "None"
+ default:
+ panic("unknown kind") // bug
+ }
+}
diff --git a/vendor/goa.design/goa/expr/server.go b/vendor/goa.design/goa/expr/server.go
new file mode 100644
index 000000000..517ff81cf
--- /dev/null
+++ b/vendor/goa.design/goa/expr/server.go
@@ -0,0 +1,219 @@
+package expr
+
+import (
+ "fmt"
+ "net/url"
+ "regexp"
+ "sort"
+ "strings"
+
+ "goa.design/goa/eval"
+)
+
+// WildcardRegex is the regular expression used to capture path parameters.
+var WildcardRegex = regexp.MustCompile(`/{\*?([a-zA-Z0-9_]+)}`)
+
+type (
+ // ServerExpr contains a single API host information.
+ ServerExpr struct {
+ // Name of server
+ Name string
+ // Description of server
+ Description string
+ // Services list the services hosted by the server.
+ Services []string
+ // Hosts list the server hosts.
+ Hosts []*HostExpr
+ }
+
+ // HostExpr describes a server host.
+ HostExpr struct {
+ // Name of host
+ Name string
+ // Name of server that uses host.
+ ServerName string
+ // Description of host
+ Description string
+ // URIs to host if any, may contain parameter elements using
+ // the "{param}" syntax.
+ URIs []URIExpr
+ // Variables defines the URI variables if any.
+ Variables *AttributeExpr
+ }
+
+ // URIExpr represents a parameterized URI.
+ URIExpr string
+)
+
+// ExtractWildcards returns the names of the wildcards that appear in path.
+func ExtractWildcards(path string) []string {
+ matches := WildcardRegex.FindAllStringSubmatch(path, -1)
+ wcs := make([]string, len(matches))
+ for i, m := range matches {
+ wcs[i] = m[1]
+ }
+ return wcs
+}
+
+// EvalName is the qualified name of the expression.
+func (s *ServerExpr) EvalName() string { return "Server " + s.Name }
+
+// Validate validates the server and server hosts.
+func (s *ServerExpr) Validate() error {
+ verr := new(eval.ValidationErrors)
+ for _, h := range s.Hosts {
+ verr.Merge(h.Validate().(*eval.ValidationErrors))
+ }
+ for _, svc := range s.Services {
+ if Root.Service(svc) == nil {
+ verr.Add(s, "service %q undefined", svc)
+ }
+ }
+ return verr
+}
+
+// Finalize initializes the server services and/or host with default values if
+// not set explicitly in the design.
+func (s *ServerExpr) Finalize() {
+ if len(s.Services) == 0 {
+ s.Services = make([]string, len(Root.Services))
+ for i, svc := range Root.Services {
+ s.Services[i] = svc.Name
+ }
+ }
+ if len(s.Hosts) == 0 {
+ s.Hosts = []*HostExpr{{
+ Name: "svc",
+ Description: "Service host",
+ URIs: []URIExpr{"http://localhost:80", "grpc://localhost:8080"},
+ }}
+ }
+ for _, h := range s.Hosts {
+ h.Finalize()
+ }
+}
+
+// Schemes returns the list of transport schemes used by all the server
+// endpoints. The possible values for the elements of the returned slice are
+// "http", "https", "grpc" and "grpcs".
+func (s *ServerExpr) Schemes() []string {
+ schemes := make(map[string]struct{})
+ for _, h := range s.Hosts {
+ for _, sch := range h.Schemes() {
+ schemes[sch] = struct{}{}
+ }
+ }
+ ss := make([]string, len(schemes))
+ i := 0
+ for s := range schemes {
+ ss[i] = s
+ i++
+ }
+ sort.Strings(ss)
+ return ss
+}
+
+var validSchemes = map[string]struct{}{"http": {}, "https": {}, "grpc": {}, "grpcs": {}}
+
+// Validate validates the host.
+func (h *HostExpr) Validate() error {
+ verr := new(eval.ValidationErrors)
+ if len(h.URIs) == 0 {
+ verr.Add(h, "host must defined at least one URI")
+ }
+ for _, u := range h.URIs {
+ vu := WildcardRegex.ReplaceAllString(string(u), "/w")
+ pu, err := url.Parse(vu)
+ if err != nil {
+ verr.Add(h, "malformed URI %q", u)
+ continue
+ }
+ if pu.Scheme == "" {
+ verr.Add(h, "missing scheme for URI %q, scheme must be one of 'http', 'https', 'grpc' or 'grpcs'", u)
+ } else if _, ok := validSchemes[pu.Scheme]; !ok {
+ verr.Add(h, "invalid scheme for URI %q, scheme must be one of 'http', 'https', 'grpc' or 'grpcs'", u)
+ }
+ }
+ if h.Variables != nil {
+ for _, v := range *(h.Variables.Type.(*Object)) {
+ if !IsPrimitive(v.Attribute.Type) {
+ verr.Add(h, "invalid type for URI variable %q: type must be a primitive", v.Name)
+ }
+ if v.Attribute.Validation == nil {
+ if v.Attribute.DefaultValue == nil {
+ verr.Add(h, "URI variable %q must have a default value or an enum validation", v.Name)
+ }
+ } else if v.Attribute.DefaultValue == nil && len(v.Attribute.Validation.Values) == 0 {
+ verr.Add(h, "URI variable %q must have a default value or an enum validation", v.Name)
+ }
+ }
+ }
+ return verr
+}
+
+// Finalize makes sure Variables is set.
+func (h *HostExpr) Finalize() {
+ if h.Variables == nil {
+ h.Variables = &AttributeExpr{Type: &Object{}}
+ }
+}
+
+// EvalName returns the name returned in error messages.
+func (h *HostExpr) EvalName() string {
+ return fmt.Sprintf("host %q of server %q", h.Name, h.ServerName)
+}
+
+// Attribute returns the variables attribute. This implements the CompositeExpr
+// interface.
+func (h *HostExpr) Attribute() *AttributeExpr {
+ if h.Variables == nil {
+ h.Variables = &AttributeExpr{Type: &Object{}}
+ }
+ return h.Variables
+}
+
+// Schemes returns the list of transport schemes defined for the host. The
+// possible values for the elements of the returned slice are "http", "https",
+// "grpc" and "grpcs".
+func (h *HostExpr) Schemes() []string {
+ schemes := make(map[string]struct{})
+ for _, uri := range h.URIs {
+ ustr := string(uri)
+ // Did not use url package to find scheme because the url may
+ // contain params (e.g. http://{version}.example.com) which need
+ // substitution for url.Parse to succeed. Also URIs in host must have
+ // a scheme otherwise validations would have failed.
+ switch {
+ case strings.HasPrefix(ustr, "https"):
+ schemes["https"] = struct{}{}
+ case strings.HasPrefix(ustr, "http"):
+ schemes["http"] = struct{}{}
+ case strings.HasPrefix(ustr, "grpcs"):
+ schemes["grpcs"] = struct{}{}
+ case strings.HasPrefix(ustr, "grpc"):
+ schemes["grpc"] = struct{}{}
+ }
+ }
+ ss := make([]string, len(schemes))
+ i := 0
+ for s := range schemes {
+ ss[i] = s
+ i++
+ }
+ sort.Strings(ss)
+ return ss
+}
+
+// Params return the names of the parameters used in URI if any.
+func (u URIExpr) Params() []string {
+ r := regexp.MustCompile(`\{([^\{\}]+)\}`)
+ matches := r.FindAllStringSubmatch(string(u), -1)
+ if len(matches) == 0 {
+ return nil
+ }
+ wcs := make([]string, len(matches))
+ for i, m := range matches {
+ wcs[i] = m[1]
+ }
+ return wcs
+}
diff --git a/vendor/goa.design/goa/expr/service.go b/vendor/goa.design/goa/expr/service.go
new file mode 100644
index 000000000..d914d4ebe
--- /dev/null
+++ b/vendor/goa.design/goa/expr/service.go
@@ -0,0 +1,143 @@
+package expr
+
+import (
+ "fmt"
+
+ "goa.design/goa/eval"
+)
+
+type (
+ // ServiceExpr describes a set of related methods.
+ ServiceExpr struct {
+ // DSLFunc contains the DSL used to initialize the expression.
+ eval.DSLFunc
+ // Name of service.
+ Name string
+ // Description of service used in documentation.
+ Description string
+ // Docs points to external documentation
+ Docs *DocsExpr
+ // Methods is the list of service methods.
+ Methods []*MethodExpr
+ // Errors list the errors common to all the service methods.
+ Errors []*ErrorExpr
+ // Requirements contains the security requirements that apply to
+ // all the service methods. One requirement is composed of
+ // potentially multiple schemes. Incoming requests must validate
+ // at least one requirement to be authorized.
+ Requirements []*SecurityExpr
+ // Meta is a set of key/value pairs with semantic that is
+ // specific to each generator.
+ Meta MetaExpr
+ }
+
+ // ErrorExpr defines an error response. It consists of a named
+ // attribute.
+ ErrorExpr struct {
+ // AttributeExpr is the underlying attribute.
+ *AttributeExpr
+ // Name is the unique name of the error.
+ Name string
+ }
+)
+
+// Method returns the method expression with the given name, nil if there isn't
+// one.
+func (s *ServiceExpr) Method(n string) *MethodExpr {
+ for _, m := range s.Methods {
+ if m.Name == n {
+ return m
+ }
+ }
+ return nil
+}
+
+// EvalName returns the generic expression name used in error messages.
+func (s *ServiceExpr) EvalName() string {
+ if s.Name == "" {
+ return "unnamed service"
+ }
+ return fmt.Sprintf("service %#v", s.Name)
+}
+
+// Error returns the error with the given name if any.
+func (s *ServiceExpr) Error(name string) *ErrorExpr {
+ for _, erro := range s.Errors {
+ if erro.Name == name {
+ return erro
+ }
+ }
+ return Root.Error(name)
+}
+
+// Hash returns a unique hash value for s.
+func (s *ServiceExpr) Hash() string {
+ return "_service_+" + s.Name
+}
+
+// Validate validates the service methods and errors.
+func (s *ServiceExpr) Validate() error {
+ verr := new(eval.ValidationErrors)
+ for _, m := range s.Methods {
+ if err := m.Validate(); err != nil {
+ if verrs, ok := err.(*eval.ValidationErrors); ok {
+ verr.Merge(verrs)
+ }
+ }
+ }
+ for _, e := range s.Errors {
+ if err := e.Validate(); err != nil {
+ if verrs, ok := err.(*eval.ValidationErrors); ok {
+ verr.Merge(verrs)
+ }
+ }
+ }
+ return verr
+}
+
+// Finalize finalizes all the service methods and errors.
+func (s *ServiceExpr) Finalize() {
+ for _, e := range s.Errors {
+ e.Finalize()
+ }
+}
+
+// Validate checks that the error name is found in the result meta for
+// custom error types.
+func (e *ErrorExpr) Validate() error {
+ verr := new(eval.ValidationErrors)
+ rt, ok := e.AttributeExpr.Type.(*ResultTypeExpr)
+ if !ok {
+ return verr
+ }
+ if o := AsObject(rt); o != nil {
+ var errField string
+ for _, n := range *o {
+ if _, ok := n.Attribute.Meta["struct:error:name"]; ok {
+ if errField != "" {
+ verr.Add(e, "meta 'struct:error:name' already set for attribute %q of result type %q", errField, rt.Identifier)
+ continue
+ }
+ errField = n.Name
+ }
+ }
+ if errField == "" {
+ verr.Add(e, "meta 'struct:error:name' is missing in result type %q", rt.Identifier)
+ }
+ }
+ return verr
+}
+
+// Finalize makes sure the error type is a user type since it has to generate a
+// Go error.
+// Note: this may produce a user type with an attribute that is not an object!
+func (e *ErrorExpr) Finalize() {
+ att := e.AttributeExpr
+ if _, ok := att.Type.(UserType); !ok {
+ ut := &UserTypeExpr{
+ AttributeExpr: att,
+ TypeName: e.Name,
+ }
+ e.AttributeExpr = &AttributeExpr{Type: ut}
+ }
+}
diff --git a/vendor/goa.design/goa/expr/testing.go b/vendor/goa.design/goa/expr/testing.go
new file mode 100644
index 000000000..58b01f940
--- /dev/null
+++ b/vendor/goa.design/goa/expr/testing.go
@@ -0,0 +1,74 @@
+package expr
+
+import (
+ "testing"
+
+ "goa.design/goa/eval"
+)
+
+// RunHTTPDSL returns the http DSL root resulting from running the given DSL.
+func RunHTTPDSL(t *testing.T, dsl func()) *RootExpr {
+ setupDSLRun()
+
+ // run DSL (first pass)
+ if !eval.Execute(dsl, nil) {
+ t.Fatal(eval.Context.Error())
+ }
+
+ // run DSL (second pass)
+ if err := eval.RunDSL(); err != nil {
+ t.Fatal(err)
+ }
+
+ // return generated root
+ return Root
+}
+
+// RunInvalidHTTPDSL returns the error resulting from running the given DSL.
+func RunInvalidHTTPDSL(t *testing.T, dsl func()) error {
+ setupDSLRun()
+
+ // run DSL (first pass)
+ if !eval.Execute(dsl, nil) {
+ return eval.Context.Errors
+ }
+
+ // run DSL (second pass)
+ if err := eval.RunDSL(); err != nil {
+ return err
+ }
+
+ // expected an error - didn't get one
+ t.Fatal("expected a DSL evaluation error - got none")
+
+ return nil
+}
+
+// RunGRPCDSL returns the gRPC DSL root resulting from running the given DSL.
+func RunGRPCDSL(t *testing.T, dsl func()) *RootExpr {
+ setupDSLRun()
+
+ // run DSL (first pass)
+ if !eval.Execute(dsl, nil) {
+ t.Fatal(eval.Context.Error())
+ }
+
+ // run DSL (second pass)
+ if err := eval.RunDSL(); err != nil {
+ t.Fatal(err)
+ }
+
+ // return generated root
+ return Root
+}
+
+func setupDSLRun() {
+ // reset all roots and codegen data structures
+ eval.Reset()
+ Root = new(RootExpr)
+ Root.GeneratedTypes = &GeneratedRoot{}
+ eval.Register(Root)
+ eval.Register(Root.GeneratedTypes)
+ Root.API = NewAPIExpr("test api", func() {})
+ Root.API.Servers = []*ServerExpr{Root.API.DefaultServer()}
+}
diff --git a/vendor/goa.design/goa/expr/types.go b/vendor/goa.design/goa/expr/types.go
new file mode 100644
index 000000000..45ec2948f
--- /dev/null
+++ b/vendor/goa.design/goa/expr/types.go
@@ -0,0 +1,717 @@
+/*
+Package expr defines types which are used to describe the data structures used
+by both the request and response messages used by services.
+
+There are primitive types corresponding to scalar values (bool, string, integers
+and numbers), array types which represent a collection of items, map types which
+represent maps of key/value pairs and object types describing data structures
+with fields.
+
+The package also defines user types which can also be result types. A result
+type is a user type used to describe response messages and that defines views.
+*/
+package expr
+
+import (
+ "fmt"
+ "reflect"
+
+ "goa.design/goa/eval"
+)
+
+type (
+ // A Kind defines the conceptual type that a DataType represents.
+ Kind uint
+
+ // DataType is the common interface to all types.
+ DataType interface {
+ // Kind of data type, one of the Kind enum.
+ Kind() Kind
+ // Name returns the type name.
+ Name() string
+ // IsCompatible checks whether val has a Go type that is compatible with the data
+ // type.
+ IsCompatible(interface{}) bool
+ // Example generates a pseudo-random value using the given random generator.
+ Example(*Random) interface{}
+ // Hash returns a unique hash value for the instance of the type.
+ Hash() string
+ }
+
+ // Primitive is the type for null, boolean, integer, number, string, and time.
+ Primitive Kind
+
+ // Array is the type used to describe field arrays or repeated fields.
+ Array struct {
+ ElemType *AttributeExpr
+ }
+
+ // Map is the type used to describe maps of fields.
+ Map struct {
+ KeyType *AttributeExpr
+ ElemType *AttributeExpr
+ }
+
+ // NamedAttributeExpr describes object attributes together with their
+ // names.
+ NamedAttributeExpr struct {
+ // Name of attribute
+ Name string
+ // Attribute
+ Attribute *AttributeExpr
+ }
+
+ // Object is the type used to describe composite data structures.
+ // Note: not a map because order matters.
+ Object []*NamedAttributeExpr
+
+ // UserType is the interface implemented by all user type
+ // implementations. Plugins may leverage this interface to introduce
+ // their own types.
+ UserType interface {
+ DataType
+ // ID returns the identifier for the user type.
+ ID() string
+ // Rename changes the type name to the given value.
+ Rename(string)
+ // Attribute provides the underlying type and validations.
+ Attribute() *AttributeExpr
+ // SetAttribute updates the underlying attribute.
+ SetAttribute(*AttributeExpr)
+ // Dup makes a shallow copy of the type and assigns its
+ // attribute with att.
+ Dup(att *AttributeExpr) UserType
+ // EvalName returns the name reported by the DSL engine.
+ EvalName() string
+ // Validate checks that the user type expression is consistent.
+ Validate(ctx string, parent eval.Expression) *eval.ValidationErrors
+ // Finalize finalizes the underlying type.
+ Finalize()
+ }
+
+ // ArrayVal is the type used to set the default value for arrays.
+ ArrayVal []interface{}
+
+ // MapVal is the type used to set the default value for maps.
+ MapVal map[interface{}]interface{}
+)
+
+const (
+ // BooleanKind represents a boolean.
+ BooleanKind Kind = iota + 1
+ // IntKind represents a signed integer.
+ IntKind
+ // Int32Kind represents a signed 32-bit integer.
+ Int32Kind
+ // Int64Kind represents a signed 64-bit integer.
+ Int64Kind
+ // UIntKind represents an unsigned integer.
+ UIntKind
+ // UInt32Kind represents an unsigned 32-bit integer.
+ UInt32Kind
+ // UInt64Kind represents an unsigned 64-bit integer.
+ UInt64Kind
+ // Float32Kind represents a 32-bit floating number.
+ Float32Kind
+ // Float64Kind represents a 64-bit floating number.
+ Float64Kind
+ // StringKind represents a JSON string.
+ StringKind
+ // BytesKind represent a series of bytes (binary data).
+ BytesKind
+ // ArrayKind represents a JSON array.
+ ArrayKind
+ // ObjectKind represents a JSON object.
+ ObjectKind
+ // MapKind represents a JSON object where the keys are not known in
+ // advance.
+ MapKind
+ // UserTypeKind represents a user type.
+ UserTypeKind
+ // ResultTypeKind represents a result type.
+ ResultTypeKind
+ // AnyKind represents an unknown type.
+ AnyKind
+)
+
+const (
+ // Boolean is the type for a JSON boolean.
+ Boolean = Primitive(BooleanKind)
+
+ // Int is the type for a signed integer.
+ Int = Primitive(IntKind)
+
+ // Int32 is the type for a signed 32-bit integer.
+ Int32 = Primitive(Int32Kind)
+
+ // Int64 is the type for a signed 64-bit integer.
+ Int64 = Primitive(Int64Kind)
+
+ // UInt is the type for an unsigned integer.
+ UInt = Primitive(UIntKind)
+
+ // UInt32 is the type for an unsigned 32-bit integer.
+ UInt32 = Primitive(UInt32Kind)
+
+ // UInt64 is the type for an unsigned 64-bit integer.
+ UInt64 = Primitive(UInt64Kind)
+
+ // Float32 is the type for a 32-bit floating number.
+ Float32 = Primitive(Float32Kind)
+
+ // Float64 is the type for a 64-bit floating number.
+ Float64 = Primitive(Float64Kind)
+
+ // String is the type for a JSON string.
+ String = Primitive(StringKind)
+
+ // Bytes is the type for binary data.
+ Bytes = Primitive(BytesKind)
+
+ // Any is the type for an arbitrary JSON value (interface{} in Go).
+ Any = Primitive(AnyKind)
+)
+
+// Built-in composite types
+
+// Empty represents empty values.
+var Empty = &UserTypeExpr{
+ TypeName: "Empty",
+ AttributeExpr: &AttributeExpr{
+ Description: "Empty represents empty values",
+ Type: &Object{},
+ },
+}
+
+// Convenience methods
+
+// AsObject returns the type underlying object if any, nil otherwise.
+func AsObject(dt DataType) *Object {
+ switch t := dt.(type) {
+ case *UserTypeExpr:
+ return AsObject(t.Type)
+ case *ResultTypeExpr:
+ return AsObject(t.Type)
+ case *Object:
+ return t
+ default:
+ return nil
+ }
+}
+
+// AsArray returns the type underlying array if any, nil otherwise.
+func AsArray(dt DataType) *Array {
+ switch t := dt.(type) {
+ case *UserTypeExpr:
+ return AsArray(t.Type)
+ case *ResultTypeExpr:
+ return AsArray(t.Type)
+ case *Array:
+ return t
+ default:
+ return nil
+ }
+}
+
+// AsMap returns the type underlying map if any, nil otherwise.
+func AsMap(dt DataType) *Map {
+ switch t := dt.(type) {
+ case *UserTypeExpr:
+ return AsMap(t.Type)
+ case *ResultTypeExpr:
+ return AsMap(t.Type)
+ case *Map:
+ return t
+ default:
+ return nil
+ }
+}
+
+// IsObject returns true if the data type is an object.
+func IsObject(dt DataType) bool { return AsObject(dt) != nil }
+
+// IsArray returns true if the data type is an array.
+func IsArray(dt DataType) bool { return AsArray(dt) != nil }
+
+// IsMap returns true if the data type is a map.
+func IsMap(dt DataType) bool { return AsMap(dt) != nil }
+
+// IsPrimitive returns true if the data type is a primitive type.
+func IsPrimitive(dt DataType) bool {
+ switch t := dt.(type) {
+ case Primitive:
+ return true
+ case *UserTypeExpr:
+ return IsPrimitive(t.Type)
+ case *ResultTypeExpr:
+ return IsPrimitive(t.Type)
+ default:
+ return false
+ }
+}
+
+// Equal compares the types recursively and returns true if they are equal. Two
+// types are equal if:
+//
+// - both types have the same kind
+// - array types have elements whose types are equal
+// - map types have keys and elements whose types are equal
+// - objects have the same attribute names and the attribute types are equal
+//
+// Note: calling Equal is not equivalent to evaluating dt.Hash() == dt2.Hash()
+// as the former may return true for two user types with different names and
+// thus with different hash values.
+func Equal(dt, dt2 DataType) bool {
+ bs := *equal(dt, dt2)
+ for _, b := range bs {
+ if !*b {
+ return false
+ }
+ }
+ return true
+}
+
+// Support recursive types by doing lazy evaluation.
+func equal(dt, dt2 DataType, seen ...map[string]*[]*bool) *[]*bool {
+ f := false
+ fs := []*bool{&f}
+ if dt.Kind() != dt2.Kind() {
+ return &fs
+ }
+ var s map[string]*[]*bool
+ if len(seen) > 0 {
+ s = seen[0]
+ } else {
+ s = make(map[string]*[]*bool)
+ }
+ switch actual := dt.(type) {
+ case *Array:
+ return equal(actual.ElemType.Type, AsArray(dt2).ElemType.Type, s)
+ case *Map:
+ s1 := equal(actual.ElemType.Type, AsMap(dt2).ElemType.Type, s)
+ s2 := equal(actual.KeyType.Type, AsMap(dt2).KeyType.Type, s)
+ s3 := append(*s1, *s2...)
+ return &s3
+ case *Object:
+ if len(*actual) != len(*AsObject(dt2)) {
+ return &fs
+ }
+ var bs []*bool
+ for _, nat := range *actual {
+ obj := AsObject(dt2)
+ at := obj.Attribute(nat.Name)
+ if at == nil {
+ return &fs
+ }
+ bs = append(bs, *equal(nat.Attribute.Type, at.Type, s)...)
+ }
+ return &bs
+ case UserType:
+ key := actual.Name() + "=" + dt2.Name()
+ if v, ok := s[key]; ok {
+ return v
+ }
+ var res []*bool
+ pres := &res
+ s[key] = pres
+ if IsObject(actual) {
+ *pres = *equal(AsObject(dt), AsObject(dt2), s)
+ } else {
+ // User types can also be arrays (CollectionOf)
+ *pres = *equal(AsArray(dt), AsArray(dt2), s)
+ }
+ return pres
+ }
+
+ t := true
+ ts := []*bool{&t}
+ return &ts
+}
+
+// DataType implementation
+
+// Kind implements DataKind.
+func (p Primitive) Kind() Kind { return Kind(p) }
+
+// Name returns the type name appropriate for logging.
+func (p Primitive) Name() string {
+ switch p {
+ case Boolean:
+ return "boolean"
+ case Int:
+ return "int"
+ case Int32:
+ return "int32"
+ case Int64:
+ return "int64"
+ case UInt:
+ return "uint"
+ case UInt32:
+ return "uint32"
+ case UInt64:
+ return "uint64"
+ case Float32:
+ return "float32"
+ case Float64:
+ return "float64"
+ case String:
+ return "string"
+ case Bytes:
+ return "bytes"
+ case Any:
+ return "any"
+ default:
+ panic("unknown primitive type") // bug
+ }
+}
+
+// IsCompatible returns true if val is compatible with p.
+func (p Primitive) IsCompatible(val interface{}) bool {
+ if p == Any {
+ return true
+ }
+ switch val.(type) {
+ case bool:
+ return p == Boolean
+ case int, int8, int16, int32, uint, uint8, uint16, uint32:
+ return p == Int || p == Int32 || p == Int64 ||
+ p == UInt || p == UInt32 || p == UInt64 ||
+ p == Float32 || p == Float64
+ case int64, uint64:
+ return p == Int64 || p == UInt64 || p == Float32 || p == Float64
+ case float32, float64:
+ return p == Float32 || p == Float64
+ case string:
+ return p == String || p == Bytes
+ case []byte:
+ return p == Bytes
+ }
+ return false
+}
+
+// Example generates a pseudo-random primitive value using the given random
+// generator.
+func (p Primitive) Example(r *Random) interface{} {
+ switch p {
+ case Boolean:
+ return r.Bool()
+ case Int, UInt:
+ return r.Int()
+ case Int32, UInt32:
+ return r.Int32()
+ case Int64, UInt64:
+ return r.Int64()
+ case Float32:
+ return r.Float32()
+ case Float64:
+ return r.Float64()
+ case String, Any:
+ return r.String()
+ case Bytes:
+ return []byte(r.String())
+ default:
+ panic("unknown primitive type") // bug
+ }
+}
+
+// Hash returns a unique hash value for p.
+func (p Primitive) Hash() string {
+ return p.Name()
+}
+
+// Kind implements DataKind.
+func (a *Array) Kind() Kind { return ArrayKind }
+
+// Name returns the type name.
+func (a *Array) Name() string {
+ return "array"
+}
+
+// Hash returns a unique hash value for a.
+func (a *Array) Hash() string {
+ return "_array_+" + a.ElemType.Type.Hash()
+}
+
+// IsCompatible returns true if val is compatible with p.
+func (a *Array) IsCompatible(val interface{}) bool {
+ k := reflect.TypeOf(val).Kind()
+ if k != reflect.Array && k != reflect.Slice {
+ return false
+ }
+ v := reflect.ValueOf(val)
+ for i := 0; i < v.Len(); i++ {
+ compat := (a.ElemType.Type != nil) && a.ElemType.Type.IsCompatible(v.Index(i).Interface())
+ if !compat {
+ return false
+ }
+ }
+ return true
+}
+
+// Example generates a pseudo-random array value using the given random
+// generator.
+func (a *Array) Example(r *Random) interface{} {
+ count := r.Int()%3 + 2
+ res := make([]interface{}, count)
+ for i := 0; i < count; i++ {
+ res[i] = a.ElemType.Example(r)
+ }
+ return a.MakeSlice(res)
+}
+
+// MakeSlice examines the key type from the Array and create a slice with
+// builtin type if possible. The idea is to avoid generating []interface{} and
+// produce more precise types.
+func (a *Array) MakeSlice(s []interface{}) interface{} {
+ slice := reflect.MakeSlice(toReflectType(a), 0, len(s))
+ for _, item := range s {
+ slice = reflect.Append(slice, reflect.ValueOf(item))
+ }
+ return slice.Interface()
+}
+
+// ToSlice converts an ArrayVal into a slice.
+func (a ArrayVal) ToSlice() []interface{} {
+ arr := make([]interface{}, len(a))
+ for i, elem := range a {
+ switch actual := elem.(type) {
+ case ArrayVal:
+ arr[i] = actual.ToSlice()
+ case MapVal:
+ arr[i] = actual.ToMap()
+ default:
+ arr[i] = actual
+ }
+ }
+ return arr
+}
+
+// Attribute returns the attribute with the given name if any, nil otherwise.
+func (o *Object) Attribute(name string) *AttributeExpr {
+ for _, nat := range *o {
+ if nat.Name == name {
+ return nat.Attribute
+ }
+ }
+ return nil
+}
+
+// Set replaces the attribute of o named n if any - appends a new named
+// attribute to the slice of named attributes otherwise. The object is
+// modified in place in both cases.
+func (o *Object) Set(n string, att *AttributeExpr) {
+ for _, nat := range *o {
+ if nat.Name == n {
+ nat.Attribute = att
+ return
+ }
+ }
+ *o = append(*o, &NamedAttributeExpr{n, att})
+}
+
+// Delete removes the named attribute n from o if present. Delete does
+// nothing if o does not have an attribute named n.
+func (o *Object) Delete(n string) {
+ index := -1
+ for i, nat := range *o {
+ if nat.Name == n {
+ index = i
+ break
+ }
+ }
+ if index == -1 {
+ return
+ }
+ *o = append((*o)[:index], (*o)[index+1:]...)
+}
+
+// Rename changes the name of the named attribute n to m. Rename does nothing if
+// o does not have an attribute named n.
+func (o *Object) Rename(n, m string) {
+ for _, nat := range *o {
+ if nat.Name == n {
+ nat.Name = m
+ return
+ }
+ }
+}
+
+// Kind implements DataKind.
+func (o *Object) Kind() Kind { return ObjectKind }
+
+// Name returns the type name.
+func (o *Object) Name() string { return "object" }
+
+// Hash returns a unique hash value for o.
+func (o *Object) Hash() string {
+ h := "_object_"
+ for _, nat := range *o {
+ h += "+" + nat.Name + "/" + nat.Attribute.Type.Hash()
+ }
+ return h
+}
+
+// Merge merges the named attributes of other into o, overriding any existing
+// attribute of o that has the same name with a duplicate of the corresponding
+// attribute of other. Merge returns the modified o.
+func (o *Object) Merge(other *Object) *Object {
+ res := o
+ for _, nat := range *other {
+ res.Set(nat.Name, DupAtt(nat.Attribute))
+ }
+ return res
+}
+
+// IsCompatible returns true if o describes the (Go) type of val.
+func (o *Object) IsCompatible(val interface{}) bool {
+ k := reflect.TypeOf(val).Kind()
+ return k == reflect.Map || k == reflect.Struct
+}
+
+// Example returns a random value of the object.
+func (o *Object) Example(r *Random) interface{} {
+ res := make(map[string]interface{})
+ for _, nat := range *o {
+ if v := nat.Attribute.Example(r); v != nil {
+ res[nat.Name] = v
+ }
+ }
+ return res
+}
+
+// Kind implements DataKind.
+func (m *Map) Kind() Kind { return MapKind }
+
+// Name returns the type name.
+func (m *Map) Name() string { return "map" }
+
+// Hash returns a unique hash value for m.
+func (m *Map) Hash() string {
+ return "_map_+" + m.KeyType.Type.Hash() + ":" + m.ElemType.Type.Hash()
+}
+
+// IsCompatible returns true if m describes the (Go) type of val.
+func (m *Map) IsCompatible(val interface{}) bool {
+ k := reflect.TypeOf(val).Kind()
+ if k != reflect.Map {
+ return false
+ }
+ v := reflect.ValueOf(val)
+ for _, key := range v.MapKeys() {
+ keyCompat := m.KeyType.Type == nil || m.KeyType.Type.IsCompatible(key.Interface())
+ elemCompat := m.ElemType.Type == nil || m.ElemType.Type.IsCompatible(v.MapIndex(key).Interface())
+ if !keyCompat || !elemCompat {
+ return false
+ }
+ }
+ return true
+}
+
+// Example returns a random map value.
+func (m *Map) Example(r *Random) interface{} {
+ if IsObject(m.KeyType.Type) || IsArray(m.KeyType.Type) || IsMap(m.KeyType.Type) {
+ // not much we can do for non hashable Go types
+ return nil
+ }
+ count := r.Int()%3 + 1
+ pair := map[interface{}]interface{}{}
+ for i := 0; i < count; i++ {
+ k := m.KeyType.Example(r)
+ v := m.ElemType.Example(r)
+ if k != nil && v != nil {
+ pair[k] = v
+ }
+ }
+ return m.MakeMap(pair)
+}
+
+// MakeMap examines the key type from a Map and creates a map with builtin type
+// if possible. The idea is to avoid generating map[interface{}]interface{},
+// which cannot be handled by json.Marshal.
+func (m *Map) MakeMap(raw map[interface{}]interface{}) interface{} {
+ ma := reflect.MakeMap(toReflectType(m))
+ for key, value := range raw {
+ ma.SetMapIndex(reflect.ValueOf(key), reflect.ValueOf(value))
+ }
+ return ma.Interface()
+}
+
+// ToMap converts a MapVal to a map.
+func (m MapVal) ToMap() map[interface{}]interface{} {
+ mp := make(map[interface{}]interface{}, len(m))
+ for k, v := range m {
+ switch actual := v.(type) {
+ case ArrayVal:
+ mp[k] = actual.ToSlice()
+ case MapVal:
+ mp[k] = actual.ToMap()
+ default:
+ mp[k] = actual
+ }
+ }
+ return mp
+}
+
+// QualifiedTypeName returns the qualified type name for the given data type.
+// The qualified type name includes the name of the type of the elements of
+// array or map types. This is useful in reporting types in error messages,
+// examples of qualified type names:
+//
+// "array<int32>"
+// "map<string, int32>"
+// "map<string, array<int32>>"
+//
+func QualifiedTypeName(t DataType) string {
+ switch t.Kind() {
+ case ArrayKind:
+ a := t.(*Array)
+ return fmt.Sprintf("%s<%s>",
+ t.Name(),
+ QualifiedTypeName(a.ElemType.Type),
+ )
+ case MapKind:
+ h := t.(*Map)
+ return fmt.Sprintf("%s<%s, %s>",
+ t.Name(),
+ QualifiedTypeName(h.KeyType.Type),
+ QualifiedTypeName(h.ElemType.Type),
+ )
+ }
+ return t.Name()
+}
+
+// toReflectType converts the DataType to reflect.Type.
+func toReflectType(dtype DataType) reflect.Type {
+ switch dtype.Kind() {
+ case BooleanKind:
+ return reflect.TypeOf(true)
+ case Int32Kind:
+ return reflect.TypeOf(int32(0))
+ case Int64Kind:
+ return reflect.TypeOf(int64(0))
+ case Float32Kind:
+ return reflect.TypeOf(float32(0))
+ case Float64Kind:
+ return reflect.TypeOf(float64(0))
+ case StringKind:
+ return reflect.TypeOf("")
+ case BytesKind:
+ return reflect.TypeOf([]byte{})
+ case ObjectKind, UserTypeKind, ResultTypeKind:
+ return reflect.TypeOf(map[string]interface{}{})
+ case ArrayKind:
+ return reflect.SliceOf(toReflectType(dtype.(*Array).ElemType.Type))
+ case MapKind:
+ m := dtype.(*Map)
+ // avoid complication: do not allow object as the map key
+ var ktype reflect.Type
+ if m.KeyType.Type.Kind() != ObjectKind {
+ ktype = toReflectType(m.KeyType.Type)
+ } else {
+ ktype = reflect.TypeOf([]interface{}{}).Elem()
+ }
+ return reflect.MapOf(ktype, toReflectType(m.ElemType.Type))
+ default:
+ return reflect.TypeOf([]interface{}{}).Elem()
+ }
+}
diff --git a/vendor/goa.design/goa/expr/user_type.go b/vendor/goa.design/goa/expr/user_type.go
new file mode 100644
index 000000000..f4797de9b
--- /dev/null
+++ b/vendor/goa.design/goa/expr/user_type.go
@@ -0,0 +1,100 @@
+package expr
+
+type (
+ // UserTypeExpr is the struct used to describe user defined types.
+ UserTypeExpr struct {
+ // A user type expression is a field expression.
+ *AttributeExpr
+ // Name of type
+ TypeName string
+ // Service this type is the default type for if any
+ Service *ServiceExpr
+ }
+)
+
+// NewUserTypeExpr creates a user type expression but does not execute the DSL.
+func NewUserTypeExpr(name string, fn func()) *UserTypeExpr {
+ return &UserTypeExpr{
+ TypeName: name,
+ AttributeExpr: &AttributeExpr{DSLFunc: fn},
+ }
+}
+
+// ID returns the type name of the user type.
+func (u *UserTypeExpr) ID() string {
+ return u.Name()
+}
+
+// Kind implements DataKind.
+func (u *UserTypeExpr) Kind() Kind { return UserTypeKind }
+
+// Name returns the type name.
+func (u *UserTypeExpr) Name() string {
+ if u.AttributeExpr == nil {
+ return u.TypeName
+ }
+ if n, ok := u.AttributeExpr.Meta["struct:type:name"]; ok {
+ return n[0]
+ }
+ return u.TypeName
+}
+
+// Rename changes the type name to the given value.
+func (u *UserTypeExpr) Rename(n string) { u.TypeName = n }
+
+// IsCompatible returns true if u describes the (Go) type of val.
+func (u *UserTypeExpr) IsCompatible(val interface{}) bool {
+ return u.Type == nil || u.Type.IsCompatible(val)
+}
+
+// Attribute returns the embedded attribute.
+func (u *UserTypeExpr) Attribute() *AttributeExpr {
+ return u.AttributeExpr
+}
+
+// SetAttribute sets the embedded attribute.
+func (u *UserTypeExpr) SetAttribute(att *AttributeExpr) {
+ u.AttributeExpr = att
+}
+
+// Dup creates a deep copy of the user type given a deep copy of its attribute.
+func (u *UserTypeExpr) Dup(att *AttributeExpr) UserType {
+ if u == Empty {
+ // Don't dup Empty so that code may check against it.
+ return u
+ }
+ return &UserTypeExpr{
+ AttributeExpr: att,
+ TypeName: u.TypeName,
+ Service: u.Service,
+ }
+}
+
+// Hash returns a unique hash value for u.
+func (u *UserTypeExpr) Hash() string {
+ return "_type_+" + u.TypeName
+}
+
+// Example produces an example for the user type which is JSON serialization
+// compatible.
+func (u *UserTypeExpr) Example(r *Random) interface{} {
+ if ex := u.recExample(r); ex != nil {
+ return *ex
+ }
+ return nil
+}
+
+func (u *UserTypeExpr) recExample(r *Random) *interface{} {
+ if ex, ok := r.Seen[u.ID()]; ok {
+ return ex
+ }
+ if r.Seen == nil {
+ r.Seen = make(map[string]*interface{})
+ }
+ var ex interface{}
+ pex := &ex
+ r.Seen[u.ID()] = pex
+ actual := u.Type.Example(r)
+ *pex = actual
+ return pex
+}
diff --git a/vendor/goa.design/goa/grpc/codegen/client.go b/vendor/goa.design/goa/grpc/codegen/client.go
new file mode 100644
index 000000000..24ff1b037
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/client.go
@@ -0,0 +1,380 @@
+package codegen
+
+import (
+ "fmt"
+ "path/filepath"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/codegen/service"
+ "goa.design/goa/expr"
+)
+
+// ClientFiles returns all the client gRPC transport files.
+func ClientFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ svcLen := len(root.API.GRPC.Services)
+ fw := make([]*codegen.File, 2*svcLen)
+ for i, svc := range root.API.GRPC.Services {
+ fw[i] = client(genpkg, svc)
+ }
+ for i, svc := range root.API.GRPC.Services {
+ fw[i+svcLen] = clientEncodeDecode(genpkg, svc)
+ }
+ return fw
+}
+
+// client returns the files defining the gRPC client.
+func client(genpkg string, svc *expr.GRPCServiceExpr) *codegen.File {
+ path := filepath.Join(codegen.Gendir, "grpc", codegen.SnakeCase(svc.Name()), "client", "client.go")
+ data := GRPCServices.Get(svc.Name())
+ title := fmt.Sprintf("%s GRPC client", svc.Name())
+ sections := []*codegen.SectionTemplate{
+ codegen.Header(title, "client", []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "google.golang.org/grpc"},
+ {Path: "goa.design/goa", Name: "goa"},
+ {Path: "goa.design/goa/grpc", Name: "goagrpc"},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name())), Name: data.Service.PkgName},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name()), "views"), Name: data.Service.ViewsPkg},
+ {Path: filepath.Join(genpkg, "grpc", codegen.SnakeCase(svc.Name()), pbPkgName)},
+ }),
+ }
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-struct",
+ Source: clientStructT,
+ Data: data,
+ })
+ for _, e := range data.Endpoints {
+ if e.ClientStream != nil {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-stream-struct-type",
+ Source: streamStructTypeT,
+ Data: e.ClientStream,
+ })
+ }
+ }
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-init",
+ Source: clientInitT,
+ Data: data,
+ })
+ for _, e := range data.Endpoints {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-endpoint-init",
+ Source: clientEndpointInitT,
+ Data: e,
+ })
+ }
+ for _, e := range data.Endpoints {
+ if e.ClientStream != nil {
+ if e.ClientStream.RecvConvert != nil {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-stream-recv",
+ Source: streamRecvT,
+ Data: e.ClientStream,
+ })
+ }
+ if e.Method.StreamKind == expr.ClientStreamKind || e.Method.StreamKind == expr.BidirectionalStreamKind {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-stream-send",
+ Source: streamSendT,
+ Data: e.ClientStream,
+ })
+ }
+ if e.ServerStream.MustClose {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-stream-close",
+ Source: streamCloseT,
+ Data: e.ClientStream,
+ })
+ }
+ if e.Method.ViewedResult != nil && e.Method.ViewedResult.ViewName == "" {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-stream-set-view",
+ Source: streamSetViewT,
+ Data: e.ClientStream,
+ })
+ }
+ }
+ }
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+func clientEncodeDecode(genpkg string, svc *expr.GRPCServiceExpr) *codegen.File {
+ var (
+ path string
+ sections []*codegen.SectionTemplate
+
+ data = GRPCServices.Get(svc.Name())
+ )
+ {
+ path = filepath.Join(codegen.Gendir, "grpc", codegen.SnakeCase(svc.Name()), "client", "encode_decode.go")
+ sections = []*codegen.SectionTemplate{
+ codegen.Header(svc.Name()+" gRPC client encoders and decoders", "client", []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "strconv"},
+ {Path: "google.golang.org/grpc"},
+ {Path: "google.golang.org/grpc/metadata"},
+ {Path: "goa.design/goa", Name: "goa"},
+ {Path: "goa.design/goa/grpc", Name: "goagrpc"},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name())), Name: data.Service.PkgName},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name()), "views"), Name: data.Service.ViewsPkg},
+ {Path: filepath.Join(genpkg, "grpc", codegen.SnakeCase(svc.Name()), pbPkgName)},
+ }),
+ }
+ fm := transTmplFuncs(svc)
+ fm["metadataEncodeDecodeData"] = metadataEncodeDecodeData
+ fm["typeConversionData"] = typeConversionData
+ fm["isBearer"] = isBearer
+ for _, e := range data.Endpoints {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "remote-method-builder",
+ Source: remoteMethodBuilderT,
+ Data: e,
+ })
+ if e.PayloadRef != "" {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "request-encoder",
+ Source: requestEncoderT,
+ Data: e,
+ FuncMap: fm,
+ })
+ }
+ if e.ResultRef != "" {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "response-decoder",
+ Source: responseDecoderT,
+ Data: e,
+ FuncMap: fm,
+ })
+ }
+ }
+ }
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+// isBearer returns true if the security scheme uses a Bearer scheme.
+func isBearer(schemes []*service.SchemeData) bool {
+ for _, s := range schemes {
+ if s.Name != "Authorization" {
+ continue
+ }
+ if s.Type == "JWT" || s.Type == "OAuth2" {
+ return true
+ }
+ }
+ return false
+}
+
+// input: ServiceData
+const clientStructT = `{{ printf "%s lists the service endpoint gRPC clients." .ClientStruct | comment }}
+type {{ .ClientStruct }} struct {
+ grpccli {{ .PkgName }}.{{ .ClientInterface }}
+ opts []grpc.CallOption
+}
+`
+
+// input: ServiceData
+const clientInitT = `{{ printf "New%s instantiates gRPC client for all the %s service servers." .ClientStruct .Service.Name | comment }}
+func New{{ .ClientStruct }}(cc *grpc.ClientConn, opts ...grpc.CallOption) *{{ .ClientStruct }} {
+ return &{{ .ClientStruct }}{
+ grpccli: {{ .ClientInterfaceInit }}(cc),
+ opts: opts,
+ }
+}
+`
+
+// input: EndpointData
+const clientEndpointInitT = `{{ printf "%s calls the %q function in %s.%s interface." .Method.VarName .Method.VarName .PkgName .ClientInterface | comment }}
+func (c *{{ .ClientStruct }}) {{ .Method.VarName }}() goa.Endpoint {
+ return func(ctx context.Context, v interface{}) (interface{}, error) {
+ inv := goagrpc.NewInvoker(
+ Build{{ .Method.VarName }}Func(c.grpccli, c.opts...),
+ {{ if .PayloadRef }}Encode{{ .Method.VarName }}Request{{ else }}nil{{ end }},
+ {{ if .ResultRef }}Decode{{ .Method.VarName }}Response{{ else }}nil{{ end }})
+ res, err := inv.Invoke(ctx, v)
+ if err != nil {
+ return nil, goagrpc.DecodeError(err)
+ }
+ return res, nil
+ }
+}
+`
+
+// input: EndpointData
+const remoteMethodBuilderT = `{{ printf "Build%sFunc builds the remote method to invoke for %q service %q endpoint." .Method.VarName .ServiceName .Method.Name | comment }}
+func Build{{ .Method.VarName }}Func(grpccli {{ .PkgName }}.{{ .ClientInterface }}, cliopts ...grpc.CallOption) goagrpc.RemoteFunc {
+ return func(ctx context.Context, reqpb interface{}, opts ...grpc.CallOption) (interface{}, error) {
+ for _, opt := range cliopts {
+ opts = append(opts, opt)
+ }
+ return grpccli.{{ .Method.VarName }}(ctx{{ if not .Method.StreamingPayload }}, reqpb.({{ .Request.ClientConvert.TgtRef }}){{ end }}, opts...)
+ }
+}
+`
+
+// input: EndpointData
+const responseDecoderT = `{{ printf "Decode%sResponse decodes responses from the %s %s endpoint." .Method.VarName .ServiceName .Method.Name | comment }}
+func Decode{{ .Method.VarName }}Response(ctx context.Context, v interface{}, hdr, trlr metadata.MD) (interface{}, error) {
+{{- if or .Response.Headers .Response.Trailers }}
+ var (
+ {{- range .Response.Headers }}
+ {{ .VarName }} {{ .TypeRef }}
+ {{- end }}
+ {{- range .Response.Trailers }}
+ {{ .VarName }} {{ .TypeRef }}
+ {{- end }}
+ err error
+ )
+ {
+ {{- range .Response.Headers }}
+ {{ template "metadata_decoder" (metadataEncodeDecodeData . "hdr") }}
+ {{- if .Validate }}
+ {{ .Validate }}
+ {{- end }}
+ {{- end }}
+ {{- range .Response.Trailers }}
+ {{ template "metadata_decoder" (metadataEncodeDecodeData . "trlr") }}
+ {{- if .Validate }}
+ {{ .Validate }}
+ {{- end }}
+ {{- end }}
+ }
+ if err != nil {
+ return nil, err
+ }
+{{- end }}
+{{- if .ViewedResultRef }}
+ var view string
+ {
+ if vals := hdr.Get("goa-view"); len(vals) > 0 {
+ view = vals[0]
+ }
+ }
+{{- end }}
+{{- if .ServerStream }}
+ return &{{ .ClientStream.VarName }}{
+ stream: v.({{ .ClientStream.Interface }}),
+ {{- if .ViewedResultRef }}
+ view: view,
+ {{- end }}
+ }, nil
+{{- else }}
+ message, ok := v.({{ .Response.ClientConvert.SrcRef }})
+ if !ok {
+ return nil, goagrpc.ErrInvalidType("{{ .ServiceName }}", "{{ .Method.Name }}", "{{ .Response.ClientConvert.SrcRef }}", v)
+ }
+ {{- if .Response.ClientConvert.Validation }}
+ err = {{ .Response.ClientConvert.Validation.Name }}(message)
+ {{- end }}
+ res := {{ .Response.ClientConvert.Init.Name }}({{ range .Response.ClientConvert.Init.Args }}{{ .Name }}, {{ end }})
+ {{- if .ViewedResultRef }}
+ vres := {{ if not .Method.ViewedResult.IsCollection }}&{{ end }}{{ .Method.ViewedResult.FullName }}{Projected: res}
+ vres.View = view
+ return {{ .ServicePkgName }}.{{ .Method.ViewedResult.ResultInit.Name }}({{ range .Method.ViewedResult.ResultInit.Args}}{{ .Name }}, {{ end }}), nil
+ {{- else }}
+ return res, nil
+ {{- end }}
+{{- end }}
+}
+
+{{- define "metadata_decoder" }}
+ {{- if or (eq .Metadata.Type.Name "string") (eq .Metadata.Type.Name "any") }}
+ {{- if .Metadata.Required }}
+ if vals := {{ .VarName }}.Get({{ printf "%q" .Metadata.Name }}); len(vals) == 0 {
+ err = goa.MergeErrors(err, goa.MissingFieldError({{ printf "%q" .Metadata.Name }}, "metadata"))
+ } else {
+ {{ .Metadata.VarName }} = vals[0]
+ }
+ {{- else }}
+ if vals := {{ .VarName }}.Get({{ printf "%q" .Metadata.Name }}); len(vals) > 0 {
+ {{ .Metadata.VarName }} = vals[0]
+ }
+ {{- end }}
+ {{- else if .Metadata.StringSlice }}
+ {{- if .Metadata.Required }}
+ if vals := {{ .VarName }}.Get({{ printf "%q" .Metadata.Name }}); len(vals) == 0 {
+ err = goa.MergeErrors(err, goa.MissingFieldError({{ printf "%q" .Metadata.Name }}, "metadata"))
+ } else {
+ {{ .Metadata.VarName }} = vals
+ }
+ {{- else }}
+ {{ .Metadata.VarName }} = {{ .VarName }}.Get({{ printf "%q" .Metadata.Name }})
+ {{- end }}
+ {{- else if .Metadata.Slice }}
+ {{- if .Metadata.Required }}
+ if {{ .Metadata.VarName }}Raw := {{ .VarName }}.Get({{ printf "%q" .Metadata.Name }}); len({{ .Metadata.VarName }}Raw) == 0 {
+ err = goa.MergeErrors(err, goa.MissingFieldError({{ printf "%q" .Metadata.Name }}, "metadata"))
+ } else {
+ {{- template "slice_conversion" .Metadata }}
+ }
+ {{- else }}
+ if {{ .Metadata.VarName }}Raw := {{ .VarName }}.Get({{ printf "%q" .Metadata.Name }}); len({{ .Metadata.VarName }}Raw) > 0 {
+ {{- template "slice_conversion" .Metadata }}
+ }
+ {{- end }}
+ {{- else }}
+ {{- if .Metadata.Required }}
+ if vals := {{ .VarName }}.Get({{ printf "%q" .Metadata.Name }}); len(vals) == 0 {
+ err = goa.MergeErrors(err, goa.MissingFieldError({{ printf "%q" .Metadata.Name }}, "metadata"))
+ } else {
+ {{ .Metadata.VarName }}Raw = vals[0]
+ {{ template "type_conversion" .Metadata }}
+ }
+ {{- else }}
+ if vals := {{ .VarName }}.Get({{ printf "%q" .Metadata.Name }}); len(vals) > 0 {
+ {{ .Metadata.VarName }}Raw = vals[0]
+ {{ template "type_conversion" .Metadata }}
+ }
+ {{- end }}
+ {{- end }}
+{{- end }}
+` + convertStringToTypeT
+
+// input: EndpointData
+const requestEncoderT = `{{ printf "Encode%sRequest encodes requests sent to %s %s endpoint." .Method.VarName .ServiceName .Method.Name | comment }}
+func Encode{{ .Method.VarName }}Request(ctx context.Context, v interface{}, md *metadata.MD) (interface{}, error) {
+ payload, ok := v.({{ .PayloadRef }})
+ if !ok {
+ return nil, goagrpc.ErrInvalidType("{{ .ServiceName }}", "{{ .Method.Name }}", "{{ .PayloadRef }}", v)
+ }
+{{- range .Request.Metadata }}
+ {{- if .StringSlice }}
+ for _, value := range payload{{ if .FieldName }}.{{ .FieldName }}{{ end }} {
+ (*md).Append({{ printf "%q" .Name }}, value)
+ }
+ {{- else if .Slice }}
+ for _, value := range payload{{ if .FieldName }}.{{ .FieldName }}{{ end }} {
+ {{ template "string_conversion" (typeConversionData .Type.ElemType.Type "valueStr" "value") }}
+ (*md).Append({{ printf "%q" .Name }}, valueStr)
+ }
+ {{- else }}
+ {{- if .Pointer }}
+ if payload{{ if .FieldName }}.{{ .FieldName }}{{ end }} != nil {
+ {{- end }}
+ {{- if (and (eq .Name "Authorization") (isBearer $.MetadataSchemes)) }}
+ if !strings.Contains({{ if .Pointer }}*{{ end }}payload{{ if .FieldName }}.{{ .FieldName }}{{ end }}, " ") {
+ (*md).Append(ctx, {{ printf "%q" .Name }}, "Bearer "+{{ if .Pointer }}*{{ end }}payload{{ if .FieldName }}.{{ .FieldName }}{{ end }})
+ } else {
+ {{- end }}
+ (*md).Append({{ printf "%q" .Name }},
+ {{- if eq .Type.Name "bytes" }} string(
+ {{- else if not (eq .Type.Name "string") }} fmt.Sprintf("%v",
+ {{- end }}
+ {{- if .Pointer }}*{{ end }}payload{{ if .FieldName }}.{{ .FieldName }}{{ end }}
+ {{- if or (eq .Type.Name "bytes") (not (eq .Type.Name "string")) }})
+ {{- end }})
+ {{- if (and (eq .Name "Authorization") (isBearer $.MetadataSchemes)) }}
+ }
+ {{- end }}
+ {{- if .Pointer }}
+ }
+ {{- end }}
+ {{- end }}
+{{- end }}
+{{- if .Request.ClientConvert }}
+ return {{ .Request.ClientConvert.Init.Name }}({{ range .Request.ClientConvert.Init.Args }}{{ .Name }}, {{ end }}), nil
+{{- else }}
+ return nil, nil
+{{- end }}
+}
+` + convertTypeToStringT
diff --git a/vendor/goa.design/goa/grpc/codegen/client_cli.go b/vendor/goa.design/goa/grpc/codegen/client_cli.go
new file mode 100644
index 000000000..033d32cf7
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/client_cli.go
@@ -0,0 +1,800 @@
+package codegen
+
+import (
+ "encoding/json"
+ "fmt"
+ "path/filepath"
+ "reflect"
+ "strconv"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+var cmds = map[string]*subcommandData{}
+
+type (
+ commandData struct {
+ // Name of command e.g. "cellar-storage"
+ Name string
+ // VarName is the name of the command variable e.g.
+ // "cellarStorage"
+ VarName string
+ // Description is the help text.
+ Description string
+ // Subcommands is the list of endpoint commands.
+ Subcommands []*subcommandData
+ // Example is a valid command invocation, starting with the
+ // command name.
+ Example string
+ // PkgName is the service gRPC client package import name,
+ // e.g. "storagec".
+ PkgName string
+ }
+
+ subcommandData struct {
+ // Name is the subcommand name e.g. "add"
+ Name string
+ // FullName is the subcommand full name e.g. "storageAdd"
+ FullName string
+ // Description is the help text.
+ Description string
+ // Flags is the list of flags supported by the subcommand.
+ Flags []*flagData
+ // MethodVarName is the endpoint method name, e.g. "Add"
+ MethodVarName string
+ // BuildFunction contains the data for the payload build
+ // function if any. Exclusive with Conversion.
+ BuildFunction *buildFunctionData
+ // Conversion contains the flag value to payload conversion
+ // function if any. Exclusive with BuildFunction.
+ Conversion string
+ // Example is a valid command invocation, starting with the
+ // command name.
+ Example string
+ }
+
+ flagData struct {
+ // Name is the name of the flag, e.g. "list-vintage"
+ Name string
+ // VarName is the name of the flag variable, e.g. "listVintage"
+ VarName string
+ // Type is the type of the flag, e.g. INT
+ Type string
+ // FullName is the flag full name e.g. "storageAddVintage"
+ FullName string
+ // Description is the flag help text.
+ Description string
+ // Required is true if the flag is required.
+ Required bool
+ // Example returns a JSON serialized example value.
+ Example string
+ }
+
+ buildFunctionData struct {
+ // Name is the build payload function name.
+ Name string
+ // ActualParams is the list of passed build function parameters.
+ ActualParams []string
+ // FormalParams is the list of build function formal parameter
+ // names.
+ FormalParams []string
+ // ServiceName is the name of the service.
+ ServiceName string
+ // MethodName is the name of the method.
+ MethodName string
+ // ResultType is the fully qualified result type name.
+ ResultType string
+ // Fields describes the payload fields.
+ Fields []*fieldData
+ // PayloadInit contains the data needed to render the function
+ // body.
+ PayloadInit *InitData
+ // CheckErr is true if the payload initialization code requires
+ // an "err error" variable that must be checked.
+ CheckErr bool
+ // Args contains the data needed to build payload.
+ Args []*InitArgData
+ }
+
+ fieldData struct {
+ // Name is the field name, e.g. "Vintage"
+ Name string
+ // VarName is the name of the local variable holding the field
+ // value, e.g. "vintage"
+ VarName string
+ // TypeName is the name of the type.
+ TypeName string
+ // Init is the code initializing the variable.
+ Init string
+ // Pointer is true if the variable needs to be declared as a
+ // pointer.
+ Pointer bool
+ }
+)
+
+// ClientCLIFiles returns the client gRPC CLI support file.
+func ClientCLIFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ var (
+ data []*commandData
+ svcs []*expr.GRPCServiceExpr
+ )
+ for _, svc := range root.API.GRPC.Services {
+ sd := GRPCServices.Get(svc.Name())
+ if len(sd.Endpoints) > 0 {
+ data = append(data, buildCommandData(sd))
+ svcs = append(svcs, svc)
+ }
+ }
+ if len(svcs) == 0 {
+ return nil
+ }
+
+ var files []*codegen.File
+ for _, svr := range root.API.Servers {
+ files = append(files, endpointParser(genpkg, root, svr, data))
+ }
+ for i, svc := range svcs {
+ files = append(files, payloadBuilders(genpkg, svc, data[i]))
+ }
+ return files
+}
+
+// endpointParser returns the file that implements the command line parser that
+// builds the client endpoint and payload necessary to perform a request.
+func endpointParser(genpkg string, root *expr.RootExpr, svr *expr.ServerExpr, data []*commandData) *codegen.File {
+ pkg := codegen.SnakeCase(codegen.Goify(svr.Name, true))
+ path := filepath.Join(codegen.Gendir, "grpc", "cli", pkg, "cli.go")
+ title := fmt.Sprintf("%s gRPC client CLI support package", root.API.Name)
+ specs := []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "flag"},
+ {Path: "fmt"},
+ {Path: "os"},
+ {Path: "goa.design/goa", Name: "goa"},
+ {Path: "goa.design/goa/grpc", Name: "goagrpc"},
+ {Path: "google.golang.org/grpc", Name: "grpc"},
+ }
+ for _, svc := range root.API.GRPC.Services {
+ sd := GRPCServices.Get(svc.Name())
+ specs = append(specs, &codegen.ImportSpec{
+ Path: filepath.Join(genpkg, "grpc", codegen.SnakeCase(sd.Service.Name), "client"),
+ Name: sd.Service.PkgName + "c",
+ })
+ specs = append(specs, &codegen.ImportSpec{
+ Path: filepath.Join(genpkg, "grpc", codegen.SnakeCase(sd.Service.Name), pbPkgName),
+ })
+ }
+ usages := make([]string, len(data))
+ var examples []string
+ for i, cmd := range data {
+ subs := make([]string, len(cmd.Subcommands))
+ for i, s := range cmd.Subcommands {
+ subs[i] = s.Name
+ }
+ var lp, rp string
+ if len(subs) > 1 {
+ lp = "("
+ rp = ")"
+ }
+ usages[i] = fmt.Sprintf("%s %s%s%s", cmd.Name, lp, strings.Join(subs, "|"), rp)
+ if i < 5 {
+ examples = append(examples, cmd.Example)
+ }
+ }
+
+ sections := []*codegen.SectionTemplate{
+ codegen.Header(title, "cli", specs),
+ {Source: usageT, Data: usages},
+ {Source: exampleT, Data: examples},
+ }
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "parse-endpoint",
+ Source: parseT,
+ Data: data,
+ })
+ for _, cmd := range data {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "cli-command-usage",
+ Source: commandUsageT,
+ Data: cmd,
+ FuncMap: map[string]interface{}{"printDescription": printDescription},
+ })
+ }
+
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+func printDescription(desc string) string {
+ res := strings.Replace(desc, "`", "`+\"`\"+`", -1)
+ res = strings.Replace(res, "\n", "\n\t", -1)
+ return res
+}
+
+// payloadBuilders returns the file that contains the payload constructors that
+// use flag values as arguments.
+func payloadBuilders(genpkg string, svc *expr.GRPCServiceExpr, data *commandData) *codegen.File {
+ path := filepath.Join(codegen.Gendir, "grpc", codegen.SnakeCase(svc.Name()), "client", "cli.go")
+ title := fmt.Sprintf("%s gRPC client CLI support package", svc.Name())
+ sd := GRPCServices.Get(svc.Name())
+ specs := []*codegen.ImportSpec{
+ {Path: "encoding/json"},
+ {Path: "fmt"},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name())), Name: sd.Service.PkgName},
+ {Path: filepath.Join(genpkg, "grpc", codegen.SnakeCase(svc.Name()), pbPkgName)},
+ }
+ sections := []*codegen.SectionTemplate{
+ codegen.Header(title, "client", specs),
+ }
+ for _, sub := range data.Subcommands {
+ if sub.BuildFunction != nil {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "cli-build-payload",
+ Source: buildPayloadT,
+ Data: sub.BuildFunction,
+ })
+ }
+ }
+
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+// buildCommandData builds the data needed by the templates to render the CLI
+// parsing of the service command.
+func buildCommandData(svc *ServiceData) *commandData {
+ var (
+ name string
+ description string
+ subcommands []*subcommandData
+ example string
+ )
+ {
+ name = svc.Service.Name
+ description = svc.Service.Description
+ if description == "" {
+ description = fmt.Sprintf("Make requests to the %q service", name)
+ }
+ subcommands = make([]*subcommandData, len(svc.Endpoints))
+ for i, e := range svc.Endpoints {
+ subcommands[i] = buildSubcommandData(svc, e)
+ }
+ if len(subcommands) > 0 {
+ example = subcommands[0].Example
+ }
+ }
+ return &commandData{
+ Name: codegen.KebabCase(name),
+ VarName: codegen.Goify(name, false),
+ Description: description,
+ Subcommands: subcommands,
+ Example: example,
+ PkgName: svc.Service.PkgName + "c",
+ }
+}
+
+func buildSubcommandData(svc *ServiceData, e *EndpointData) *subcommandData {
+ var (
+ name string
+ fullName string
+ description string
+ flags []*flagData
+ buildFunction *buildFunctionData
+ conversion string
+ )
+ {
+ svcn := svc.Service.Name
+ en := e.Method.Name
+ name = codegen.KebabCase(en)
+ fullName = goify(svcn, en)
+ description = e.Method.Description
+ if description == "" {
+ description = fmt.Sprintf("Make request to the %q endpoint", e.Method.Name)
+ }
+ if e.Request != nil {
+ args := e.Request.CLIArgs
+ flags, buildFunction = makeFlags(e, args)
+ if buildFunction == nil && len(flags) > 0 {
+ // No build function, just convert the arg to the body type
+ var convPre, convSuff string
+ target := "data"
+ if flagType(e.Method.Payload) == "JSON" {
+ target = "val"
+ convPre = fmt.Sprintf("var val %s\n", e.Method.Payload)
+ convSuff = "\ndata = val"
+ }
+ conv, check := conversionCode(
+ "*"+flags[0].FullName+"Flag",
+ target,
+ e.Method.Payload,
+ false,
+ )
+ conversion = convPre + conv + convSuff
+ if check {
+ conversion = "var err error\n" + conversion
+ conversion += "\nif err != nil {\n"
+ if flagType(e.Method.Payload) == "JSON" {
+ conversion += fmt.Sprintf(`return nil, nil, fmt.Errorf("invalid JSON for %s, example of valid JSON:\n%%s", %q)`,
+ flags[0].FullName+"Flag", flags[0].Example)
+ } else {
+ conversion += fmt.Sprintf(`return nil, nil, fmt.Errorf("invalid value for %s, must be %s")`,
+ flags[0].FullName+"Flag", flags[0].Type)
+ }
+ conversion += "\n}"
+ }
+ }
+ }
+ }
+ sub := &subcommandData{
+ Name: name,
+ FullName: fullName,
+ Description: description,
+ Flags: flags,
+ MethodVarName: e.Method.VarName,
+ BuildFunction: buildFunction,
+ Conversion: conversion,
+ }
+ generateExample(sub, svc.Service.Name)
+ cmds[fullName] = sub
+
+ return sub
+}
+
+func generateExample(sub *subcommandData, svc string) {
+ ex := codegen.KebabCase(svc) + " " + codegen.KebabCase(sub.Name)
+ for _, f := range sub.Flags {
+ ex += " --" + f.Name + " " + f.Example
+ }
+ sub.Example = ex
+}
+
+// makeFlags builds the CLI flag descriptions and the payload build function
+// data for the given endpoint e from its constructor arguments args. One
+// flag is generated per argument.
+func makeFlags(e *EndpointData, args []*InitArgData) ([]*flagData, *buildFunctionData) {
+	var (
+		fdata  []*fieldData
+		flags  = make([]*flagData, len(args))
+		params = make([]string, len(args))
+		check  bool
+		pinit  *InitData
+	)
+	for i, arg := range args {
+		f := argToFlag(e.ServiceName, e.Method.Name, arg)
+		flags[i] = f
+		params[i] = f.FullName
+		// code parses the raw flag string into the field value; chek reports
+		// whether that code assigns an "err" variable the caller must check.
+		code, chek := fieldLoadCode(f.FullName, f.Type, arg)
+		check = check || chek
+		tn := arg.TypeRef
+		if f.Type == "JSON" {
+			// We need to declare the variable without
+			// a pointer to be able to unmarshal the JSON
+			// using its address.
+			tn = arg.TypeName
+		}
+		fdata = append(fdata, &fieldData{
+			Name:     arg.Name,
+			VarName:  arg.Name,
+			TypeName: tn,
+			Init:     code,
+			Pointer:  arg.Pointer,
+		})
+	}
+	// Use the server-side conversion initializer, when present, to build the
+	// payload value from the parsed fields.
+	if e.Request.ServerConvert != nil {
+		pinit = e.Request.ServerConvert.Init
+	}
+	return flags, &buildFunctionData{
+		Name:         "Build" + e.Method.VarName + "Payload",
+		ActualParams: params,
+		FormalParams: params,
+		ServiceName:  e.ServiceName,
+		MethodName:   e.Method.Name,
+		ResultType:   e.PayloadRef,
+		Fields:       fdata,
+		PayloadInit:  pinit,
+		CheckErr:     check,
+		Args:         args,
+	}
+}
+
+func jsonExample(v interface{}) string {
+ // In JSON, keys must be a string. But goa allows map keys to be anything.
+ r := reflect.ValueOf(v)
+ if r.Kind() == reflect.Map {
+ keys := r.MapKeys()
+ if keys[0].Kind() != reflect.String {
+ a := make(map[string]interface{}, len(keys))
+ var kstr string
+ for _, k := range keys {
+ switch t := k.Interface().(type) {
+ case bool:
+ kstr = strconv.FormatBool(t)
+ case int32:
+ kstr = strconv.FormatInt(int64(t), 10)
+ case int64:
+ kstr = strconv.FormatInt(t, 10)
+ case int:
+ kstr = strconv.Itoa(t)
+ case float32:
+ kstr = strconv.FormatFloat(float64(t), 'f', -1, 32)
+ case float64:
+ kstr = strconv.FormatFloat(t, 'f', -1, 64)
+ default:
+ kstr = k.String()
+ }
+ a[kstr] = r.MapIndex(k).Interface()
+ }
+ v = a
+ }
+ }
+ b, err := json.MarshalIndent(v, " ", " ")
+ ex := "?"
+ if err == nil {
+ ex = string(b)
+ }
+ if strings.Contains(ex, "\n") {
+ ex = "'" + strings.Replace(ex, "'", "\\'", -1) + "'"
+ }
+ return ex
+}
+
+func goify(terms ...string) string {
+ res := codegen.Goify(terms[0], false)
+ if len(terms) == 1 {
+ return res
+ }
+ for _, t := range terms[1:] {
+ res += codegen.Goify(t, true)
+ }
+ return res
+}
+
+// fieldLoadCode returns the code of the build payload function that initializes
+// one of the payload object fields. It returns the initialization code and a
+// boolean indicating whether the code requires an "err" variable.
+//
+// actual is the name of the generated flag variable holding the raw string
+// value, fType its flag type (see flagType) and arg describes the payload
+// constructor argument the field corresponds to.
+func fieldLoadCode(actual, fType string, arg *InitArgData) (string, bool) {
+	var (
+		code    string
+		check   bool
+		startIf string
+		endIf   string
+	)
+	{
+		// Optional flags are only parsed when set on the command line.
+		if !arg.Required {
+			startIf = fmt.Sprintf("if %s != \"\" {\n", actual)
+			endIf = "\n}"
+		}
+		if arg.TypeName == stringN {
+			// Strings need no conversion; take the flag's address unless the
+			// field is required or has a default value.
+			ref := "&"
+			if arg.Required || arg.DefaultValue != nil {
+				ref = ""
+			}
+			code = arg.Name + " = " + ref + actual
+		} else {
+			ex := jsonExample(arg.Example)
+			code, check = conversionCode(actual, arg.Name, arg.TypeName, !arg.Required && arg.DefaultValue == nil)
+			if check {
+				code += "\nif err != nil {\n"
+				// NOTE(review): the JSON branch returns immediately while the
+				// non-JSON branch only assigns err (checked later by the
+				// generated build function) — confirm the asymmetry is intended.
+				if flagType(arg.TypeName) == "JSON" {
+					code += fmt.Sprintf(`return nil, fmt.Errorf("invalid JSON for %s, example of valid JSON:\n%%s", %q)`,
+						arg.Name, ex)
+				} else {
+					code += fmt.Sprintf(`err = fmt.Errorf("invalid value for %s, must be %s")`,
+						arg.Name, fType)
+				}
+				code += "\n}"
+			}
+			if arg.Validate != "" {
+				code += "\n" + arg.Validate + "\n" + "if err != nil {\n\treturn nil, err\n}"
+			}
+		}
+	}
+	return fmt.Sprintf("%s%s%s", startIf, code, endIf), check
+}
+
+// Native Go type names of the goa primitive types, used to select the
+// string-to-value conversion code and the CLI flag type for each field.
+var (
+	boolN    = codegen.GoNativeTypeName(expr.Boolean)
+	intN     = codegen.GoNativeTypeName(expr.Int)
+	int32N   = codegen.GoNativeTypeName(expr.Int32)
+	int64N   = codegen.GoNativeTypeName(expr.Int64)
+	uintN    = codegen.GoNativeTypeName(expr.UInt)
+	uint32N  = codegen.GoNativeTypeName(expr.UInt32)
+	uint64N  = codegen.GoNativeTypeName(expr.UInt64)
+	float32N = codegen.GoNativeTypeName(expr.Float32)
+	float64N = codegen.GoNativeTypeName(expr.Float64)
+	stringN  = codegen.GoNativeTypeName(expr.String)
+	bytesN   = codegen.GoNativeTypeName(expr.Bytes)
+)
+
+// conversionCode produces the code that converts the string stored in the
+// variable "from" to the value stored in the variable "to" of type typeName.
+func conversionCode(from, to, typeName string, pointer bool) (string, bool) {
+ var (
+ parse string
+ cast string
+ checkErr bool
+ )
+ target := to
+ needCast := typeName != stringN && typeName != bytesN && flagType(typeName) != "JSON"
+ decl := ""
+ if needCast && pointer {
+ target = "val"
+ decl = ":"
+ }
+ switch typeName {
+ case boolN:
+ parse = fmt.Sprintf("%s, err %s= strconv.ParseBool(%s)", target, decl, from)
+ checkErr = true
+ case intN:
+ parse = fmt.Sprintf("var v int64\nv, err = strconv.ParseInt(%s, 10, 64)", from)
+ cast = fmt.Sprintf("%s %s= int(v)", target, decl)
+ checkErr = true
+ case int32N:
+ parse = fmt.Sprintf("var v int64\nv, err = strconv.ParseInt(%s, 10, 32)", from)
+ cast = fmt.Sprintf("%s %s= int32(v)", target, decl)
+ checkErr = true
+ case int64N:
+ parse = fmt.Sprintf("%s, err %s= strconv.ParseInt(%s, 10, 64)", target, decl, from)
+ case uintN:
+ parse = fmt.Sprintf("var v uint64\nv, err = strconv.ParseUint(%s, 10, 64)", from)
+ cast = fmt.Sprintf("%s %s= uint(v)", target, decl)
+ checkErr = true
+ case uint32N:
+ parse = fmt.Sprintf("var v uint64\nv, err = strconv.ParseUint(%s, 10, 32)", from)
+ cast = fmt.Sprintf("%s %s= uint32(v)", target, decl)
+ checkErr = true
+ case uint64N:
+ parse = fmt.Sprintf("%s, err %s= strconv.ParseUint(%s, 10, 64)", target, decl, from)
+ checkErr = true
+ case float32N:
+ parse = fmt.Sprintf("var v float64\nv, err = strconv.ParseFloat(%s, 32)", from)
+ cast = fmt.Sprintf("%s %s= float32(v)", target, decl)
+ checkErr = true
+ case float64N:
+ parse = fmt.Sprintf("%s, err %s= strconv.ParseFloat(%s, 64)", target, decl, from)
+ checkErr = true
+ case stringN:
+ parse = fmt.Sprintf("%s %s= %s", target, decl, from)
+ case bytesN:
+ parse = fmt.Sprintf("%s %s= string(%s)", target, decl, from)
+ default:
+ parse = fmt.Sprintf("err = json.Unmarshal([]byte(%s), &%s)", from, target)
+ checkErr = true
+ }
+ if !needCast {
+ return parse, checkErr
+ }
+ if cast != "" {
+ parse = parse + "\n" + cast
+ }
+ if to != target {
+ ref := ""
+ if pointer {
+ ref = "&"
+ }
+ parse = parse + fmt.Sprintf("\n%s = %s%s", to, ref, target)
+ }
+ return parse, checkErr
+}
+
+func flagType(tname string) string {
+ switch tname {
+ case boolN, intN, int32N, int64N, uintN, uint32N, uint64N, float32N, float64N, stringN:
+ return strings.ToUpper(tname)
+ case bytesN:
+ return "STRING"
+ default: // Any, Array, Map, Object, User
+ return "JSON"
+ }
+}
+
+func argToFlag(svcn, en string, arg *InitArgData) *flagData {
+ ex := jsonExample(arg.Example)
+ fn := goify(svcn, en, arg.Name)
+ return &flagData{
+ Name: codegen.KebabCase(arg.Name),
+ VarName: codegen.Goify(arg.Name, false),
+ Type: flagType(arg.TypeName),
+ FullName: fn,
+ Description: arg.Description,
+ Required: arg.Required,
+ Example: ex,
+ }
+}
+
+// input: []string
+const usageT = `// UsageCommands returns the set of commands and sub-commands using the format
+//
+// command (subcommand1|subcommand2|...)
+//
+func UsageCommands() string {
+ return ` + "`" + `{{ range . }}{{ . }}
+{{ end }}` + "`" + `
+}
+`
+
+// input: []string
+const exampleT = `// UsageExamples produces an example of a valid invocation of the CLI tool.
+func UsageExamples() string {
+ return {{ range . }}os.Args[0] + ` + "`" + ` {{ . }}` + "`" + ` + "\n" +
+ {{ end }}""
+}
+`
+
+// input: []commandData
+const parseT = `// ParseEndpoint returns the endpoint and payload as specified on the command
+// line.
+func ParseEndpoint(cc *grpc.ClientConn, opts ...grpc.CallOption) (goa.Endpoint, interface{}, error) {
+ var (
+ {{- range . }}
+ {{ .VarName }}Flags = flag.NewFlagSet("{{ .Name }}", flag.ContinueOnError)
+ {{ range .Subcommands }}
+ {{ .FullName }}Flags = flag.NewFlagSet("{{ .Name }}", flag.ExitOnError)
+ {{- $sub := . }}
+ {{- range .Flags }}
+ {{ .FullName }}Flag = {{ $sub.FullName }}Flags.String("{{ .Name }}", "{{ if .Required }}REQUIRED{{ end }}", {{ printf "%q" .Description }})
+ {{- end }}
+ {{ end }}
+ {{- end }}
+ )
+ {{ range . -}}
+ {{ $cmd := . -}}
+ {{ .VarName }}Flags.Usage = {{ .VarName }}Usage
+ {{ range .Subcommands -}}
+ {{ .FullName }}Flags.Usage = {{ .FullName }}Usage
+ {{ end }}
+ {{ end }}
+ if err := flag.CommandLine.Parse(os.Args[1:]); err != nil {
+ return nil, nil, err
+ }
+
+ if len(os.Args) < flag.NFlag()+3 {
+ return nil, nil, fmt.Errorf("not enough arguments")
+ }
+
+ var (
+ svcn string
+ svcf *flag.FlagSet
+ )
+ {
+ svcn = os.Args[1+flag.NFlag()]
+ switch svcn {
+ {{- range . }}
+ case "{{ .Name }}":
+ svcf = {{ .VarName }}Flags
+ {{- end }}
+ default:
+ return nil, nil, fmt.Errorf("unknown service %q", svcn)
+ }
+ }
+ if err := svcf.Parse(os.Args[2+flag.NFlag():]); err != nil {
+ return nil, nil, err
+ }
+
+ var (
+ epn string
+ epf *flag.FlagSet
+ )
+ {
+ epn = os.Args[2+flag.NFlag()+svcf.NFlag()]
+ switch svcn {
+ {{- range . }}
+ case "{{ .Name }}":
+ switch epn {
+ {{- range .Subcommands }}
+ case "{{ .Name }}":
+ epf = {{ .FullName }}Flags
+ {{ end }}
+ }
+ {{ end }}
+ }
+ }
+ if epf == nil {
+ return nil, nil, fmt.Errorf("unknown %q endpoint %q", svcn, epn)
+ }
+
+ // Parse endpoint flags if any
+ if len(os.Args) > 2+flag.NFlag()+svcf.NFlag() {
+ if err := epf.Parse(os.Args[3+flag.NFlag()+svcf.NFlag():]); err != nil {
+ return nil, nil, err
+ }
+ }
+
+ var (
+ data interface{}
+ endpoint goa.Endpoint
+ err error
+ )
+ {
+ switch svcn {
+ {{- range . }}
+ case "{{ .Name }}":
+ c := {{ .PkgName }}.NewClient(cc, opts...)
+ switch epn {
+ {{- $pkgName := .PkgName }}{{ range .Subcommands }}
+ case "{{ .Name }}":
+ endpoint = c.{{ .MethodVarName }}()
+ {{- if .BuildFunction }}
+ data, err = {{ $pkgName}}.{{ .BuildFunction.Name }}({{ range .BuildFunction.ActualParams }}*{{ . }}Flag, {{ end }})
+ {{- else if .Conversion }}
+ {{ .Conversion }}
+ {{- else }}
+ data = nil
+ {{- end }}
+ {{- end }}
+ }
+ {{- end }}
+ }
+ }
+ if err != nil {
+ return nil, nil, err
+ }
+
+ return endpoint, data, nil
+}
+`
+
+// input: buildFunctionData
+const buildPayloadT = `{{ printf "%s builds the payload for the %s %s endpoint from CLI flags." .Name .ServiceName .MethodName | comment }}
+func {{ .Name }}({{ range .FormalParams }}{{ . }} string, {{ end }}) ({{ .ResultType }}, error) {
+{{- if .CheckErr }}
+ var err error
+{{- end }}
+{{- range .Fields }}
+ {{- if .VarName }}
+ var {{ .VarName }} {{ if .Pointer }}*{{ end }}{{ .TypeName }}
+ {
+ {{ .Init }}
+ }
+ {{- end }}
+{{- end }}
+{{- if .CheckErr }}
+ if err != nil {
+ return nil, err
+ }
+{{- end }}
+{{- if .PayloadInit }}
+ {{- with .PayloadInit }}
+ {{ .Code }}
+ {{- if .ReturnIsStruct }}
+ {{- range .Args }}
+ {{- if .FieldName }}
+ payload.{{ .FieldName }} = {{ .Name }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ return payload, nil
+ {{- end }}
+{{- end }}
+}
+`
+
+// input: commandData
+const commandUsageT = `{{ printf "%sUsage displays the usage of the %s command and its subcommands." .Name .Name | comment }}
+func {{ .VarName }}Usage() {
+ fmt.Fprintf(os.Stderr, ` + "`" + `{{ printDescription .Description }}
+Usage:
+ %s [globalflags] {{ .Name }} COMMAND [flags]
+
+COMMAND:
+ {{- range .Subcommands }}
+ {{ .Name }}: {{ printDescription .Description }}
+ {{- end }}
+
+Additional help:
+ %s {{ .Name }} COMMAND --help
+` + "`" + `, os.Args[0], os.Args[0])
+}
+
+{{- range .Subcommands }}
+func {{ .FullName }}Usage() {
+ fmt.Fprintf(os.Stderr, ` + "`" + `%s [flags] {{ $.Name }} {{ .Name }}{{range .Flags }} -{{ .Name }} {{ .Type }}{{ end }}
+
+{{ printDescription .Description}}
+ {{- range .Flags }}
+ -{{ .Name }} {{ .Type }}: {{ .Description }}
+ {{- end }}
+
+Example:
+ ` + "`+os.Args[0]+" + "`" + ` {{ .Example }}
+` + "`" + `, os.Args[0])
+}
+{{ end }}
+`
diff --git a/vendor/goa.design/goa/grpc/codegen/client_types.go b/vendor/goa.design/goa/grpc/codegen/client_types.go
new file mode 100644
index 000000000..269064a2c
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/client_types.go
@@ -0,0 +1,98 @@
+package codegen
+
+import (
+ "path/filepath"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// ClientTypeFiles returns the gRPC transport type files.
+//
+// NOTE(review): seen is created here and handed to clientType, whose own doc
+// says it prevents duplicate constructor generation — but clientType never
+// reads it in the visible code; confirm the de-duplication actually happens.
+func ClientTypeFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+	fw := make([]*codegen.File, len(root.API.GRPC.Services))
+	seen := make(map[string]struct{})
+	for i, r := range root.API.GRPC.Services {
+		fw[i] = clientType(genpkg, r, seen)
+	}
+	return fw
+}
+
+// clientType returns the file containing the constructor functions to
+// transform the service payload types to the corresponding gRPC request types
+// and gRPC response types to the corresponding service result types.
+//
+// seen keeps track of the constructor names that have already been generated
+// to prevent duplicate code generation.
+func clientType(genpkg string, svc *expr.GRPCServiceExpr, seen map[string]struct{}) *codegen.File {
+ var (
+ path string
+ initData []*InitData
+ validated []*ValidationData
+
+ sd = GRPCServices.Get(svc.Name())
+ )
+ {
+ collect := func(c *ConvertData) {
+ if c.Init != nil {
+ initData = append(initData, c.Init)
+ }
+ }
+
+ path = filepath.Join(codegen.Gendir, "grpc", codegen.SnakeCase(svc.Name()), "client", "types.go")
+ for _, a := range svc.GRPCEndpoints {
+ ed := sd.Endpoint(a.Name())
+ if c := ed.Request.ClientConvert; c != nil {
+ collect(c)
+ }
+ if c := ed.Response.ClientConvert; c != nil {
+ collect(c)
+ }
+ if ed.ClientStream != nil {
+ if c := ed.ClientStream.RecvConvert; c != nil {
+ collect(c)
+ }
+ if c := ed.ClientStream.SendConvert; c != nil {
+ collect(c)
+ }
+ }
+ }
+
+ for _, v := range sd.Validations {
+ validated = append(validated, v)
+ }
+ }
+
+ header := codegen.Header(svc.Name()+" gRPC client types", "client",
+ []*codegen.ImportSpec{
+ {Path: "unicode/utf8"},
+ {Path: "goa.design/goa", Name: "goa"},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name())), Name: sd.Service.PkgName},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name()), "views"), Name: sd.Service.ViewsPkg},
+ {Path: filepath.Join(genpkg, "grpc", codegen.SnakeCase(svc.Name()), pbPkgName)},
+ },
+ )
+ sections := []*codegen.SectionTemplate{header}
+ for _, init := range initData {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-type-init",
+ Source: typeInitT,
+ Data: init,
+ })
+ }
+ for _, data := range validated {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-validate",
+ Source: validateT,
+ Data: data,
+ })
+ }
+ for _, h := range sd.TransformHelpers {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-transform-helper",
+ Source: transformHelperT,
+ Data: h,
+ })
+ }
+
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
diff --git a/vendor/goa.design/goa/grpc/codegen/example_cli.go b/vendor/goa.design/goa/grpc/codegen/example_cli.go
new file mode 100644
index 000000000..fb3ed14b2
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/example_cli.go
@@ -0,0 +1,94 @@
+package codegen
+
+import (
+ "os"
+ "path/filepath"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// ExampleCLIFiles returns an example gRPC client tool implementation.
+func ExampleCLIFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ var files []*codegen.File
+ for _, svr := range root.API.Servers {
+ if f := exampleCLI(genpkg, root, svr); f != nil {
+ files = append(files, f)
+ }
+ }
+ return files
+}
+
+// exampleCLI returns an example client tool HTTP implementation for the given
+// server expression.
+func exampleCLI(genpkg string, root *expr.RootExpr, svr *expr.ServerExpr) *codegen.File {
+ var (
+ mainPath string
+ apiPkg string
+ pkg string
+ )
+ {
+ apiPkg = strings.ToLower(codegen.Goify(root.API.Name, false))
+ pkg = codegen.SnakeCase(codegen.Goify(svr.Name, true))
+ mainPath = filepath.Join("cmd", pkg+"-cli", "grpc.go")
+ if _, err := os.Stat(mainPath); !os.IsNotExist(err) {
+ return nil // file already exists, skip it.
+ }
+ }
+
+ var (
+ specs []*codegen.ImportSpec
+ )
+ {
+ idx := strings.LastIndex(genpkg, string(os.PathSeparator))
+ rootPath := "."
+ if idx > 0 {
+ rootPath = genpkg[:idx]
+ }
+ specs = []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "encoding/json"},
+ {Path: "flag"},
+ {Path: "fmt"},
+ {Path: "google.golang.org/grpc"},
+ {Path: "os"},
+ {Path: "time"},
+ {Path: "goa.design/goa"},
+ {Path: "goa.design/goa/grpc", Name: "goagrpc"},
+ {Path: rootPath, Name: apiPkg},
+ {Path: filepath.Join(genpkg, "grpc", "cli", pkg), Name: "cli"},
+ }
+ }
+
+ var (
+ sections []*codegen.SectionTemplate
+ )
+ {
+ sections = []*codegen.SectionTemplate{
+ codegen.Header("", "main", specs),
+ &codegen.SectionTemplate{Name: "do-grpc-cli", Source: grpcCLIDoT},
+ }
+ }
+
+ return &codegen.File{Path: mainPath, SectionTemplates: sections, SkipExist: true}
+}
+
+const (
+	// grpcCLIDoT generates the doGRPC helper wired into the example CLI tool.
+	// NOTE(review): the generated doGRPC ignores its scheme, timeout and
+	// debug parameters, dials without TLS, and proceeds to ParseEndpoint even
+	// when grpc.Dial fails (the error is only printed) — confirm this is the
+	// intended example behavior.
+	grpcCLIDoT = `func doGRPC(scheme, host string, timeout int, debug bool) (goa.Endpoint, interface{}, error) {
+	conn, err := grpc.Dial(host, grpc.WithInsecure())
+	if err != nil {
+		fmt.Fprintln(os.Stderr, fmt.Sprintf("could not connect to gRPC server at %s: %v", host, err))
+	}
+	return cli.ParseEndpoint(conn)
+}
+
+func grpcUsageCommands() string {
+	return cli.UsageCommands()
+}
+
+func grpcUsageExamples() string {
+	return cli.UsageExamples()
+}
+`
+)
diff --git a/vendor/goa.design/goa/grpc/codegen/example_server.go b/vendor/goa.design/goa/grpc/codegen/example_server.go
new file mode 100644
index 000000000..489425242
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/example_server.go
@@ -0,0 +1,203 @@
+package codegen
+
+import (
+ "os"
+ "path/filepath"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// ExampleServerFiles returns and example main and dummy service
+// implementations.
+func ExampleServerFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ var fw []*codegen.File
+ for _, svr := range root.API.Servers {
+ if m := exampleServer(genpkg, root, svr); m != nil {
+ fw = append(fw, m)
+ }
+ }
+ return fw
+}
+
+// exampleServer returns an example gRPC server implementation.
+func exampleServer(genpkg string, root *expr.RootExpr, svr *expr.ServerExpr) *codegen.File {
+ var (
+ mainPath string
+ apiPkg string
+ )
+ {
+ apiPkg = strings.ToLower(codegen.Goify(root.API.Name, false))
+ pkg := codegen.SnakeCase(codegen.Goify(svr.Name, true))
+ mainPath = filepath.Join("cmd", pkg, "grpc.go")
+ if _, err := os.Stat(mainPath); !os.IsNotExist(err) {
+ return nil // file already exists, skip it.
+ }
+ }
+
+ var (
+ specs []*codegen.ImportSpec
+ )
+ {
+ idx := strings.LastIndex(genpkg, string(os.PathSeparator))
+ rootPath := "."
+ if idx > 0 {
+ rootPath = genpkg[:idx]
+ }
+ specs = []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "log"},
+ {Path: "net"},
+ {Path: "net/url"},
+ {Path: "os"},
+ {Path: "sync"},
+ {Path: "goa.design/goa/grpc/middleware"},
+ {Path: "google.golang.org/grpc"},
+ {Path: "github.com/grpc-ecosystem/go-grpc-middleware", Name: "grpcmiddleware"},
+ {Path: "goa.design/goa/grpc", Name: "goagrpc"},
+ {Path: rootPath, Name: apiPkg},
+ }
+ for _, svc := range root.API.GRPC.Services {
+ pkgName := GRPCServices.Get(svc.Name()).Service.PkgName
+ specs = append(specs, &codegen.ImportSpec{
+ Path: filepath.Join(genpkg, "grpc", codegen.SnakeCase(svc.Name()), "server"),
+ Name: pkgName + "svr",
+ })
+ specs = append(specs, &codegen.ImportSpec{
+ Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name())),
+ Name: pkgName,
+ })
+ specs = append(specs, &codegen.ImportSpec{
+ Path: filepath.Join(genpkg, "grpc", codegen.SnakeCase(svc.Name()), pbPkgName),
+ })
+ }
+ }
+
+ var (
+ sections []*codegen.SectionTemplate
+ )
+ {
+ svcdata := make([]*ServiceData, len(svr.Services))
+ for i, svc := range svr.Services {
+ svcdata[i] = GRPCServices.Get(svc)
+ }
+ sections = []*codegen.SectionTemplate{
+ codegen.Header("", "main", specs),
+ &codegen.SectionTemplate{
+ Name: "server-grpc-start",
+ Source: grpcSvrStartT,
+ Data: map[string]interface{}{
+ "Services": svcdata,
+ },
+ },
+ &codegen.SectionTemplate{Name: "server-grpc-logger", Source: grpcSvrLoggerT},
+ &codegen.SectionTemplate{
+ Name: "server-grpc-init",
+ Source: grpcSvrInitT,
+ Data: map[string]interface{}{
+ "Services": svcdata,
+ },
+ },
+ &codegen.SectionTemplate{
+ Name: "server-grpc-register",
+ Source: grpcRegisterSvrT,
+ Data: map[string]interface{}{
+ "Services": svcdata,
+ },
+ FuncMap: map[string]interface{}{
+ "goify": codegen.Goify,
+ },
+ },
+ &codegen.SectionTemplate{
+ Name: "server-grpc-end",
+ Source: grpcSvrEndT,
+ Data: map[string]interface{}{
+ "Services": svcdata,
+ },
+ },
+ }
+ }
+ return &codegen.File{Path: mainPath, SectionTemplates: sections, SkipExist: true}
+}
+
+const (
+ // input: map[string]interface{}{"Services":[]*ServiceData}
+ grpcSvrStartT = `{{ comment "handleGRPCServer starts configures and starts a gRPC server on the given URL. It shuts down the server if any error is received in the error channel." }}
+func handleGRPCServer(ctx context.Context, u *url.URL{{ range $.Services }}{{ if .Service.Methods }}, {{ .Service.VarName }}Endpoints *{{ .Service.PkgName }}.Endpoints{{ end }}{{ end }}, wg *sync.WaitGroup, errc chan error, logger *log.Logger, debug bool) {
+`
+
+ grpcSvrLoggerT = `
+ // Setup goa log adapter. Replace logger with your own using your
+ // log package of choice. The goa.design/middleware/logging/...
+ // packages define log adapters for common log packages.
+ var (
+ adapter middleware.Logger
+ )
+ {
+ adapter = middleware.NewLogger(logger)
+ }
+`
+
+ // input: map[string]interface{}{"Services":[]*ServiceData}
+ grpcSvrInitT = `
+ // Wrap the endpoints with the transport specific layers. The generated
+ // server packages contains code generated from the design which maps
+ // the service input and output data structures to gRPC requests and
+ // responses.
+ var (
+ {{- range .Services }}
+ {{ .Service.VarName }}Server *{{.Service.PkgName}}svr.Server
+ {{- end }}
+ )
+ {
+ {{- range .Services }}
+ {{- if .Endpoints }}
+ {{ .Service.VarName }}Server = {{ .Service.PkgName }}svr.New({{ .Service.VarName }}Endpoints{{ if .HasUnaryEndpoint }}, nil{{ end }}{{ if .HasStreamingEndpoint }}, nil{{ end }})
+ {{- else }}
+ {{ .Service.VarName }}Server = {{ .Service.PkgName }}svr.New(nil{{ if .HasUnaryEndpoint }}, nil{{ end }}{{ if .HasStreamingEndpoint }}, nil{{ end }})
+ {{- end }}
+ {{- end }}
+ }
+`
+
+ // input: map[string]interface{}{"Services":[]*ServiceData}
+ grpcRegisterSvrT = `
+ // Initialize gRPC server with the middleware.
+ srv := grpc.NewServer(grpcmiddleware.WithUnaryServerChain(
+ middleware.RequestID(),
+ middleware.Log(adapter),
+ ))
+
+ // Register the servers.
+ {{- range .Services }}
+ {{ .PkgName }}.Register{{ goify .Service.VarName true }}Server(srv, {{ .Service.VarName }}Server)
+ {{- end }}
+`
+
+ // input: map[string]interface{}{"Services":[]*ServiceData}
+ grpcSvrEndT = `
+ (*wg).Add(1)
+ go func() {
+ defer (*wg).Done()
+
+ {{ comment "Start gRPC server in a separate goroutine." }}
+ go func() {
+ lis, err := net.Listen("tcp", u.Host)
+ if err != nil {
+ errc <- err
+ }
+ logger.Printf("gRPC server listening on %q", u.Host)
+ errc <- srv.Serve(lis)
+ }()
+
+ select {
+ case <-ctx.Done():
+ logger.Printf("shutting down gRPC server at %q", u.Host)
+ srv.Stop()
+ return
+ }
+ }()
+}
+`
+)
diff --git a/vendor/goa.design/goa/grpc/codegen/funcs.go b/vendor/goa.design/goa/grpc/codegen/funcs.go
new file mode 100644
index 000000000..08b485d8e
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/funcs.go
@@ -0,0 +1,37 @@
+package codegen
+
+import (
+ "fmt"
+
+ "goa.design/goa/dsl"
+)
+
+// statusCodeToGRPCConst produces the standard name for the given gRPC status
+// code. If no standard name exists then the string consisting of the code
+// integer value is returned.
+func statusCodeToGRPCConst(code int) string {
+ if v, ok := statusCodeToConst[code]; ok {
+ return fmt.Sprintf("codes.%s", v)
+ }
+ return fmt.Sprintf("%d", code)
+}
+
+var statusCodeToConst = map[int]string{
+ dsl.CodeOK: "OK",
+ dsl.CodeCanceled: "Canceled",
+ dsl.CodeUnknown: "Unknown",
+ dsl.CodeInvalidArgument: "InvalidArgument",
+ dsl.CodeDeadlineExceeded: "DeadlineExceeded",
+ dsl.CodeNotFound: "NotFound",
+ dsl.CodeAlreadyExists: "AlreadyExists",
+ dsl.CodePermissionDenied: "PermissionDenied",
+ dsl.CodeResourceExhausted: "ResourceExhausted",
+ dsl.CodeFailedPrecondition: "FailedPrecondition",
+ dsl.CodeAborted: "Aborted",
+ dsl.CodeOutOfRange: "OutOfRange",
+ dsl.CodeUnimplemented: "Unimplemented",
+ dsl.CodeInternal: "Internal",
+ dsl.CodeUnavailable: "Unavailable",
+ dsl.CodeDataLoss: "DataLoss",
+ dsl.CodeUnauthenticated: "Unauthenticated",
+}
diff --git a/vendor/goa.design/goa/grpc/codegen/proto.go b/vendor/goa.design/goa/grpc/codegen/proto.go
new file mode 100644
index 000000000..b1065ca2f
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/proto.go
@@ -0,0 +1,84 @@
+package codegen
+
+import (
+ "fmt"
+ "os"
+ "os/exec"
+ "path/filepath"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// ProtoFiles returns a *.proto file for each gRPC service.
+func ProtoFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ fw := make([]*codegen.File, len(root.API.GRPC.Services))
+ for i, svc := range root.API.GRPC.Services {
+ fw[i] = protoFile(genpkg, svc)
+ }
+ return fw
+}
+
+func protoFile(genpkg string, svc *expr.GRPCServiceExpr) *codegen.File {
+ svcName := codegen.SnakeCase(svc.Name())
+ path := filepath.Join(codegen.Gendir, "grpc", svcName, pbPkgName, svcName+".proto")
+ data := GRPCServices.Get(svc.Name())
+
+ title := fmt.Sprintf("%s protocol buffer definition", svc.Name())
+ sections := []*codegen.SectionTemplate{
+ Header(title, svc.Name(), []*codegen.ImportSpec{}),
+ &codegen.SectionTemplate{
+ Name: "grpc-service",
+ Source: serviceT,
+ Data: data,
+ },
+ }
+
+ for _, m := range data.Messages {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "grpc-message",
+ Source: messageT,
+ Data: m,
+ })
+ }
+
+ return &codegen.File{
+ Path: path,
+ SectionTemplates: sections,
+ FinalizeFunc: protoc,
+ }
+}
+
// protoc compiles the given .proto file into Go gRPC bindings by invoking
// the protoc compiler in the file's directory. It returns an error if the
// output directory cannot be created or if protoc fails (including protoc's
// combined output in the message).
func protoc(path string) error {
	dir := filepath.Dir(path)
	// Bug fix: the MkdirAll error was silently discarded; a failure here
	// would previously surface only as a confusing protoc error.
	if err := os.MkdirAll(dir, 0777); err != nil {
		return err
	}

	args := []string{"--go_out=plugins=grpc:.", path, "--proto_path", dir}
	cmd := exec.Command("protoc", args...)
	cmd.Dir = dir

	if output, err := cmd.CombinedOutput(); err != nil {
		return fmt.Errorf("failed to run protoc: %s: %s", err, output)
	}

	return nil
}
+
+const (
+ // input: ServiceData
+ serviceT = `{{ .Description | comment }}
+service {{ .Name }} {
+ {{- range .Endpoints }}
+ {{ if .Method.Description }}{{ .Method.Description | comment }}{{ end }}
+ {{- $serverStream := or (eq .Method.StreamKind 3) (eq .Method.StreamKind 4) }}
+ {{- $clientStream := or (eq .Method.StreamKind 2) (eq .Method.StreamKind 4) }}
+ rpc {{ .Method.VarName }} ({{ if $clientStream }}stream {{ end }}{{ .Request.Message.Name }}) returns ({{ if $serverStream }}stream {{ end }}{{ .Response.Message.Name }});
+ {{- end }}
+}
+`
+
+ // input: service.UserTypeData
+ messageT = `{{ comment .Description }}
+message {{ .VarName }}{{ .Def }}
+`
+)
diff --git a/vendor/goa.design/goa/grpc/codegen/proto_header.go b/vendor/goa.design/goa/grpc/codegen/proto_header.go
new file mode 100644
index 000000000..c19f7f370
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/proto_header.go
@@ -0,0 +1,45 @@
+package codegen
+
+import (
+ "goa.design/goa/codegen"
+ "goa.design/goa/pkg"
+)
+
+var (
+ // ProtoVersion is the protocol buffer version used to generate .proto files
+ ProtoVersion = "proto3"
+)
+
+// Header returns a proto source file header section template.
+func Header(title, pack string, imports []*codegen.ImportSpec) *codegen.SectionTemplate {
+ return &codegen.SectionTemplate{
+ Name: "source-header",
+ Source: headerT,
+ Data: map[string]interface{}{
+ "Title": title,
+ "ToolVersion": pkg.Version(),
+ "ProtoVersion": ProtoVersion,
+ "Pkg": pack,
+ "Imports": imports,
+ },
+ }
+}
+
+const (
+ headerT = `{{ if .Title -}}
+// Code generated with goa {{ .ToolVersion }}, DO NOT EDIT.
+//
+// {{ .Title }}
+//
+// Command:
+{{ comment commandLine }}
+{{- end }}
+
+syntax = {{ printf "%q" .ProtoVersion }};
+
+package pb;
+
+{{ range .Imports }}
+import {{ .Code }};
+{{ end }}`
+)
diff --git a/vendor/goa.design/goa/grpc/codegen/protobuf.go b/vendor/goa.design/goa/grpc/codegen/protobuf.go
new file mode 100644
index 000000000..38034813f
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/protobuf.go
@@ -0,0 +1,453 @@
+package codegen
+
+import (
+ "fmt"
+ "goa.design/goa/expr"
+ "strconv"
+ "strings"
+
+ "goa.design/goa/codegen"
+)
+
+type (
+ // protobufAttribute represents an attribute type that returns protocol
+ // buffer code.
+ protobufAttribute struct {
+ *codegen.GoAttribute
+ }
+)
+
+// Name returns the protocol buffer type name.
+func (p *protobufAttribute) Name() string {
+ return protoBufGoFullTypeName(p.Attribute, p.Pkg, p.NameScope)
+}
+
+// Ref returns the protocol buffer type reference.
+func (p *protobufAttribute) Ref() string {
+ return protoBufGoFullTypeRef(p.Attribute, p.Pkg, p.NameScope)
+}
+
+// Set sets the underlying attribute.
+func (p *protobufAttribute) Dup(att *expr.AttributeExpr) codegen.Attributor {
+ return &protobufAttribute{
+ GoAttribute: p.GoAttribute.Dup(att).(*codegen.GoAttribute),
+ }
+}
+
+// Field returns the field name as generated by protocol buffer compiler.
+// NOTE: protoc does not care about common initialisms like api -> API.
+func (p *protobufAttribute) Field(name string, firstUpper bool) string {
+ return protoBufifyAtt(p.Attribute, name, firstUpper)
+}
+
+// protoBufContext returns a contextual attribute for the protocol buffer type.
+func protoBufContext(att *expr.AttributeExpr, pkg string, scope *codegen.NameScope) *codegen.ContextualAttribute {
+ return &codegen.ContextualAttribute{
+ Attribute: &protobufAttribute{
+ GoAttribute: codegen.NewGoAttribute(att, pkg, scope).(*codegen.GoAttribute),
+ },
+ NonPointer: true,
+ UseDefault: true,
+ }
+}
+
+// makeProtoBufMessage recursively transforms the given attribute expression
+// to generate a valid protocol buffer message definition in the proto file.
+// A protocol buffer message is always a user type in goa v2.
+//
+// NOTE: Protocol buffer does not provide native support for nested
+// arrays/maps (See grpc/docs/FAQ.md)
+//
+// makeProtoBufMessage ensures the resulting attribute is a user type. If the
+// given attribute type is a primitive, array, or a map, it wraps the given
+// attribute under an attribute with name "field" and RPC tag number 1. For,
+// nested arrays/maps, the inner array/map is wrapped into a user type.
+func makeProtoBufMessage(att *expr.AttributeExpr, tname string, scope *codegen.NameScope) {
+ switch dt := att.Type.(type) {
+ case expr.Primitive:
+ wrapAttr(att, tname)
+ return
+ case expr.UserType:
+ if dt == expr.Empty {
+ // Empty type must generate a message definition
+ att.Type = &expr.UserTypeExpr{
+ TypeName: tname,
+ AttributeExpr: &expr.AttributeExpr{Type: &expr.Object{}},
+ }
+ return
+ } else if rt, ok := dt.(*expr.ResultTypeExpr); ok && expr.IsArray(rt) {
+ // result type collection
+ wrapAttr(att, tname)
+ }
+ case *expr.Array, *expr.Map:
+ wrapAttr(att, tname)
+ case *expr.Object:
+ att.Type = &expr.UserTypeExpr{
+ TypeName: tname,
+ AttributeExpr: expr.DupAtt(att),
+ }
+ }
+ // wrap nested arrays/maps
+ n := ""
+ makeProtoBufMessageR(att, &n, scope)
+}
+
+// makeProtoBufMessageR is the recursive implementation of makeProtoBufMessage.
+func makeProtoBufMessageR(att *expr.AttributeExpr, tname *string, scope *codegen.NameScope, seen ...map[string]struct{}) {
+ wrap := func(att *expr.AttributeExpr, tname string) {
+ switch dt := att.Type.(type) {
+ case *expr.Array:
+ wrapAttr(att, "ArrayOf"+tname+
+ protoBufify(protoBufMessageDef(dt.ElemType, scope), true))
+ case *expr.Map:
+ wrapAttr(att, tname+"MapOf"+
+ protoBufify(protoBufMessageDef(dt.KeyType, scope), true)+
+ protoBufify(protoBufMessageDef(dt.ElemType, scope), true))
+ }
+ }
+ switch dt := att.Type.(type) {
+ case expr.UserType:
+ var s map[string]struct{}
+ if len(seen) > 0 {
+ s = seen[0]
+ } else {
+ s = make(map[string]struct{})
+ seen = append(seen, s)
+ }
+ if _, ok := s[dt.ID()]; ok {
+ return
+ }
+ s[dt.ID()] = struct{}{}
+ if rt, ok := dt.(*expr.ResultTypeExpr); ok && expr.IsArray(rt) {
+ wrapAttr(rt.Attribute(), rt.Name())
+ }
+ makeProtoBufMessageR(dt.Attribute(), tname, scope, seen...)
+ case *expr.Array:
+ makeProtoBufMessageR(dt.ElemType, tname, scope, seen...)
+ wrap(dt.ElemType, *tname)
+ case *expr.Map:
+ // need not worry about map keys because protocol buffer supports
+ // only primitives as map keys.
+ makeProtoBufMessageR(dt.ElemType, tname, scope, seen...)
+ wrap(dt.ElemType, *tname)
+ case *expr.Object:
+ for _, nat := range *dt {
+ makeProtoBufMessageR(nat.Attribute, tname, scope, seen...)
+ }
+ }
+}
+
+// wrapAttr makes the attribute type a user type by wrapping the given
+// attribute into an attribute named "field".
+func wrapAttr(att *expr.AttributeExpr, tname string) {
+ wrap := func(att *expr.AttributeExpr) *expr.AttributeExpr {
+ return &expr.AttributeExpr{
+ Type: &expr.Object{
+ &expr.NamedAttributeExpr{
+ Name: "field",
+ Attribute: &expr.AttributeExpr{
+ Type: att.Type,
+ Meta: expr.MetaExpr{"rpc:tag": []string{"1"}},
+ },
+ },
+ },
+ Validation: &expr.ValidationExpr{Required: []string{"field"}},
+ }
+ }
+ switch dt := att.Type.(type) {
+ case expr.UserType:
+ // Don't change the original user type. Create a copy and wrap that.
+ ut := expr.Dup(dt).(expr.UserType)
+ ut.SetAttribute(wrap(ut.Attribute()))
+ att.Type = ut
+ default:
+ att.Type = &expr.UserTypeExpr{
+ TypeName: tname,
+ AttributeExpr: wrap(att),
+ }
+ }
+}
+
+// unwrapAttr returns the attribute under the attribute name "field".
+// If "field" does not exist, it returns the given attribute.
+func unwrapAttr(att *expr.AttributeExpr) *expr.AttributeExpr {
+ if a := att.Find("field"); a != nil {
+ return a
+ }
+ return att
+}
+
+// protoBufMessageName returns the protocol buffer message name of the given
+// attribute type.
+func protoBufMessageName(att *expr.AttributeExpr, s *codegen.NameScope) string {
+ return protoBufFullMessageName(att, "", s)
+}
+
+// protoBufFullMessageName returns the protocol buffer message name of the
+// given user type qualified with the given package name if applicable.
+func protoBufFullMessageName(att *expr.AttributeExpr, pkg string, s *codegen.NameScope) string {
+ switch actual := att.Type.(type) {
+ case expr.UserType:
+ n := s.HashedUnique(actual, protoBufify(actual.Name(), true), "")
+ if pkg == "" {
+ return n
+ }
+ return pkg + "." + n
+ case expr.CompositeExpr:
+ return protoBufFullMessageName(actual.Attribute(), pkg, s)
+ default:
+ panic(fmt.Sprintf("data type is not a user type: received type %T", actual)) // bug
+ }
+}
+
+// protoBufGoFullTypeName returns the protocol buffer type name for the given
+// attribute generated after compiling the proto file (in *.pb.go).
+func protoBufGoTypeName(att *expr.AttributeExpr, s *codegen.NameScope) string {
+ return protoBufGoFullTypeName(att, "", s)
+}
+
+// protoBufGoFullTypeName returns the protocol buffer type name qualified with
+// the given package name for the given attribute generated after compiling
+// the proto file (in *.pb.go).
+func protoBufGoFullTypeName(att *expr.AttributeExpr, pkg string, s *codegen.NameScope) string {
+ switch actual := att.Type.(type) {
+ case expr.UserType, expr.CompositeExpr:
+ return protoBufFullMessageName(att, pkg, s)
+ case expr.Primitive:
+ return protoBufNativeGoTypeName(actual)
+ case *expr.Array:
+ return "[]" + protoBufGoFullTypeRef(actual.ElemType, pkg, s)
+ case *expr.Map:
+ return fmt.Sprintf("map[%s]%s",
+ protoBufGoFullTypeRef(actual.KeyType, pkg, s),
+ protoBufGoFullTypeRef(actual.ElemType, pkg, s))
+ case *expr.Object:
+ return s.GoTypeDef(att, false, false)
+ default:
+ panic(fmt.Sprintf("unknown data type %T", actual)) // bug
+ }
+}
+
+// protoBufMessageDef returns the protocol buffer code that defines a message
+// which matches the data structure definition (the part that comes after
+// `message foo`). The message is defined using the proto3 syntax.
+func protoBufMessageDef(att *expr.AttributeExpr, s *codegen.NameScope) string {
+ switch actual := att.Type.(type) {
+ case expr.Primitive:
+ return protoBufNativeMessageTypeName(att.Type)
+ case *expr.Array:
+ return "repeated " + protoBufMessageDef(actual.ElemType, s)
+ case *expr.Map:
+ return fmt.Sprintf("map<%s, %s>", protoBufMessageDef(actual.KeyType, s), protoBufMessageDef(actual.ElemType, s))
+ case expr.UserType:
+ return protoBufMessageName(att, s)
+ case *expr.Object:
+ var ss []string
+ ss = append(ss, " {")
+ for _, nat := range *actual {
+ var (
+ fn string
+ fnum uint64
+ typ string
+ desc string
+ )
+ {
+ fn = codegen.SnakeCase(protoBufify(nat.Name, false))
+ fnum = rpcTag(nat.Attribute)
+ typ = protoBufMessageDef(nat.Attribute, s)
+ if nat.Attribute.Description != "" {
+ desc = codegen.Comment(nat.Attribute.Description) + "\n\t"
+ }
+ }
+ ss = append(ss, fmt.Sprintf("\t%s%s %s = %d;", desc, typ, fn, fnum))
+ }
+ ss = append(ss, "}")
+ return strings.Join(ss, "\n")
+ default:
+ panic(fmt.Sprintf("unknown data type %T", actual)) // bug
+ }
+}
+
+// protoBufGoTypeRef returns the Go code that refers to the Go type generated
+// by compiling the protocol buffer (in *.pb.go) for the given attribute.
+func protoBufGoTypeRef(att *expr.AttributeExpr, s *codegen.NameScope) string {
+ return protoBufGoFullTypeRef(att, "", s)
+}
+
+// protoBufGoFullTypeRef returns the Go code qualified with package name that
+// refers to the Go type generated by compiling the protocol buffer
+// (in *.pb.go) for the given attribute.
+func protoBufGoFullTypeRef(att *expr.AttributeExpr, pkg string, s *codegen.NameScope) string {
+ name := protoBufGoFullTypeName(att, pkg, s)
+ if expr.IsObject(att.Type) {
+ return "*" + name
+ }
+ return name
+}
+
+// protoBufify makes a valid protocol buffer identifier out of any string.
+// It does that by removing any non letter and non digit character and by
+// making sure the first character is a letter or "_". protoBufify produces a
+// "CamelCase" version of the string.
+//
+// If firstUpper is true the first character of the identifier is uppercase
+// otherwise it's lowercase.
+func protoBufify(str string, firstUpper bool) string {
+ // Optimize trivial case
+ if str == "" {
+ return ""
+ }
+
+ // Remove optional suffix that defines corresponding transport specific
+ // name.
+ idx := strings.Index(str, ":")
+ if idx > 0 {
+ str = str[:idx]
+ }
+
+ str = codegen.CamelCase(str, firstUpper, false)
+ if str == "" {
+ // All characters are invalid. Produce a default value.
+ if firstUpper {
+ return "Val"
+ }
+ return "val"
+ }
+ return fixReservedProtoBuf(str)
+}
+
+// protoBufifyAtt honors any struct:field:name meta set on the attribute and
+// and calls protoBufify with the tag value if present or the given name
+// otherwise.
+func protoBufifyAtt(att *expr.AttributeExpr, name string, upper bool) string {
+ if tname, ok := att.Meta["struct:field:name"]; ok {
+ if len(tname) > 0 {
+ name = tname[0]
+ }
+ }
+ return protoBufify(name, upper)
+}
+
+// protoBufNativeMessageTypeName returns the protocol buffer built-in type
+// corresponding to the given primitive type. It panics if t is not a
+// primitive type.
+func protoBufNativeMessageTypeName(t expr.DataType) string {
+ switch t.Kind() {
+ case expr.BooleanKind:
+ return "bool"
+ case expr.IntKind:
+ return "sint32"
+ case expr.Int32Kind:
+ return "sint32"
+ case expr.Int64Kind:
+ return "sint64"
+ case expr.UIntKind:
+ return "uint32"
+ case expr.UInt32Kind:
+ return "uint32"
+ case expr.UInt64Kind:
+ return "uint64"
+ case expr.Float32Kind:
+ return "float"
+ case expr.Float64Kind:
+ return "double"
+ case expr.StringKind:
+ return "string"
+ case expr.BytesKind:
+ return "bytes"
+ default:
+ panic(fmt.Sprintf("cannot compute native protocol buffer type for %T", t)) // bug
+ }
+}
+
+// protoBufNativeGoTypeName returns the Go type corresponding to the given
+// primitive type generated by the protocol buffer compiler after compiling
+// the ".proto" file (in *.pb.go).
+func protoBufNativeGoTypeName(t expr.DataType) string {
+ switch t.Kind() {
+ case expr.BooleanKind:
+ return "bool"
+ case expr.IntKind:
+ return "int32"
+ case expr.Int32Kind:
+ return "int32"
+ case expr.Int64Kind:
+ return "int64"
+ case expr.UIntKind:
+ return "uint32"
+ case expr.UInt32Kind:
+ return "uint32"
+ case expr.UInt64Kind:
+ return "uint64"
+ case expr.Float32Kind:
+ return "float32"
+ case expr.Float64Kind:
+ return "float64"
+ case expr.StringKind:
+ return "string"
+ case expr.BytesKind:
+ return "[]byte"
+ default:
+ panic(fmt.Sprintf("cannot compute native protocol buffer type for %T", t)) // bug
+ }
+}
+
+// rpcTag returns the unique numbered RPC tag from the given attribute.
+func rpcTag(a *expr.AttributeExpr) uint64 {
+ var tag uint64
+ if t, ok := a.Meta["rpc:tag"]; ok {
+ tn, err := strconv.ParseUint(t[0], 10, 64)
+ if err != nil {
+ panic(err) // bug (should catch invalid field numbers in validation)
+ }
+ tag = tn
+ }
+ return tag
+}
+
+// fixReservedProtoBuf appends an underscore on to protocol buffer reserved
+// keywords.
+func fixReservedProtoBuf(w string) string {
+ if reservedProtoBuf[codegen.CamelCase(w, false, false)] {
+ w += "_"
+ }
+ return w
+}
+
+var (
+ // reserved protocol buffer keywords and package names
+ reservedProtoBuf = map[string]bool{
+ // types
+ "bool": true,
+ "bytes": true,
+ "double": true,
+ "fixed32": true,
+ "fixed64": true,
+ "float": true,
+ "int32": true,
+ "int64": true,
+ "sfixed32": true,
+ "sfixed64": true,
+ "sint32": true,
+ "sint64": true,
+ "string": true,
+ "uint32": true,
+ "uint64": true,
+
+ // reserved keywords
+ "enum": true,
+ "import": true,
+ "map": true,
+ "message": true,
+ "oneof": true,
+ "option": true,
+ "package": true,
+ "public": true,
+ "repeated": true,
+ "reserved": true,
+ "returns": true,
+ "rpc": true,
+ "service": true,
+ "syntax": true,
+ }
+)
diff --git a/vendor/goa.design/goa/grpc/codegen/protobuf_transform.go b/vendor/goa.design/goa/grpc/codegen/protobuf_transform.go
new file mode 100644
index 000000000..47289b2fe
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/protobuf_transform.go
@@ -0,0 +1,324 @@
+package codegen
+
+import (
+ "fmt"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// protoBufTransform produces Go code to initialize a data structure defined
+// by target from an instance of data structure defined by source. The source
+// or target is a protocol buffer type.
+//
+// source, target are the source and target attributes used in transformation
+//
+// `proto` param if true indicates that the target is a protocol buffer type
+//
+func protoBufTransform(source, target *codegen.ContextualAttribute, sourceVar, targetVar string, proto bool) (string, []*codegen.TransformFunctionData, error) {
+ var prefix string
+ {
+ prefix = "protobuf"
+ if proto {
+ prefix = "svc"
+ }
+ }
+ p := &protoBufTransformer{
+ helperPrefix: prefix,
+ proto: proto,
+ }
+
+ code, err := p.Transform(source, target, &codegen.TransformAttrs{SourceVar: sourceVar, TargetVar: targetVar, NewVar: true})
+ if err != nil {
+ return "", nil, err
+ }
+
+ funcs, err := codegen.GoTransformHelpers(source, target, p, prefix)
+ if err != nil {
+ return "", nil, err
+ }
+
+ return strings.TrimRight(code, "\n"), funcs, nil
+}
+
+// protoBufTransformer implements the codegen.Transformer interface
+// to transform Go types to protocol buffer generated Go types.
+type protoBufTransformer struct {
+ // helperPrefix is the prefix for the helper function names.
+ helperPrefix string
+ // proto if true indicates target type is a protocol buffer type.
+ proto bool
+ // targetInit is the initialization code for the target type for nested
+ // map and array types.
+ targetInit string
+}
+
+// Transform returns the code to initialize a target data structure from an
+// instance of source data structure. It returns an error if source and target
+// are not compatible for transformation (different types, fields of
+// different type).
+func (p *protoBufTransformer) Transform(source, target *codegen.ContextualAttribute, ta *codegen.TransformAttrs) (string, error) {
+ var (
+ initCode string
+ err error
+
+ srcAtt = source.Attribute.Expr()
+ tgtAtt = target.Attribute.Expr()
+ )
+ if err := codegen.IsCompatible(srcAtt.Type, tgtAtt.Type, ta.SourceVar, ta.TargetVar); err != nil {
+ if p.proto {
+ initCode += fmt.Sprintf("%s := &%s{}\n", ta.TargetVar, target.Attribute.Name())
+ ta.TargetVar += ".Field"
+ ta.NewVar = false
+ tgtAtt = unwrapAttr(expr.DupAtt(tgtAtt))
+ } else {
+ srcAtt = unwrapAttr(expr.DupAtt(srcAtt))
+ ta.SourceVar += ".Field"
+ }
+ if err = codegen.IsCompatible(srcAtt.Type, tgtAtt.Type, ta.SourceVar, ta.TargetVar); err != nil {
+ return "", err
+ }
+ source = source.Dup(srcAtt, true)
+ target = target.Dup(tgtAtt, true)
+ }
+
+ var (
+ code string
+
+ sourceType = source.Attribute.Expr().Type
+ )
+ {
+ switch {
+ case expr.IsArray(sourceType):
+ code, err = p.TransformArray(source, target, ta)
+ case expr.IsMap(sourceType):
+ code, err = p.TransformMap(source, target, ta)
+ case expr.IsObject(sourceType):
+ code, err = p.TransformObject(source, target, ta)
+ default:
+ assign := "="
+ if ta.NewVar {
+ assign = ":="
+ }
+ srcField := p.ConvertType(source.Attribute, target.Attribute, ta.SourceVar)
+ code = fmt.Sprintf("%s %s %s\n", ta.TargetVar, assign, srcField)
+ }
+ }
+ if err != nil {
+ return "", err
+ }
+ return initCode + code, nil
+}
+
+// MakeCompatible checks whether source and target attributes are
+// compatible for transformation and returns an error if not. If no error
+// is returned, it returns the source and target attributes that are
+// compatible.
+func (p *protoBufTransformer) MakeCompatible(source, target *codegen.ContextualAttribute, ta *codegen.TransformAttrs, suffix string) (src, tgt *codegen.ContextualAttribute, newTA *codegen.TransformAttrs, err error) {
+ src = source
+ tgt = target
+ newTA = &codegen.TransformAttrs{
+ SourceVar: ta.SourceVar,
+ TargetVar: ta.TargetVar,
+ NewVar: ta.NewVar,
+ }
+ if err = codegen.IsCompatible(
+ src.Attribute.Expr().Type,
+ tgt.Attribute.Expr().Type,
+ ta.SourceVar+suffix, ta.TargetVar+suffix); err != nil {
+ if p.proto {
+ p.targetInit = target.Attribute.Name()
+ tgtAtt := unwrapAttr(expr.DupAtt(target.Attribute.Expr()))
+ tgt = target.Dup(tgtAtt, true)
+ } else {
+ srcAtt := unwrapAttr(expr.DupAtt(source.Attribute.Expr()))
+ src = source.Dup(srcAtt, true)
+ newTA.SourceVar += ".Field"
+ }
+ if err = codegen.IsCompatible(
+ src.Attribute.Expr().Type,
+ tgt.Attribute.Expr().Type,
+ newTA.SourceVar, newTA.TargetVar); err != nil {
+ return src, tgt, newTA, err
+ }
+ }
+ return src, tgt, newTA, nil
+}
+
+// ConvertType produces code to initialize a target type from a source type
+// held by sourceVar.
+// NOTE: For Int and UInt kinds, protocol buffer Go compiler generates
+// int32 and uint32 respectively whereas goa v2 generates int and uint.
+func (p *protoBufTransformer) ConvertType(source, target codegen.Attributor, sourceVar string) string {
+ typ := source.Expr().Type
+ if _, ok := typ.(expr.UserType); ok {
+ // return a function name for the conversion
+ return fmt.Sprintf("%s(%s)", codegen.HelperName(source, target, p.helperPrefix), sourceVar)
+ }
+
+ if typ.Kind() != expr.IntKind && typ.Kind() != expr.UIntKind {
+ return sourceVar
+ }
+ if p.proto {
+ return fmt.Sprintf("%s(%s)", protoBufNativeGoTypeName(typ), sourceVar)
+ }
+ return fmt.Sprintf("%s(%s)", codegen.GoNativeTypeName(typ), sourceVar)
+}
+
+// transformObject returns the code to transform source attribute of object
+// type to target attribute of object type. It returns an error if source
+// and target are not compatible for transformation.
+func (p *protoBufTransformer) TransformObject(source, target *codegen.ContextualAttribute, ta *codegen.TransformAttrs) (string, error) {
+ return codegen.GoObjectTransform(source, target, ta, p)
+}
+
+// transformArray returns the code to transform source attribute of array
+// type to target attribute of array type. It returns an error if source
+// and target are not compatible for transformation.
+func (p *protoBufTransformer) TransformArray(source, target *codegen.ContextualAttribute, ta *codegen.TransformAttrs) (string, error) {
+ sourceArr := expr.AsArray(source.Attribute.Expr().Type)
+ if sourceArr == nil {
+ return "", fmt.Errorf("source is not an array type: received %T", source.Attribute.Expr().Type)
+ }
+ targetArr := expr.AsArray(target.Attribute.Expr().Type)
+ if targetArr == nil {
+ return "", fmt.Errorf("target is not an array type: received %T", target.Attribute.Expr().Type)
+ }
+
+ source = source.Dup(sourceArr.ElemType, true)
+ target = target.Dup(targetArr.ElemType, true)
+ targetRef := target.Attribute.Ref()
+
+ var code string
+
+ // If targetInit is set, the target array element is in a nested state.
+ // See grpc/docs/FAQ.md.
+ if p.targetInit != "" {
+ assign := "="
+ if ta.NewVar {
+ assign = ":="
+ }
+ code = fmt.Sprintf("%s %s &%s{}\n", ta.TargetVar, assign, p.targetInit)
+ ta = &codegen.TransformAttrs{
+ SourceVar: ta.SourceVar,
+ TargetVar: ta.TargetVar + ".Field",
+ NewVar: false,
+ }
+ p.targetInit = ""
+ }
+ if err := codegen.IsCompatible(source.Attribute.Expr().Type, target.Attribute.Expr().Type, ta.SourceVar+"[0]", ta.TargetVar+"[0]"); err != nil {
+ if p.proto {
+ p.targetInit = target.Attribute.Name()
+ tAtt := unwrapAttr(expr.DupAtt(targetArr.ElemType))
+ target = target.Dup(tAtt, true)
+ } else {
+ sAtt := unwrapAttr(expr.DupAtt(sourceArr.ElemType))
+ source = source.Dup(sAtt, true)
+ ta = &codegen.TransformAttrs{
+ SourceVar: ta.SourceVar + ".Field",
+ TargetVar: ta.TargetVar,
+ NewVar: ta.NewVar,
+ }
+ }
+ if err := codegen.IsCompatible(source.Attribute.Expr().Type, target.Attribute.Expr().Type, ta.SourceVar+"[0]", ta.TargetVar+"[0]"); err != nil {
+ return "", err
+ }
+ }
+
+ data := map[string]interface{}{
+ "Transformer": p,
+ "ElemTypeRef": targetRef,
+ "SourceElem": source,
+ "TargetElem": target,
+ "SourceVar": ta.SourceVar,
+ "TargetVar": ta.TargetVar,
+ "NewVar": ta.NewVar,
+ }
+ c, err := codegen.RunGoArrayTemplate(data)
+ if err != nil {
+ return "", err
+ }
+ return code + c, nil
+}
+
+// transformMap returns the code to transform source attribute of map
+// type to target attribute of map type. It returns an error if source
+// and target are not compatible for transformation.
+func (p *protoBufTransformer) TransformMap(source, target *codegen.ContextualAttribute, ta *codegen.TransformAttrs) (string, error) {
+ sourceType := source.Attribute.Expr().Type
+ targetType := target.Attribute.Expr().Type
+ sourceMap := expr.AsMap(sourceType)
+ if sourceMap == nil {
+ return "", fmt.Errorf("source is not a map type: received %T", sourceType)
+ }
+ targetMap := expr.AsMap(targetType)
+ if targetMap == nil {
+ return "", fmt.Errorf("target is not a map type: received %T", targetType)
+ }
+
+ // Target map key cannot be nested in protocol buffers. So no need to worry
+ // about unwrapping.
+ sourceKey := source.Dup(sourceMap.KeyType, true)
+ targetKey := target.Dup(targetMap.KeyType, true)
+ if err := codegen.IsCompatible(sourceKey.Attribute.Expr().Type, targetKey.Attribute.Expr().Type, ta.SourceVar+"[key]", ta.TargetVar+"[key]"); err != nil {
+ return "", err
+ }
+ sourceElem := source.Dup(sourceMap.ElemType, true)
+ targetElem := target.Dup(targetMap.ElemType, true)
+ targetElemRef := targetElem.Attribute.Ref()
+
+ var code string
+
+ // If targetInit is set, the target map element is in a nested state.
+ // See grpc/docs/FAQ.md.
+ if p.targetInit != "" {
+ assign := "="
+ if ta.NewVar {
+ assign = ":="
+ }
+ code = fmt.Sprintf("%s %s &%s{}\n", ta.TargetVar, assign, p.targetInit)
+ ta = &codegen.TransformAttrs{
+ SourceVar: ta.SourceVar,
+ TargetVar: ta.TargetVar + ".Field",
+ NewVar: false,
+ }
+ p.targetInit = ""
+ }
+ if err := codegen.IsCompatible(sourceMap.ElemType.Type, targetMap.ElemType.Type, ta.SourceVar+"[*]", ta.TargetVar+"[*]"); err != nil {
+ if p.proto {
+ p.targetInit = targetElem.Attribute.Name()
+ tAtt := unwrapAttr(expr.DupAtt(targetMap.ElemType))
+ targetElem = target.Dup(tAtt, true)
+ } else {
+ sAtt := unwrapAttr(expr.DupAtt(sourceMap.ElemType))
+ sourceElem = source.Dup(sAtt, true)
+ ta = &codegen.TransformAttrs{
+ SourceVar: ta.SourceVar + ".Field",
+ TargetVar: ta.TargetVar,
+ NewVar: ta.NewVar,
+ }
+ }
+ if err := codegen.IsCompatible(sourceElem.Attribute.Expr().Type, targetElem.Attribute.Expr().Type, ta.SourceVar+"[*]", ta.TargetVar+"[*]"); err != nil {
+ return "", err
+ }
+ }
+ data := map[string]interface{}{
+ "Transformer": p,
+ "KeyTypeRef": targetKey.Attribute.Ref(),
+ "ElemTypeRef": targetElemRef,
+ "SourceKey": sourceKey,
+ "TargetKey": targetKey,
+ "SourceElem": sourceElem,
+ "TargetElem": targetElem,
+ "SourceVar": ta.SourceVar,
+ "TargetVar": ta.TargetVar,
+ "NewVar": ta.NewVar,
+ "TargetMap": targetMap,
+ }
+ c, err := codegen.RunGoMapTemplate(data)
+ if err != nil {
+ return "", err
+ }
+ return code + c, nil
+}
diff --git a/vendor/goa.design/goa/grpc/codegen/server.go b/vendor/goa.design/goa/grpc/codegen/server.go
new file mode 100644
index 000000000..0dffb57e1
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/server.go
@@ -0,0 +1,630 @@
+package codegen
+
+import (
+ "fmt"
+ "path/filepath"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/codegen/service"
+ "goa.design/goa/expr"
+)
+
+// ServerFiles returns all the server gRPC transport files.
+func ServerFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ svcLen := len(root.API.GRPC.Services)
+ fw := make([]*codegen.File, 2*svcLen)
+ for i, svc := range root.API.GRPC.Services {
+ fw[i] = server(genpkg, svc)
+ }
+ for i, svc := range root.API.GRPC.Services {
+ fw[i+svcLen] = serverEncodeDecode(genpkg, svc)
+ }
+ return fw
+}
+
+// server returns the files defining the gRPC server.
+func server(genpkg string, svc *expr.GRPCServiceExpr) *codegen.File {
+ path := filepath.Join(codegen.Gendir, "grpc", codegen.SnakeCase(svc.Name()), "server", "server.go")
+ data := GRPCServices.Get(svc.Name())
+ title := fmt.Sprintf("%s GRPC server", svc.Name())
+ sections := []*codegen.SectionTemplate{
+ codegen.Header(title, "server", []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "goa.design/goa"},
+ {Path: "google.golang.org/grpc/codes"},
+ {Path: "goa.design/goa/grpc", Name: "goagrpc"},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name())), Name: data.Service.PkgName},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name()), "views"), Name: data.Service.ViewsPkg},
+ {Path: filepath.Join(genpkg, "grpc", codegen.SnakeCase(svc.Name()), pbPkgName)},
+ }),
+ }
+
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "server-struct",
+ Source: serverStructT,
+ Data: data,
+ })
+ for _, e := range data.Endpoints {
+ if e.ServerStream != nil {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "server-stream-struct-type",
+ Source: streamStructTypeT,
+ Data: e.ServerStream,
+ })
+ }
+ }
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "server-init",
+ Source: serverInitT,
+ Data: data,
+ })
+ for _, e := range data.Endpoints {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "handler-init",
+ Source: handlerInitT,
+ Data: e,
+ })
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "server-grpc-interface",
+ Source: serverGRPCInterfaceT,
+ Data: e,
+ })
+ }
+ for _, e := range data.Endpoints {
+ if e.ServerStream != nil {
+ if e.ServerStream.SendConvert != nil {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "server-stream-send",
+ Source: streamSendT,
+ Data: e.ServerStream,
+ })
+ }
+ if e.Method.StreamKind == expr.ClientStreamKind || e.Method.StreamKind == expr.BidirectionalStreamKind {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "server-stream-recv",
+ Source: streamRecvT,
+ Data: e.ServerStream,
+ })
+ }
+ if e.ServerStream.MustClose {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "server-stream-close",
+ Source: streamCloseT,
+ Data: e.ServerStream,
+ })
+ }
+ if e.Method.ViewedResult != nil && e.Method.ViewedResult.ViewName == "" {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "server-stream-set-view",
+ Source: streamSetViewT,
+ Data: e.ServerStream,
+ })
+ }
+ }
+ }
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+// serverEncodeDecode returns the file defining the gRPC server encoding and
+// decoding logic.
+func serverEncodeDecode(genpkg string, svc *expr.GRPCServiceExpr) *codegen.File {
+ path := filepath.Join(codegen.Gendir, "grpc", codegen.SnakeCase(svc.Name()), "server", "encode_decode.go")
+ data := GRPCServices.Get(svc.Name())
+ title := fmt.Sprintf("%s GRPC server encoders and decoders", svc.Name())
+ sections := []*codegen.SectionTemplate{
+ codegen.Header(title, "server", []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "strings"},
+ {Path: "strconv"},
+ {Path: "google.golang.org/grpc"},
+ {Path: "google.golang.org/grpc/metadata"},
+ {Path: "goa.design/goa", Name: "goa"},
+ {Path: "goa.design/goa/grpc", Name: "goagrpc"},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name())), Name: data.Service.PkgName},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name()), "views"), Name: data.Service.ViewsPkg},
+ {Path: filepath.Join(genpkg, "grpc", codegen.SnakeCase(svc.Name()), pbPkgName)},
+ }),
+ }
+
+ for _, e := range data.Endpoints {
+ if e.Response.ServerConvert != nil {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "response-encoder",
+ Source: responseEncoderT,
+ Data: e,
+ FuncMap: map[string]interface{}{
+ "typeConversionData": typeConversionData,
+ "metadataEncodeDecodeData": metadataEncodeDecodeData,
+ },
+ })
+ }
+ if e.PayloadRef != "" {
+ fm := transTmplFuncs(svc)
+ fm["isEmpty"] = isEmpty
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "request-decoder",
+ Source: requestDecoderT,
+ Data: e,
+ FuncMap: fm,
+ })
+ }
+ }
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+func transTmplFuncs(s *expr.GRPCServiceExpr) map[string]interface{} {
+ return map[string]interface{}{
+ "goTypeRef": func(dt expr.DataType) string {
+ return service.Services.Get(s.Name()).Scope.GoTypeRef(&expr.AttributeExpr{Type: dt})
+ },
+ }
+}
+
+// typeConversionData produces the template data suitable for executing the
+// "type_conversion" template.
+func typeConversionData(dt expr.DataType, varName string, target string) map[string]interface{} {
+ return map[string]interface{}{
+ "Type": dt,
+ "VarName": varName,
+ "Target": target,
+ }
+}
+
+// metadataEncodeDecodeData produces the template data suitable for executing the
+// "metadata_decoder" and "metadata_encoder" template.
+func metadataEncodeDecodeData(md *MetadataData, vname string) map[string]interface{} {
+ return map[string]interface{}{
+ "Metadata": md,
+ "VarName": vname,
+ }
+}
+
+// input: ServiceData
+const serverStructT = `{{ printf "%s implements the %s.%s interface." .ServerStruct .PkgName .ServerInterface | comment }}
+type {{ .ServerStruct }} struct {
+{{- range .Endpoints }}
+ {{ .Method.VarName }}H {{ if .ServerStream }}goagrpc.StreamHandler{{ else }}goagrpc.UnaryHandler{{ end }}
+{{- end }}
+}
+
+// ErrorNamer is an interface implemented by generated error structs that
+// exposes the name of the error as defined in the expr.
+type ErrorNamer interface {
+ ErrorName() string
+}
+`
+
+// streamStructTypeT renders the server and client struct types that
+// implements the client and server service stream interfaces.
+// input: StreamData
+const streamStructTypeT = `{{ printf "%s implements the %s interface." .VarName .ServiceInterface | comment }}
+type {{ .VarName }} struct {
+ stream {{ .Interface }}
+{{- if .Endpoint.Method.ViewedResult }}
+ view string
+{{- end }}
+}
+`
+
+// input: ServiceData
+const serverInitT = `{{ printf "%s instantiates the server struct with the %s service endpoints." .ServerInit .Service.Name | comment }}
+func {{ .ServerInit }}(e *{{ .Service.PkgName }}.Endpoints{{ if .HasUnaryEndpoint }}, uh goagrpc.UnaryHandler{{ end }}{{ if .HasStreamingEndpoint }}, sh goagrpc.StreamHandler{{ end }}) *{{ .ServerStruct }} {
+ return &{{ .ServerStruct }}{
+ {{- range .Endpoints }}
+ {{ .Method.VarName }}H: New{{ .Method.VarName }}Handler(e.{{ .Method.VarName }}{{ if .ServerStream }}, sh{{ else }}, uh{{ end }}),
+ {{- end }}
+ }
+}
+`
+
+// input: EndpointData
+const handlerInitT = `{{ printf "New%sHandler creates a gRPC handler which serves the %q service %q endpoint." .Method.VarName .ServiceName .Method.Name | comment }}
+func New{{ .Method.VarName }}Handler(endpoint goa.Endpoint, h goagrpc.{{ if .ServerStream }}Stream{{ else }}Unary{{ end }}Handler) goagrpc.{{ if .ServerStream }}Stream{{ else }}Unary{{ end }}Handler {
+ if h == nil {
+ h = goagrpc.New{{ if .ServerStream }}Stream{{ else }}Unary{{ end }}Handler(endpoint, Decode{{ .Method.VarName }}Request{{ if not .ServerStream }}, Encode{{ .Method.VarName }}Response{{ end }})
+ }
+ return h
+}
+`
+
+// input: EndpointData
+const serverGRPCInterfaceT = `{{ printf "%s implements the %q method in %s.%s interface." .Method.VarName .Method.VarName .PkgName .ServerInterface | comment }}
+func (s *{{ .ServerStruct }}) {{ .Method.VarName }}(
+ {{- if not .ServerStream }}ctx context.Context, {{ end }}
+ {{- if not .Method.StreamingPayload }}message {{ .Request.Message.Ref }}{{ if .ServerStream }}, {{ end }}{{ end }}
+ {{- if .ServerStream }}stream {{ .ServerStream.Interface }}{{ end }}) {{ if .ServerStream }}error{{ else if .Response.Message }}({{ .Response.Message.Ref }}, error{{ if .Response.Message }}){{ end }}{{ end }} {
+{{- if .ServerStream }}
+ p, err := s.{{ .Method.VarName }}H.Decode(stream.Context(), {{ if .Method.StreamingPayload }}nil{{ else }}message{{ end }})
+ {{- template "handle_error" . }}
+ ep := &{{ .ServicePkgName }}.{{ .Method.VarName }}EndpointInput{
+ Stream: &{{ .ServerStream.VarName }}{stream: stream},
+ {{- if .PayloadRef }}
+ Payload: p.({{ .PayloadRef }}),
+ {{- end }}
+ }
+ err = s.{{ .Method.VarName }}H.Handle(stream.Context(), ep)
+{{- else }}
+ resp, err := s.{{ .Method.VarName }}H.Handle(ctx, message)
+{{- end }}
+ {{- template "handle_error" . }}
+ return {{ if not $.ServerStream }}resp.({{ .Response.ServerConvert.TgtRef }}), {{ end }}nil
+}
+
+{{- define "handle_error" }}
+ if err != nil {
+ {{- if .Errors }}
+ if en, ok := err.(ErrorNamer); ok {
+ switch en.ErrorName() {
+ {{- range .Errors }}
+ case {{ printf "%q" .Name }}:
+ return {{ if not $.ServerStream }}nil, {{ end }}goagrpc.NewStatusError({{ .Response.StatusCode }}, err)
+ {{- end }}
+ }
+ }
+ {{- end }}
+ return {{ if not $.ServerStream }}nil, {{ end }}goagrpc.EncodeError(err)
+ }
+{{- end }}
+`
+
+// input: EndpointData
+const requestDecoderT = `{{ printf "Decode%sRequest decodes requests sent to %q service %q endpoint." .Method.VarName .ServiceName .Method.Name | comment }}
+func Decode{{ .Method.VarName }}Request(ctx context.Context, v interface{}, md metadata.MD) (interface{}, error) {
+{{- if .Request.Metadata }}
+ var (
+ {{- range .Request.Metadata }}
+ {{ .VarName }} {{ .TypeRef }}
+ {{- end }}
+ err error
+ )
+ {
+ {{- range .Request.Metadata }}
+ {{- if or (eq .Type.Name "string") (eq .Type.Name "any") }}
+ {{- if .Required }}
+ if vals := md.Get({{ printf "%q" .Name }}); len(vals) == 0 {
+ err = goa.MergeErrors(err, goa.MissingFieldError({{ printf "%q" .Name }}, "metadata"))
+ } else {
+ {{ .VarName }} = vals[0]
+ }
+ {{- else }}
+ if vals := md.Get({{ printf "%q" .Name }}); len(vals) > 0 {
+ {{ .VarName }} = vals[0]
+ }
+ {{- end }}
+ {{- else if .StringSlice }}
+ {{- if .Required }}
+ if vals := md.Get({{ printf "%q" .Name }}); len(vals) == 0 {
+ err = goa.MergeErrors(err, goa.MissingFieldError({{ printf "%q" .Name }}, "metadata"))
+ } else {
+ {{ .VarName }} = vals
+ }
+ {{- else }}
+ {{ .VarName }} = md.Get({{ printf "%q" .Name }})
+ {{- end }}
+ {{- else if .Slice }}
+ {{- if .Required }}
+ if {{ .VarName }}Raw := md.Get({{ printf "%q" .Name }}); len({{ .VarName }}Raw) == 0 {
+ err = goa.MergeErrors(err, goa.MissingFieldError({{ printf "%q" .Name }}, "metadata"))
+ } else {
+ {{- template "slice_conversion" . }}
+ }
+ {{- else }}
+ if {{ .VarName }}Raw := md.Get({{ printf "%q" .Name }}); len({{ .VarName }}Raw) > 0 {
+ {{- template "slice_conversion" . }}
+ }
+ {{- end }}
+ {{- else }}
+ {{- if .Required }}
+ if vals := md.Get({{ printf "%q" .Name }}); len(vals) == 0 {
+ err = goa.MergeErrors(err, goa.MissingFieldError({{ printf "%q" .Name }}, "metadata"))
+ } else {
+ {{ .VarName }}Raw = vals[0]
+ {{ template "type_conversion" . }}
+ }
+ {{- else }}
+ if vals := md.Get({{ printf "%q" .Name }}); len(vals) > 0 {
+ {{ .VarName }}Raw = vals[0]
+ {{ template "type_conversion" . }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- if .Validate }}
+ {{ .Validate }}
+ {{- end }}
+ {{- end }}
+ }
+{{- end }}
+{{- if and (not .Method.StreamingPayload) (not (isEmpty .Request.Message.Type)) }}
+ var (
+ message {{ .Request.ServerConvert.SrcRef }}
+ ok bool
+ {{- if not .Request.Metadata }}
+ err error
+ {{- end }}
+ )
+ {
+ if message, ok = v.({{ .Request.ServerConvert.SrcRef }}); !ok {
+ return nil, goagrpc.ErrInvalidType("{{ .ServiceName }}", "{{ .Method.Name }}", "{{ .Request.Message.Ref }}", v)
+ }
+ {{- if .Request.ServerConvert.Validation }}
+ err = {{ .Request.ServerConvert.Validation.Name }}(message)
+ {{- end }}
+ }
+{{- end }}
+ var (
+ payload {{ .PayloadRef }}
+ {{- if and (not .Request.Metadata) .Method.StreamingPayload }}
+ err error
+ {{- end }}
+ )
+ {
+ {{- if .Request.ServerConvert }}
+ payload = {{ .Request.ServerConvert.Init.Name }}({{ range .Request.ServerConvert.Init.Args }}{{ .Name }}, {{ end }})
+ {{- else }}
+ payload = {{ (index .Request.Metadata 0).VarName }}
+ {{- end }}
+{{- range .MetadataSchemes }}
+ {{- if ne .Type "Basic" }}
+ {{- if not .CredRequired }}
+ if payload.{{ .CredField }} != nil {
+ {{- end }}
+ if strings.Contains({{ if .CredPointer }}*{{ end }}payload.{{ .CredField }}, " ") {
+ // Remove authorization scheme prefix (e.g. "Bearer")
+ cred := strings.SplitN({{ if .CredPointer }}*{{ end }}payload.{{ .CredField }}, " ", 2)[1]
+ payload.{{ .CredField }} = {{ if .CredPointer }}&{{ end }}cred
+ }
+ {{- if not .CredRequired }}
+ }
+ {{- end }}
+ {{- end }}
+{{- end }}
+ }
+ return payload, err
+}
+` + convertStringToTypeT
+
+// input: EndpointData
+const responseEncoderT = `{{ printf "Encode%sResponse encodes responses from the %q service %q endpoint." .Method.VarName .ServiceName .Method.Name | comment }}
+func Encode{{ .Method.VarName }}Response(ctx context.Context, v interface{}, hdr, trlr *metadata.MD) (interface{}, error) {
+{{- if .ViewedResultRef }}
+ vres, ok := v.({{ .ViewedResultRef }})
+ if !ok {
+ return nil, goagrpc.ErrInvalidType("{{ .ServiceName }}", "{{ .Method.Name }}", "{{ .ViewedResultRef }}", v)
+ }
+ result := vres.Projected
+ (*hdr).Append("goa-view", vres.View)
+{{- else if .ResultRef }}
+ result, ok := v.({{ .ResultRef }})
+ if !ok {
+ return nil, goagrpc.ErrInvalidType("{{ .ServiceName }}", "{{ .Method.Name }}", "{{ .ResultRef }}", v)
+ }
+{{- end }}
+ resp := {{ .Response.ServerConvert.Init.Name }}({{ range .Response.ServerConvert.Init.Args }}{{ .Name }}, {{ end }})
+{{- range .Response.Headers }}
+ {{ template "metadata_encoder" (metadataEncodeDecodeData . "(*hdr)") }}
+{{- end }}
+{{- range .Response.Trailers }}
+ {{ template "metadata_encoder" (metadataEncodeDecodeData . "(*trlr)") }}
+{{- end }}
+ return resp, nil
+}
+
+{{- define "metadata_encoder" }}
+ {{- if .Metadata.StringSlice }}
+ {{ .VarName }}.Append({{ printf "%q" .Metadata.Name }}, res.{{ .Metadata.FieldName }}...)
+ {{- else if .Metadata.Slice }}
+ for _, value := range res.{{ .Metadata.FieldName }} {
+ {{ template "string_conversion" (typeConversionData .Metadata.Type.ElemType.Type "valueStr" "value") }}
+ {{ .VarName }}.Append({{ printf "%q" .Metadata.Name }}, valueStr)
+ }
+ {{- else }}
+ {{- if .Metadata.Pointer }}
+ if res.{{ .Metadata.FieldName }} != nil {
+ {{- end }}
+ {{ .VarName }}.Append({{ printf "%q" .Metadata.Name }},
+ {{- if eq .Metadata.Type.Name "bytes" }} string(
+ {{- else if not (eq .Metadata.Type.Name "string") }} fmt.Sprintf("%v",
+ {{- end }}
+ {{- if .Metadata.Pointer }}*{{ end }}res.{{ .Metadata.FieldName }}
+ {{- if or (eq .Metadata.Type.Name "bytes") (not (eq .Metadata.Type.Name "string")) }})
+ {{- end }})
+ {{- if .Metadata.Pointer }}
+ }
+ {{- end }}
+ {{- end }}
+{{- end }}
+` + convertTypeToStringT
+
+// input: TypeData
+const convertStringToTypeT = `{{- define "slice_conversion" }}
+ {{ .VarName }} = make({{ goTypeRef .Type }}, len({{ .VarName }}Raw))
+ for i, rv := range {{ .VarName }}Raw {
+ {{- template "slice_item_conversion" . }}
+ }
+{{- end }}
+
+{{- define "slice_item_conversion" }}
+ {{- if eq .Type.ElemType.Type.Name "string" }}
+ {{ .VarName }}[i] = rv
+ {{- else if eq .Type.ElemType.Type.Name "bytes" }}
+ {{ .VarName }}[i] = []byte(rv)
+ {{- else if eq .Type.ElemType.Type.Name "int" }}
+ v, err2 := strconv.ParseInt(rv, 10, strconv.IntSize)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of integers"))
+ }
+ {{ .VarName }}[i] = int(v)
+ {{- else if eq .Type.ElemType.Type.Name "int32" }}
+ v, err2 := strconv.ParseInt(rv, 10, 32)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of integers"))
+ }
+ {{ .VarName }}[i] = int32(v)
+ {{- else if eq .Type.ElemType.Type.Name "int64" }}
+ v, err2 := strconv.ParseInt(rv, 10, 64)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of integers"))
+ }
+ {{ .VarName }}[i] = v
+ {{- else if eq .Type.ElemType.Type.Name "uint" }}
+ v, err2 := strconv.ParseUint(rv, 10, strconv.IntSize)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of unsigned integers"))
+ }
+ {{ .VarName }}[i] = uint(v)
+ {{- else if eq .Type.ElemType.Type.Name "uint32" }}
+ v, err2 := strconv.ParseUint(rv, 10, 32)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of unsigned integers"))
+ }
+ {{ .VarName }}[i] = uint32(v)
+ {{- else if eq .Type.ElemType.Type.Name "uint64" }}
+ v, err2 := strconv.ParseUint(rv, 10, 64)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of unsigned integers"))
+ }
+ {{ .VarName }}[i] = v
+ {{- else if eq .Type.ElemType.Type.Name "float32" }}
+ v, err2 := strconv.ParseFloat(rv, 32)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of floats"))
+ }
+ {{ .VarName }}[i] = float32(v)
+ {{- else if eq .Type.ElemType.Type.Name "float64" }}
+ v, err2 := strconv.ParseFloat(rv, 64)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of floats"))
+ }
+ {{ .VarName }}[i] = v
+ {{- else if eq .Type.ElemType.Type.Name "boolean" }}
+ v, err2 := strconv.ParseBool(rv)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of booleans"))
+ }
+ {{ .VarName }}[i] = v
+ {{- else if eq .Type.ElemType.Type.Name "any" }}
+ {{ .VarName }}[i] = rv
+ {{- else }}
+ // unsupported slice type {{ .Type.ElemType.Type.Name }} for var {{ .VarName }}
+ {{- end }}
+{{- end }}
+
+{{- define "type_conversion" }}
+ {{- if eq .Type.Name "bytes" }}
+ {{ .VarName }} = []byte({{ .VarName }}Raw)
+ {{- else if eq .Type.Name "int" }}
+ v, err2 := strconv.ParseInt({{ .VarName }}Raw, 10, strconv.IntSize)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "integer"))
+ }
+ {{- if .Pointer }}
+ pv := int(v)
+ {{ .VarName }} = &pv
+ {{- else }}
+ {{ .VarName }} = int(v)
+ {{- end }}
+ {{- else if eq .Type.Name "int32" }}
+ v, err2 := strconv.ParseInt({{ .VarName }}Raw, 10, 32)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "integer"))
+ }
+ {{- if .Pointer }}
+ pv := int32(v)
+ {{ .VarName }} = &pv
+ {{- else }}
+ {{ .VarName }} = int32(v)
+ {{- end }}
+ {{- else if eq .Type.Name "int64" }}
+ v, err2 := strconv.ParseInt({{ .VarName }}Raw, 10, 64)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "integer"))
+ }
+ {{ .VarName }} = {{ if .Pointer}}&{{ end }}v
+ {{- else if eq .Type.Name "uint" }}
+ v, err2 := strconv.ParseUint({{ .VarName }}Raw, 10, strconv.IntSize)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "unsigned integer"))
+ }
+ {{- if .Pointer }}
+ pv := uint(v)
+ {{ .VarName }} = &pv
+ {{- else }}
+ {{ .VarName }} = uint(v)
+ {{- end }}
+ {{- else if eq .Type.Name "uint32" }}
+ v, err2 := strconv.ParseUint({{ .VarName }}Raw, 10, 32)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "unsigned integer"))
+ }
+ {{- if .Pointer }}
+ pv := uint32(v)
+ {{ .VarName }} = &pv
+ {{- else }}
+ {{ .VarName }} = uint32(v)
+ {{- end }}
+ {{- else if eq .Type.Name "uint64" }}
+ v, err2 := strconv.ParseUint({{ .VarName }}Raw, 10, 64)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "unsigned integer"))
+ }
+ {{ .VarName }} = {{ if .Pointer }}&{{ end }}v
+ {{- else if eq .Type.Name "float32" }}
+ v, err2 := strconv.ParseFloat({{ .VarName }}Raw, 32)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "float"))
+ }
+ {{- if .Pointer }}
+ pv := float32(v)
+ {{ .VarName }} = &pv
+ {{- else }}
+ {{ .VarName }} = float32(v)
+ {{- end }}
+ {{- else if eq .Type.Name "float64" }}
+ v, err2 := strconv.ParseFloat({{ .VarName }}Raw, 64)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "float"))
+ }
+ {{ .VarName }} = {{ if .Pointer }}&{{ end }}v
+ {{- else if eq .Type.Name "boolean" }}
+ v, err2 := strconv.ParseBool({{ .VarName }}Raw)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "boolean"))
+ }
+ {{ .VarName }} = {{ if .Pointer }}&{{ end }}v
+ {{- else }}
+ // unsupported type {{ .Type.Name }} for var {{ .VarName }}
+ {{- end }}
+{{- end }}
+`
+
+// input: TypeData
+const convertTypeToStringT = `{{- define "string_conversion" }}
+ {{- if eq .Type.Name "boolean" -}}
+ {{ .VarName }} := strconv.FormatBool({{ .Target }})
+ {{- else if eq .Type.Name "int" -}}
+ {{ .VarName }} := strconv.Itoa({{ .Target }})
+ {{- else if eq .Type.Name "int32" -}}
+ {{ .VarName }} := strconv.FormatInt(int64({{ .Target }}), 10)
+ {{- else if eq .Type.Name "int64" -}}
+ {{ .VarName }} := strconv.FormatInt({{ .Target }}, 10)
+ {{- else if eq .Type.Name "uint" -}}
+ {{ .VarName }} := strconv.FormatUint(uint64({{ .Target }}), 10)
+ {{- else if eq .Type.Name "uint32" -}}
+ {{ .VarName }} := strconv.FormatUint(uint64({{ .Target }}), 10)
+ {{- else if eq .Type.Name "uint64" -}}
+ {{ .VarName }} := strconv.FormatUint({{ .Target }}, 10)
+ {{- else if eq .Type.Name "float32" -}}
+ {{ .VarName }} := strconv.FormatFloat(float64({{ .Target }}), 'f', -1, 32)
+ {{- else if eq .Type.Name "float64" -}}
+ {{ .VarName }} := strconv.FormatFloat({{ .Target }}, 'f', -1, 64)
+ {{- else if eq .Type.Name "string" -}}
+ {{ .VarName }} := {{ .Target }}
+ {{- else if eq .Type.Name "bytes" -}}
+ {{ .VarName }} := string({{ .Target }})
+ {{- else if eq .Type.Name "any" -}}
+ {{ .VarName }} := fmt.Sprintf("%v", {{ .Target }})
+ {{- else }}
+ // unsupported type {{ .Type.Name }} for field {{ .FieldName }}
+ {{- end }}
+{{- end }}
+`
diff --git a/vendor/goa.design/goa/grpc/codegen/server_types.go b/vendor/goa.design/goa/grpc/codegen/server_types.go
new file mode 100644
index 000000000..4d02a1d0c
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/server_types.go
@@ -0,0 +1,106 @@
+package codegen
+
+import (
+ "path/filepath"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// ServerTypeFiles returns the gRPC transport type files.
+func ServerTypeFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ fw := make([]*codegen.File, len(root.API.GRPC.Services))
+ seen := make(map[string]struct{})
+ for i, r := range root.API.GRPC.Services {
+ fw[i] = serverType(genpkg, r, seen)
+ }
+ return fw
+}
+
+// serverType returns the file containing the constructor functions to
+// transform the gRPC request types to the corresponding service payload types
+// and service result types to the corresponding gRPC response types.
+//
+// seen keeps track of the constructor names that have already been generated
+// to prevent duplicate code generation.
+func serverType(genpkg string, svc *expr.GRPCServiceExpr, seen map[string]struct{}) *codegen.File {
+ var (
+ path string
+ initData []*InitData
+ validated []*ValidationData
+
+ sd = GRPCServices.Get(svc.Name())
+ )
+ {
+ collect := func(c *ConvertData) {
+ if c.Init != nil {
+ initData = append(initData, c.Init)
+ }
+ }
+
+ path = filepath.Join(codegen.Gendir, "grpc", codegen.SnakeCase(svc.Name()), "server", "types.go")
+ for _, a := range svc.GRPCEndpoints {
+ ed := sd.Endpoint(a.Name())
+ if c := ed.Request.ServerConvert; c != nil {
+ collect(c)
+ }
+ if c := ed.Response.ServerConvert; c != nil {
+ collect(c)
+ }
+ if ed.ServerStream != nil {
+ if c := ed.ServerStream.SendConvert; c != nil {
+ collect(c)
+ }
+ if c := ed.ServerStream.RecvConvert; c != nil {
+ collect(c)
+ }
+ }
+ }
+
+ for _, v := range sd.Validations {
+ validated = append(validated, v)
+ }
+ }
+
+ header := codegen.Header(svc.Name()+" gRPC server types", "server",
+ []*codegen.ImportSpec{
+ {Path: "unicode/utf8"},
+ {Path: "goa.design/goa", Name: "goa"},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name())), Name: sd.Service.PkgName},
+ {Path: filepath.Join(genpkg, codegen.SnakeCase(svc.Name()), "views"), Name: sd.Service.ViewsPkg},
+ {Path: filepath.Join(genpkg, "grpc", codegen.SnakeCase(svc.Name()), pbPkgName)},
+ },
+ )
+ sections := []*codegen.SectionTemplate{header}
+ for _, init := range initData {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "server-type-init",
+ Source: typeInitT,
+ Data: init,
+ })
+ }
+ for _, data := range validated {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "server-validate",
+ Source: validateT,
+ Data: data,
+ })
+ }
+ for _, h := range sd.TransformHelpers {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "server-transform-helper",
+ Source: transformHelperT,
+ Data: h,
+ })
+ }
+
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+// input: TransformFunctionData
+const transformHelperT = `{{ printf "%s builds a value of type %s from a value of type %s." .Name .ResultTypeRef .ParamTypeRef | comment }}
+func {{ .Name }}(v {{ .ParamTypeRef }}) {{ .ResultTypeRef }} {
+ {{ .Code }}
+ return res
+}
+`
diff --git a/vendor/goa.design/goa/grpc/codegen/service_data.go b/vendor/goa.design/goa/grpc/codegen/service_data.go
new file mode 100644
index 000000000..37b3b8635
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/service_data.go
@@ -0,0 +1,1144 @@
+package codegen
+
+import (
+ "fmt"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/codegen/service"
+ "goa.design/goa/expr"
+)
+
+// GRPCServices holds the data computed from the design needed to generate the
+// transport code of the services.
+var GRPCServices = make(ServicesData)
+
+type (
+ // ServicesData encapsulates the data computed from the expr.
+ ServicesData map[string]*ServiceData
+
+	// ServiceData contains the data used to render the code related to a
+	// single service.
+	ServiceData struct {
+		// Service contains the related service data.
+		Service *service.Data
+		// PkgName is the name of the generated package in *.pb.go.
+		PkgName string
+		// Name is the service name.
+		Name string
+		// Description is the service description.
+		Description string
+		// Endpoints describes the gRPC service endpoints.
+		Endpoints []*EndpointData
+		// Messages describes the message data for this service.
+		Messages []*service.UserTypeData
+		// ServerStruct is the name of the gRPC server struct.
+		ServerStruct string
+		// ClientStruct is the name of the gRPC client struct.
+		ClientStruct string
+		// ServerInit is the name of the constructor of the server struct.
+		ServerInit string
+		// ClientInit is the name of the constructor of the client struct.
+		ClientInit string
+		// ServerInterface is the name of the gRPC server interface implemented
+		// by the service.
+		ServerInterface string
+		// ClientInterface is the name of the gRPC client interface implemented
+		// by the service.
+		ClientInterface string
+		// ClientInterfaceInit is the name of the client constructor function in
+		// the generated pb.go package.
+		ClientInterfaceInit string
+		// TransformHelpers is the list of transform functions required by the
+		// constructors.
+		TransformHelpers []*codegen.TransformFunctionData
+		// Validations is the validation logic for gRPC messages.
+		Validations []*ValidationData
+		// Scope is the name scope for protocol buffers.
+		Scope *codegen.NameScope
+	}
+
+	// EndpointData contains the data used to render the code related to
+	// gRPC endpoint.
+	EndpointData struct {
+		// ServiceName is the name of the service.
+		ServiceName string
+		// PkgName is the name of the generated package in *.pb.go.
+		PkgName string
+		// ServicePkgName is the name of the service package name.
+		ServicePkgName string
+		// Method is the data for the underlying method expression.
+		Method *service.MethodData
+		// PayloadRef is the fully qualified reference to the method payload.
+		PayloadRef string
+		// ResultRef is the fully qualified reference to the method result.
+		ResultRef string
+		// ViewedResultRef is the fully qualified reference to the viewed result.
+		ViewedResultRef string
+		// Request is the gRPC request data.
+		Request *RequestData
+		// Response is the gRPC response data.
+		Response *ResponseData
+		// MetadataSchemes lists all the security requirement schemes that
+		// apply to the method and are encoded in the request metadata.
+		MetadataSchemes []*service.SchemeData
+		// MessageSchemes lists all the security requirement schemes that
+		// apply to the method and are encoded in the request message.
+		MessageSchemes []*service.SchemeData
+		// Errors describes the method gRPC errors.
+		Errors []*ErrorData
+
+		// server side
+
+		// ServerStruct is the name of the gRPC server struct.
+		ServerStruct string
+		// ServerInterface is the name of the gRPC server interface implemented
+		// by the service.
+		ServerInterface string
+		// ServerStream is the server stream data.
+		ServerStream *StreamData
+
+		// client side
+
+		// ClientStruct is the name of the gRPC client struct.
+		ClientStruct string
+		// ClientInterface is the name of the gRPC client interface implemented
+		// by the service.
+		ClientInterface string
+		// ClientStream is the client stream data.
+		ClientStream *StreamData
+	}
+
+ // MetadataData describes a gRPC metadata field.
+ MetadataData struct {
+ // Name is the name of the metadata key.
+ Name string
+ // AttributeName is the name of the corresponding attribute.
+ AttributeName string
+ // Description is the metadata description.
+ Description string
+ // FieldName is the name of the struct field that holds the
+ // metadata value if any, empty string otherwise.
+ FieldName string
+ // VarName is the name of the Go variable used to read or
+ // convert the metadata value.
+ VarName string
+ // TypeName is the name of the type.
+ TypeName string
+ // TypeRef is the reference to the type.
+ TypeRef string
+ // Required is true if the metadata is required.
+ Required bool
+ // Pointer is true if and only the metadata variable is a pointer.
+ Pointer bool
+ // StringSlice is true if the metadata value type is array of strings.
+ StringSlice bool
+ // Slice is true if the metadata value type is an array.
+ Slice bool
+ // MapStringSlice is true if the metadata value type is a map of string
+ // slice.
+ MapStringSlice bool
+ // Map is true if the metadata value type is a map.
+ Map bool
+ // Type describes the datatype of the variable value. Mainly
+ // used for conversion.
+ Type expr.DataType
+ // Validate contains the validation code if any.
+ Validate string
+ // DefaultValue contains the default value if any.
+ DefaultValue interface{}
+ // Example is an example value.
+ Example interface{}
+ }
+
+ // ErrorData contains the error information required to generate the
+ // transport decode (client) and encode (server) code.
+ ErrorData struct {
+ // StatusCode is the response gRPC status code.
+ StatusCode string
+ // Name is the error name.
+ Name string
+ // Ref is a reference to the error type.
+ Ref string
+ // Response is the error response data.
+ Response *ResponseData
+ }
+
+ // RequestData describes a gRPC request.
+ RequestData struct {
+ // Description is the request description.
+ Description string
+ // Message is the gRPC request message.
+ Message *service.UserTypeData
+ // Metadata is the request metadata.
+ Metadata []*MetadataData
+ // ServerConvert is the request data with constructor function to
+ // initialize the method payload type from the generated payload type in
+ // *.pb.go.
+ ServerConvert *ConvertData
+ // ClientConvert is the request data with constructor function to
+ // initialize the generated payload type in *.pb.go from the
+ // method payload.
+ ClientConvert *ConvertData
+ // CLIArgs is the list of arguments for the command-line client.
+ // This is set only for the client side.
+ CLIArgs []*InitArgData
+ }
+
+ // ResponseData describes a gRPC success or error response.
+ ResponseData struct {
+ // StatusCode is the return code of the response.
+ StatusCode string
+ // Description is the response description.
+ Description string
+ // Message is the gRPC response message.
+ Message *service.UserTypeData
+ // Headers is the response header metadata.
+ Headers []*MetadataData
+ // Trailers is the response trailer metadata.
+ Trailers []*MetadataData
+ // ServerConvert is the type data with constructor function to
+ // initialize the generated response type in *.pb.go from the
+ // method result type or the projected result type.
+ ServerConvert *ConvertData
+ // ClientConvert is the type data with constructor function to
+ // initialize the method result type or the projected result type
+ // from the generated response type in *.pb.go.
+ ClientConvert *ConvertData
+ }
+
+	// ConvertData contains the data to convert source type to a target type.
+	// For request type, it contains data to transform gRPC request type to the
+	// corresponding payload type (server) and vice versa (client).
+	// For response type, it contains data to transform gRPC response type to the
+	// corresponding result type (client) and vice versa (server).
+	ConvertData struct {
+		// SrcName is the fully qualified name of the source type.
+		SrcName string
+		// SrcRef is the fully qualified reference to the source type.
+		SrcRef string
+		// TgtName is the fully qualified name of the target type.
+		TgtName string
+		// TgtRef is the fully qualified reference to the target type.
+		TgtRef string
+		// Init contains the data required to render the constructor if any
+		// to transform the source type to a target type.
+		Init *InitData
+		// Validation contains the data required to render the validation
+		// function (if any) for the source type.
+		Validation *ValidationData
+	}
+
+	// ValidationData contains the data necessary to render the validation
+	// function.
+	ValidationData struct {
+		// Name is the validation function name.
+		Name string
+		// Def is the validation function definition.
+		Def string
+		// ArgName is the name of the argument holding the message passed to
+		// the validation function.
+		ArgName string
+		// SrcName is the fully qualified name of the type being validated.
+		SrcName string
+		// SrcRef is the fully qualified reference to the type being validated.
+		SrcRef string
+	}
+
+ // InitData contains the data required to render a constructor.
+ InitData struct {
+ // Name is the constructor function name.
+ Name string
+ // Description is the function description.
+ Description string
+ // Args is the list of constructor arguments.
+ Args []*InitArgData
+ // ReturnVarName is the name of the variable to be returned.
+ ReturnVarName string
+ // ReturnTypeRef is the qualified (including the package name)
+ // reference to the return type.
+ ReturnTypeRef string
+ // ReturnIsStruct is true if the return type is a struct.
+ ReturnIsStruct bool
+ // Code is the transformation code.
+ Code string
+ }
+
+	// InitArgData represents a single constructor argument.
+	InitArgData struct {
+		// Name is the argument name.
+		Name string
+		// Description is the argument description.
+		Description string
+		// Ref is the reference to the argument, e.g. "&body".
+		Ref string
+		// FieldName is the name of the data structure field that should
+		// be initialized with the argument if any.
+		FieldName string
+		// TypeName is the argument type name.
+		TypeName string
+		// TypeRef is the argument type reference.
+		TypeRef string
+		// Pointer is true if a pointer to the arg should be used.
+		Pointer bool
+		// Required is true if the arg is required to build the payload.
+		Required bool
+		// DefaultValue is the default value of the arg.
+		DefaultValue interface{}
+		// Validate contains the validation code for the argument
+		// value if any.
+		Validate string
+		// Example is an example value.
+		Example interface{}
+	}
+
+ // StreamData contains data to render the stream struct type that implements
+ // the service stream interface.
+ StreamData struct {
+ // VarName is the name of the struct type.
+ VarName string
+ // Type is the stream type (client or server).
+ Type string
+ // ServiceInterface is the service interface that the struct implements.
+ ServiceInterface string
+ // Interface is the stream interface in *.pb.go stored in the struct.
+ Interface string
+ // Endpoint is the streaming endpoint data.
+ Endpoint *EndpointData
+ // SendName is the name of the send function.
+ SendName string
+ // SendDesc is the description for the send function.
+ SendDesc string
+ // SendRef is the fully qualified reference to the type sent across the
+ // stream.
+ SendRef string
+ // SendConvert is the type sent through the stream. It contains the
+ // constructor to convert the service send type to the type expected by
+ // the gRPC send type (in *.pb.go)
+ SendConvert *ConvertData
+ // RecvConvert is the type received through the stream. It contains the
+ // constructor to convert the gRPC type (in *.pb.go) to the service receive
+ // type.
+ RecvConvert *ConvertData
+ // RecvName is the name of the receive function.
+ RecvName string
+ // RecvDesc is the description for the recv function.
+ RecvDesc string
+ // RecvRef is the fully qualified reference to the type received from the
+ // stream.
+ RecvRef string
+ // MustClose indicates whether to generate the Close() function
+ // for the stream.
+ MustClose bool
+ }
+)
+
+const (
+	// pbPkgName is the directory name where the .proto file is generated and
+	// compiled. It is also used as the package qualifier for the generated
+	// protocol buffer types (see ClientInterfaceInit).
+	pbPkgName = "pb"
+)
+
+// Get retrieves the transport data for the service with the given name
+// computing it if needed. It returns nil if there is no service with the given
+// name.
+func (d ServicesData) Get(name string) *ServiceData {
+	if data, ok := d[name]; ok {
+		return data
+	}
+	// Use "gs" rather than "service" to avoid shadowing the imported
+	// goa.design/goa/codegen/service package used elsewhere in this file.
+	gs := expr.Root.API.GRPC.Service(name)
+	if gs == nil {
+		return nil
+	}
+	d[name] = d.analyze(gs)
+	return d[name]
+}
+
+// Endpoint returns the service method transport data for the endpoint with the
+// given name, nil if there isn't one.
+func (sd *ServiceData) Endpoint(name string) *EndpointData {
+	for i := range sd.Endpoints {
+		if ep := sd.Endpoints[i]; ep.Method.Name == name {
+			return ep
+		}
+	}
+	return nil
+}
+
+// HasUnaryEndpoint returns true if the service has at least one unary endpoint.
+// An endpoint is unary when it has no server stream data.
+func (sd *ServiceData) HasUnaryEndpoint() bool {
+	for i := range sd.Endpoints {
+		if sd.Endpoints[i].ServerStream == nil {
+			return true
+		}
+	}
+	return false
+}
+
+// HasStreamingEndpoint returns true if the service has at least one streaming
+// endpoint. An endpoint is streaming when it carries server stream data.
+func (sd *ServiceData) HasStreamingEndpoint() bool {
+	for i := range sd.Endpoints {
+		if sd.Endpoints[i].ServerStream != nil {
+			return true
+		}
+	}
+	return false
+}
+
+// ValidationFor returns the validation data (if any) for the given message.
+// It matches on the validated type name and returns nil when no validation
+// was recorded for that message.
+func (sd *ServiceData) ValidationFor(name string) *ValidationData {
+	for i := range sd.Validations {
+		if vd := sd.Validations[i]; vd.SrcName == name {
+			return vd
+		}
+	}
+	return nil
+}
+
+// analyze creates the data necessary to render the code of the given service.
+// It walks every gRPC endpoint of the service, converts the request and
+// response attributes into protocol buffer message types and collects the
+// request, response, security and streaming data needed by the generators.
+func (d ServicesData) analyze(gs *expr.GRPCServiceExpr) *ServiceData {
+	var (
+		sd      *ServiceData
+		seen    map[string]struct{}
+		svcVarN string
+
+		svc   = service.Services.Get(gs.Name())
+		scope = codegen.NewNameScope()
+	)
+	{
+		svcVarN = scope.HashedUnique(gs.ServiceExpr, codegen.Goify(svc.Name, true))
+		sd = &ServiceData{
+			Service:             svc,
+			Name:                svcVarN,
+			Description:         svc.Description,
+			PkgName:             pbPkgName,
+			ServerStruct:        "Server",
+			ClientStruct:        "Client",
+			ServerInit:          "New",
+			ClientInit:          "NewClient",
+			ServerInterface:     svcVarN + "Server",
+			ClientInterface:     svcVarN + "Client",
+			ClientInterfaceInit: fmt.Sprintf("%s.New%sClient", pbPkgName, svcVarN),
+			Scope:               scope,
+		}
+		seen = make(map[string]struct{})
+	}
+	for _, e := range gs.GRPCEndpoints {
+		en := protoBufify(e.Name(), true)
+
+		// convert request and response types to protocol buffer message types
+		makeProtoBufMessage(e.Request, en+"Request", sd.Scope)
+		if e.MethodExpr.StreamingPayload.Type != expr.Empty {
+			makeProtoBufMessage(e.StreamingRequest, en+"StreamingRequest", sd.Scope)
+		}
+		makeProtoBufMessage(e.Response.Message, en+"Response", sd.Scope)
+
+		// collect all the nested messages and return the top-level message
+		collect := func(att *expr.AttributeExpr) *service.UserTypeData {
+			msgs := collectMessages(att, sd, seen)
+			sd.Messages = append(sd.Messages, msgs...)
+			return msgs[0]
+		}
+
+		var (
+			payloadRef      string
+			resultRef       string
+			viewedResultRef string
+			errors          []*ErrorData
+
+			md = svc.Method(e.Name())
+		)
+		{
+			if e.MethodExpr.Payload.Type != expr.Empty {
+				payloadRef = svc.Scope.GoFullTypeRef(e.MethodExpr.Payload, svc.PkgName)
+			}
+			if e.MethodExpr.Result.Type != expr.Empty {
+				resultRef = svc.Scope.GoFullTypeRef(e.MethodExpr.Result, svc.PkgName)
+			}
+			if md.ViewedResult != nil {
+				viewedResultRef = md.ViewedResult.FullRef
+			}
+			errors = buildErrorsData(e, sd)
+		}
+
+		// build request data
+		var (
+			request *RequestData
+			reqMD   []*MetadataData
+
+			payload = service.TypeContext(e.MethodExpr.Payload, svc.PkgName, svc.Scope)
+			req     = protoBufContext(e.Request, sd.PkgName, sd.Scope)
+		)
+		{
+			reqMD = extractMetadata(e.Metadata, payload, svc.Scope)
+			request = &RequestData{
+				Description:   e.Request.Description,
+				Metadata:      reqMD,
+				ServerConvert: buildRequestConvertData(req, payload, reqMD, e, sd, true),
+				ClientConvert: buildRequestConvertData(req, payload, reqMD, e, sd, false),
+			}
+			// NOTE(review): expr.AsObject may return nil for non-object types;
+			// presumably e.Request.Type is always an object at this point since
+			// makeProtoBufMessage ran above — confirm upstream invariant.
+			if obj := expr.AsObject(e.Request.Type); len(*obj) > 0 {
+				// add the request message as the first argument to the CLI
+				request.CLIArgs = append(request.CLIArgs, &InitArgData{
+					Name:     "message",
+					Ref:      "message",
+					TypeName: protoBufGoFullTypeName(e.Request, sd.PkgName, sd.Scope),
+					TypeRef:  protoBufGoFullTypeRef(e.Request, sd.PkgName, sd.Scope),
+					Example:  e.Request.Example(expr.Root.API.Random()),
+				})
+			}
+			// pass the metadata as arguments to client CLI args
+			for _, m := range reqMD {
+				request.CLIArgs = append(request.CLIArgs, &InitArgData{
+					Name:      m.VarName,
+					Ref:       m.VarName,
+					FieldName: m.FieldName,
+					TypeName:  m.TypeName,
+					TypeRef:   m.TypeRef,
+					Pointer:   m.Pointer,
+					Required:  m.Required,
+					Validate:  m.Validate,
+					Example:   m.Example,
+				})
+			}
+			if e.StreamingRequest.Type != expr.Empty {
+				request.Message = collect(e.StreamingRequest)
+			} else {
+				request.Message = collect(e.Request)
+			}
+		}
+
+		// build response data
+		var (
+			response *ResponseData
+			hdrs     []*MetadataData
+			trlrs    []*MetadataData
+
+			resp   = protoBufContext(e.Response.Message, sd.PkgName, sd.Scope)
+			result = resultContext(e, sd)
+		)
+		{
+			hdrs = extractMetadata(e.Response.Headers, result, svc.Scope)
+			trlrs = extractMetadata(e.Response.Trailers, result, svc.Scope)
+			response = &ResponseData{
+				StatusCode:    statusCodeToGRPCConst(e.Response.StatusCode),
+				Description:   e.Response.Description,
+				Headers:       hdrs,
+				Trailers:      trlrs,
+				ServerConvert: buildResponseConvertData(resp, result, hdrs, trlrs, e, sd, true),
+				ClientConvert: buildResponseConvertData(resp, result, hdrs, trlrs, e, sd, false),
+			}
+			// If the endpoint is a streaming endpoint, no message is returned
+			// by gRPC. Hence, no need to set response message.
+			if e.Response.Message.Type != expr.Empty || !e.MethodExpr.IsStreaming() {
+				response.Message = collect(e.Response.Message)
+			}
+		}
+
+		// gather security requirements
+		var (
+			msgSch []*service.SchemeData
+			metSch []*service.SchemeData
+		)
+		{
+			for _, req := range e.Requirements {
+				for _, sch := range req.Schemes {
+					s := service.Scheme(md.Requirements, sch.SchemeName).Dup()
+					s.In = sch.In
+					switch s.In {
+					case "message":
+						msgSch = service.AppendScheme(msgSch, s)
+					default:
+						metSch = service.AppendScheme(metSch, s)
+					}
+				}
+			}
+		}
+		ed := &EndpointData{
+			ServiceName:     svc.Name,
+			PkgName:         sd.PkgName,
+			ServicePkgName:  svc.PkgName,
+			Method:          md,
+			PayloadRef:      payloadRef,
+			ResultRef:       resultRef,
+			ViewedResultRef: viewedResultRef,
+			Request:         request,
+			Response:        response,
+			MessageSchemes:  msgSch,
+			MetadataSchemes: metSch,
+			Errors:          errors,
+			ServerStruct:    sd.ServerStruct,
+			ServerInterface: sd.ServerInterface,
+			ClientStruct:    sd.ClientStruct,
+			ClientInterface: sd.ClientInterface,
+		}
+		sd.Endpoints = append(sd.Endpoints, ed)
+		// Stream data must be built after the endpoint is appended because
+		// buildStreamData looks the endpoint up via sd.Endpoint(e.Name()).
+		if e.MethodExpr.IsStreaming() {
+			ed.ServerStream = buildStreamData(e, sd, true)
+			ed.ClientStream = buildStreamData(e, sd, false)
+		}
+	}
+	return sd
+}
+
+// collectMessages recurses through the attribute to gather all the messages.
+// The seen map guards against both infinite recursion on recursive user types
+// and duplicate message definitions.
+func collectMessages(at *expr.AttributeExpr, sd *ServiceData, seen map[string]struct{}) (data []*service.UserTypeData) {
+	if at == nil {
+		return
+	}
+	collect := func(at *expr.AttributeExpr) []*service.UserTypeData {
+		return collectMessages(at, sd, seen)
+	}
+	switch dt := at.Type.(type) {
+	case expr.UserType:
+		if _, ok := seen[dt.Name()]; ok {
+			return nil
+		}
+		name := protoBufMessageName(at, sd.Scope)
+		ref := protoBufGoFullTypeRef(at, sd.PkgName, sd.Scope)
+		att := dt.Attribute()
+		if rt, ok := dt.(*expr.ResultTypeExpr); ok {
+			if a := unwrapAttr(expr.DupAtt(rt.Attribute())); expr.IsArray(a.Type) {
+				// result type collection
+				att = &expr.AttributeExpr{Type: expr.AsObject(rt)}
+			}
+		}
+		data = append(data, &service.UserTypeData{
+			Name:        dt.Name(),
+			VarName:     name,
+			Description: dt.Attribute().Description,
+			Def:         protoBufMessageDef(att, sd.Scope),
+			Ref:         ref,
+			Type:        dt,
+		})
+		// Record the validation logic (if any) so generated code can validate
+		// messages of this type; looked up later via ValidationFor.
+		ca := protoBufContext(at, "", sd.Scope)
+		if vDef := codegen.RecursiveValidationCode(ca, "message"); vDef != "" {
+			sd.Validations = append(sd.Validations, &ValidationData{
+				Name:    "Validate" + name,
+				Def:     vDef,
+				ArgName: "message",
+				SrcName: name,
+				SrcRef:  ref,
+			})
+		}
+		seen[dt.Name()] = struct{}{}
+		data = append(data, collect(att)...)
+	case *expr.Object:
+		for _, nat := range *dt {
+			data = append(data, collect(nat.Attribute)...)
+		}
+	case *expr.Array:
+		data = append(data, collect(dt.ElemType)...)
+	case *expr.Map:
+		data = append(data, collect(dt.KeyType)...)
+		data = append(data, collect(dt.ElemType)...)
+	}
+	return
+}
+
+// buildRequestConvertData builds the convert data for the server and client
+// requests.
+// * server side - converts generated gRPC request type in *.pb.go and the
+//                 gRPC metadata to method payload type.
+// * client side - converts method payload type to generated gRPC request
+//                 type in *.pb.go.
+//
+// svr param indicates that the convert data is generated for server side.
+func buildRequestConvertData(request, payload *codegen.ContextualAttribute, md []*MetadataData, e *expr.GRPCEndpointExpr, sd *ServiceData, svr bool) *ConvertData {
+	// Server-side: No need to build convert data if payload is not an object
+	// type and request message is empty.
+	if (svr && isEmpty(e.Request.Type) && !expr.IsObject(e.MethodExpr.Payload.Type)) ||
+		// Client-side: No need to build convert data if streaming payload since
+		// all attributes in method payload is encoded into request metadata.
+		(!svr && e.MethodExpr.IsPayloadStreaming()) {
+		return nil
+	}
+
+	var (
+		svc = sd.Service
+	)
+
+	if svr {
+		// server side
+		var (
+			data        *InitData
+			validations *ValidationData
+		)
+		{
+			data = buildInitData(request, payload, "message", "payload", false, sd)
+			validations = sd.ValidationFor(protoBufMessageName(request.Attribute.Expr(), sd.Scope))
+			// NOTE(review): buildInitData may return nil on transform error;
+			// the dereference below would then panic — confirm the DSL
+			// validation guarantees this cannot happen.
+			data.Description = fmt.Sprintf("%s builds the payload of the %q endpoint of the %q service from the gRPC request type.", data.Name, e.Name(), svc.Name)
+			for _, m := range md {
+				// pass the metadata as arguments to payload constructor in server
+				data.Args = append(data.Args, &InitArgData{
+					Name:      m.VarName,
+					Ref:       m.VarName,
+					FieldName: m.FieldName,
+					TypeName:  m.TypeName,
+					TypeRef:   m.TypeRef,
+					Pointer:   m.Pointer,
+					Required:  m.Required,
+					Validate:  m.Validate,
+					Example:   m.Example,
+				})
+			}
+		}
+		return &ConvertData{
+			SrcName:    request.Attribute.Name(),
+			SrcRef:     request.Attribute.Ref(),
+			TgtName:    payload.Attribute.Name(),
+			TgtRef:     payload.Attribute.Ref(),
+			Init:       data,
+			Validation: validations,
+		}
+	}
+
+	// client side
+
+	var (
+		data *InitData
+	)
+	{
+		data = buildInitData(payload, request, "payload", "message", true, sd)
+		data.Description = fmt.Sprintf("%s builds the gRPC request type from the payload of the %q endpoint of the %q service.", data.Name, e.Name(), svc.Name)
+	}
+	return &ConvertData{
+		SrcName: payload.Attribute.Name(),
+		SrcRef:  payload.Attribute.Ref(),
+		TgtName: request.Attribute.Name(),
+		TgtRef:  request.Attribute.Ref(),
+		Init:    data,
+	}
+}
+
+// buildResponseConvertData builds the convert data for the server and client
+// responses.
+// * server side - converts method result type to generated gRPC response type
+//                 in *.pb.go
+// * client side - converts generated gRPC response type in *.pb.go and
+//                 response metadata to method result type.
+//
+// svr param indicates that the convert data is generated for server side.
+func buildResponseConvertData(response, result *codegen.ContextualAttribute, hdrs, trlrs []*MetadataData, e *expr.GRPCEndpointExpr, sd *ServiceData, svr bool) *ConvertData {
+	// Streaming endpoints exchange messages on the stream, not in the unary
+	// response, so no unary convert data is needed.
+	if e.MethodExpr.IsStreaming() || (!svr && isEmpty(e.MethodExpr.Result.Type)) {
+		return nil
+	}
+
+	var (
+		svc = sd.Service
+	)
+
+	if svr {
+		// server side
+
+		var (
+			data *InitData
+		)
+		{
+			data = buildInitData(result, response, "result", "message", true, sd)
+			// NOTE(review): buildInitData may return nil on transform error;
+			// confirm the DSL validation guarantees this cannot happen.
+			data.Description = fmt.Sprintf("%s builds the gRPC response type from the result of the %q endpoint of the %q service.", data.Name, e.Name(), svc.Name)
+		}
+		return &ConvertData{
+			SrcName: result.Attribute.Name(),
+			SrcRef:  result.Attribute.Ref(),
+			TgtName: response.Attribute.Name(),
+			TgtRef:  response.Attribute.Ref(),
+			Init:    data,
+		}
+	}
+
+	// client side
+
+	var (
+		data *InitData
+	)
+	{
+		data = buildInitData(response, result, "message", "result", false, sd)
+		data.Description = fmt.Sprintf("%s builds the result type of the %q endpoint of the %q service from the gRPC response type.", data.Name, e.Name(), svc.Name)
+		for _, m := range hdrs {
+			// pass the headers as arguments to result constructor in client
+			data.Args = append(data.Args, &InitArgData{
+				Name:      m.VarName,
+				Ref:       m.VarName,
+				FieldName: m.FieldName,
+				TypeName:  m.TypeName,
+				TypeRef:   m.TypeRef,
+				Pointer:   m.Pointer,
+				Required:  m.Required,
+				Validate:  m.Validate,
+				Example:   m.Example,
+			})
+		}
+		for _, m := range trlrs {
+			// pass the trailers as arguments to result constructor in client
+			data.Args = append(data.Args, &InitArgData{
+				Name:      m.VarName,
+				Ref:       m.VarName,
+				FieldName: m.FieldName,
+				TypeName:  m.TypeName,
+				TypeRef:   m.TypeRef,
+				Pointer:   m.Pointer,
+				Required:  m.Required,
+				Validate:  m.Validate,
+				Example:   m.Example,
+			})
+		}
+	}
+	return &ConvertData{
+		SrcName:    response.Attribute.Name(),
+		SrcRef:     response.Attribute.Ref(),
+		TgtName:    result.Attribute.Name(),
+		TgtRef:     result.Attribute.Ref(),
+		Init:       data,
+		Validation: sd.ValidationFor(protoBufMessageName(response.Attribute.Expr(), sd.Scope)),
+	}
+}
+
+// buildInitData builds the transformation code to convert source to target.
+//
+// source, target are the source and target contextual attributes used in the
+// transformation
+//
+// sourceVar, targetVar are the source and target variable names used in the
+// transformation
+//
+// proto if true indicates the target type is a protocol buffer type
+//
+// sd is the ServiceData
+//
+// It returns nil when the transform code cannot be generated (the error is
+// printed to stdout; see the TBD note below).
+func buildInitData(source, target *codegen.ContextualAttribute, sourceVar, targetVar string, proto bool, sd *ServiceData) *InitData {
+	var (
+		name     string
+		isStruct bool
+		code     string
+		helpers  []*codegen.TransformFunctionData
+		args     []*InitArgData
+		err      error
+	)
+	{
+		isStruct = expr.IsObject(target.Attribute.Expr().Type)
+		n := sd.Scope.GoTypeName(target.Attribute.Expr())
+		if !isStruct {
+			// If target is array, map, or primitive the name will be suffixed with
+			// the definition (e.g int, []string, map[int]string) which is incorrect.
+			n = sd.Scope.GoTypeName(source.Attribute.Expr())
+		}
+		name = "New" + n
+		code, helpers, err = protoBufTransform(source, target, sourceVar, targetVar, proto)
+		if err != nil {
+			fmt.Println(err.Error()) // TBD validate DSL so errors are not possible
+			return nil
+		}
+		// Accumulate the transform helpers so they are rendered once per file.
+		sd.TransformHelpers = codegen.AppendHelpers(sd.TransformHelpers, helpers)
+		// Only pass the source as a constructor argument when the non-proto
+		// side of the conversion is non-empty.
+		if (!proto && !isEmpty(source.Attribute.Expr().Type)) || (proto && !isEmpty(target.Attribute.Expr().Type)) {
+			args = []*InitArgData{
+				&InitArgData{
+					Name:     sourceVar,
+					Ref:      sourceVar,
+					TypeName: source.Attribute.Name(),
+					TypeRef:  source.Attribute.Ref(),
+					Example:  source.Attribute.Expr().Example(expr.Root.API.Random()),
+				},
+			}
+		}
+	}
+	return &InitData{
+		Name:           name,
+		ReturnVarName:  targetVar,
+		ReturnTypeRef:  target.Attribute.Ref(),
+		ReturnIsStruct: isStruct,
+		Code:           code,
+		Args:           args,
+	}
+}
+
+// buildErrorsData builds the error data for all the error responses in the
+// endpoint expression. The response message for each error response are
+// inferred from the method's error expression if not specified explicitly.
+func buildErrorsData(e *expr.GRPCEndpointExpr, sd *ServiceData) []*ErrorData {
+ var (
+ errors []*ErrorData
+
+ svc = sd.Service
+ )
+ errors = make([]*ErrorData, 0, len(e.GRPCErrors))
+ for _, v := range e.GRPCErrors {
+ var responseData *ResponseData
+ {
+ responseData = &ResponseData{
+ StatusCode: statusCodeToGRPCConst(v.Response.StatusCode),
+ Description: v.Response.Description,
+ }
+ }
+ errors = append(errors, &ErrorData{
+ Name: v.Name,
+ Ref: svc.Scope.GoFullTypeRef(v.ErrorExpr.AttributeExpr, svc.PkgName),
+ Response: responseData,
+ })
+ }
+ return errors
+}
+
+// buildStreamData builds the StreamData for the server and client streams.
+// The send/recv convert data mirror each other between the two sides: what
+// the server sends (result) the client receives, and what the client sends
+// (streaming payload) the server receives.
+//
+// svr param indicates that the stream data is built for the server.
+func buildStreamData(e *expr.GRPCEndpointExpr, sd *ServiceData, svr bool) *StreamData {
+	var (
+		varn      string
+		intName   string
+		svcInt    string
+		sendName  string
+		sendDesc  string
+		sendRef   string
+		sendType  *ConvertData
+		recvName  string
+		recvDesc  string
+		recvRef   string
+		recvType  *ConvertData
+		mustClose bool
+		typ       string
+
+		svc      = sd.Service
+		ed       = sd.Endpoint(e.Name())
+		md       = ed.Method
+		spayload = service.TypeContext(e.MethodExpr.StreamingPayload, svc.PkgName, svc.Scope)
+		result   = resultContext(e, sd)
+		request  = protoBufContext(e.StreamingRequest, sd.PkgName, sd.Scope)
+		response = protoBufContext(e.Response.Message, sd.PkgName, sd.Scope)
+	)
+	{
+		// Use "vresult" when the method defines views so the variable name
+		// reflects the viewed result type.
+		resVar := "result"
+		if md.ViewedResult != nil {
+			resVar = "vresult"
+		}
+		if svr {
+			typ = "server"
+			varn = md.ServerStream.VarName
+			intName = fmt.Sprintf("%s.%s_%sServer", sd.PkgName, svc.StructName, md.VarName)
+			svcInt = fmt.Sprintf("%s.%s", svc.PkgName, md.ServerStream.Interface)
+			// server sends results over the stream
+			if e.MethodExpr.Result.Type != expr.Empty {
+				sendName = md.ServerStream.SendName
+				sendRef = ed.ResultRef
+				sendType = &ConvertData{
+					SrcName: result.Attribute.Name(),
+					SrcRef:  result.Attribute.Ref(),
+					TgtName: response.Attribute.Name(),
+					TgtRef:  response.Attribute.Ref(),
+					Init:    buildInitData(result, response, resVar, "v", true, sd),
+				}
+			}
+			// server receives the streaming payload from the stream
+			if e.MethodExpr.StreamingPayload.Type != expr.Empty {
+				recvName = md.ServerStream.RecvName
+				recvRef = spayload.Attribute.Ref()
+				recvType = &ConvertData{
+					SrcName:    request.Attribute.Name(),
+					SrcRef:     request.Attribute.Ref(),
+					TgtName:    spayload.Attribute.Name(),
+					TgtRef:     spayload.Attribute.Ref(),
+					Init:       buildInitData(request, spayload, "v", "spayload", false, sd),
+					Validation: sd.ValidationFor(protoBufMessageName(request.Attribute.Expr(), sd.Scope)),
+				}
+			}
+			mustClose = md.ServerStream.MustClose
+		} else {
+			typ = "client"
+			varn = md.ClientStream.VarName
+			intName = fmt.Sprintf("%s.%s_%sClient", sd.PkgName, svc.StructName, md.VarName)
+			svcInt = fmt.Sprintf("%s.%s", svc.PkgName, md.ClientStream.Interface)
+			// client sends the streaming payload over the stream
+			if e.MethodExpr.StreamingPayload.Type != expr.Empty {
+				sendName = md.ClientStream.SendName
+				sendRef = spayload.Attribute.Ref()
+				sendType = &ConvertData{
+					SrcName: spayload.Attribute.Name(),
+					SrcRef:  spayload.Attribute.Ref(),
+					TgtName: request.Attribute.Name(),
+					TgtRef:  request.Attribute.Ref(),
+					Init:    buildInitData(spayload, request, "spayload", "v", true, sd),
+				}
+			}
+			// client receives results from the stream
+			if e.MethodExpr.Result.Type != expr.Empty {
+				recvName = md.ClientStream.RecvName
+				recvRef = ed.ResultRef
+				recvType = &ConvertData{
+					SrcName:    response.Attribute.Name(),
+					SrcRef:     response.Attribute.Ref(),
+					TgtName:    result.Attribute.Name(),
+					TgtRef:     result.Attribute.Ref(),
+					Init:       buildInitData(response, result, "v", resVar, false, sd),
+					Validation: sd.ValidationFor(protoBufMessageName(response.Attribute.Expr(), sd.Scope)),
+				}
+			}
+			mustClose = md.ClientStream.MustClose
+		}
+		if sendType != nil {
+			sendDesc = fmt.Sprintf("%s streams instances of %q to the %q endpoint gRPC stream.", sendName, sendType.TgtName, md.Name)
+		}
+		if recvType != nil {
+			recvDesc = fmt.Sprintf("%s reads instances of %q from the %q endpoint gRPC stream.", recvName, recvType.SrcName, md.Name)
+		}
+	}
+	return &StreamData{
+		VarName:          varn,
+		Type:             typ,
+		Interface:        intName,
+		ServiceInterface: svcInt,
+		Endpoint:         ed,
+		SendName:         sendName,
+		SendDesc:         sendDesc,
+		SendRef:          sendRef,
+		SendConvert:      sendType,
+		RecvName:         recvName,
+		RecvDesc:         recvDesc,
+		RecvRef:          recvRef,
+		RecvConvert:      recvType,
+		MustClose:        mustClose,
+	}
+}
+
+// extractMetadata collects the request/response metadata from the given
+// metadata attribute and service type (payload/result). One MetadataData is
+// produced per mapped attribute; FieldName is cleared when the service type
+// is not an object since there is then no struct field to assign.
+func extractMetadata(a *expr.MappedAttributeExpr, service *codegen.ContextualAttribute, scope *codegen.NameScope) []*MetadataData {
+	var metadata []*MetadataData
+	codegen.WalkMappedAttr(a, func(name, elem string, required bool, c *expr.AttributeExpr) error {
+		var (
+			varn      string
+			fieldName string
+			pointer   bool
+
+			arr = expr.AsArray(c.Type)
+			mp  = expr.AsMap(c.Type)
+			ca  = service.Dup(c, required)
+		)
+		{
+			varn = scope.Unique(codegen.Goify(name, false))
+			pointer = ca.IsPointer() && expr.IsPrimitive(c.Type)
+			fieldName = codegen.Goify(name, true)
+			if !expr.IsObject(service.Attribute.Expr().Type) {
+				fieldName = ""
+			}
+		}
+		metadata = append(metadata, &MetadataData{
+			Name:          elem,
+			AttributeName: name,
+			Description:   c.Description,
+			FieldName:     fieldName,
+			VarName:       varn,
+			Required:      required,
+			Type:          c.Type,
+			TypeName:      scope.GoTypeName(c),
+			TypeRef:       scope.GoTypeRef(c),
+			Pointer:       pointer,
+			Slice:         arr != nil,
+			StringSlice:   arr != nil && arr.ElemType.Type.Kind() == expr.StringKind,
+			Map:           mp != nil,
+			MapStringSlice: mp != nil &&
+				mp.KeyType.Type.Kind() == expr.StringKind &&
+				mp.ElemType.Type.Kind() == expr.ArrayKind &&
+				expr.AsArray(mp.ElemType.Type).ElemType.Type.Kind() == expr.StringKind,
+			Validate:     codegen.RecursiveValidationCode(ca, varn),
+			DefaultValue: c.DefaultValue,
+			Example:      c.Example(expr.Root.API.Random()),
+		})
+		return nil
+	})
+	return metadata
+}
+
+// resultContext returns the contextual attribute for the result of the given
+// endpoint. When the method defines a viewed result the context is built from
+// the "projected" attribute of the viewed result type using the views package
+// scope; otherwise it is built directly from the method result expression.
+func resultContext(e *expr.GRPCEndpointExpr, sd *ServiceData) *codegen.ContextualAttribute {
+	svc := sd.Service
+	md := svc.Method(e.Name())
+	if md.ViewedResult != nil {
+		// Use the projected type so the generated code can render views.
+		vresAtt := expr.AsObject(md.ViewedResult.Type).Attribute("projected")
+		return service.ProjectedTypeContext(vresAtt, svc.ViewsPkg, svc.ViewScope)
+	}
+	return service.TypeContext(e.MethodExpr.Result, svc.PkgName, svc.Scope)
+}
+
+// isEmpty returns true if given type is empty, that is either the Empty type
+// itself or an object type with no attributes.
+func isEmpty(dt expr.DataType) bool {
+	if dt == expr.Empty {
+		return true
+	}
+	o := expr.AsObject(dt)
+	return o != nil && len(*o) == 0
+}
+
+// typeInitT renders a constructor that converts one type into another.
+// input: InitData
+const typeInitT = `{{ comment .Description }}
+func {{ .Name }}({{ range .Args }}{{ .Name }} {{ .TypeRef }}, {{ end }}) {{ .ReturnTypeRef }} {
+	{{ .Code }}
+{{- if .ReturnIsStruct }}
+	{{- range .Args }}
+		{{- if .FieldName }}
+	{{ $.ReturnVarName }}.{{ .FieldName }} = {{ if .Pointer }}&{{ end }}{{ .Name }}
+		{{- end }}
+	{{- end }}
+{{- end }}
+	return {{ .ReturnVarName }}
+}
+`
+
+// validateT renders a validation function for a message type.
+// input: ValidationData
+const validateT = `{{ printf "%s runs the validations defined on %s." .Name .SrcName | comment }}
+func {{ .Name }}({{ .ArgName }} {{ .SrcRef }}) (err error) {
+	{{ .Def }}
+	return
+}
+`
+
+// streamSendT renders the function implementing the Send method in
+// stream interface.
+// input: StreamData
+const streamSendT = `{{ comment .SendDesc }}
+func (s *{{ .VarName }}) {{ .SendName }}(res {{ .SendRef }}) error {
+{{- if and .Endpoint.Method.ViewedResult (eq .Type "server") }}
+	{{- if .Endpoint.Method.ViewedResult.ViewName }}
+	vres := {{ .Endpoint.ServicePkgName }}.{{ .Endpoint.Method.ViewedResult.Init.Name }}(res, {{ printf "%q" .Endpoint.Method.ViewedResult.ViewName }})
+	{{- else }}
+	vres := {{ .Endpoint.ServicePkgName }}.{{ .Endpoint.Method.ViewedResult.Init.Name }}(res, s.view)
+	{{- end }}
+{{- end }}
+	v := {{ .SendConvert.Init.Name }}({{ if and .Endpoint.Method.ViewedResult (eq .Type "server") }}vres.Projected{{ else }}res{{ end }})
+	return s.stream.{{ .SendName }}(v)
+}
+`
+
+// streamRecvT renders the function implementing the Recv method in
+// stream interface.
+// input: StreamData
+const streamRecvT = `{{ comment .RecvDesc }}
+func (s *{{ .VarName }}) {{ .RecvName }}() ({{ .RecvRef }}, error) {
+	var res {{ .RecvRef }}
+	v, err := s.stream.{{ .RecvName }}()
+	if err != nil {
+		return res, err
+	}
+{{- if and .Endpoint.Method.ViewedResult (eq .Type "client") }}
+	proj := {{ .RecvConvert.Init.Name }}({{ range .RecvConvert.Init.Args }}{{ .Name }}, {{ end }})
+	vres := {{ if not .Endpoint.Method.ViewedResult.IsCollection }}&{{ end }}{{ .Endpoint.Method.ViewedResult.FullName }}{Projected: proj, View: {{ if .Endpoint.Method.ViewedResult.ViewName }}"{{ .Endpoint.Method.ViewedResult.ViewName }}"{{ else }}s.view{{ end }} }
+	return {{ .Endpoint.ServicePkgName }}.{{ .Endpoint.Method.ViewedResult.ResultInit.Name }}(vres), nil
+{{- else }}
+	return {{ .RecvConvert.Init.Name }}({{ range .RecvConvert.Init.Args }}{{ .Name }}, {{ end }}), nil
+{{- end }}
+}
+`
+
+// streamCloseT renders the function implementing the Close method in
+// stream interface.
+// input: StreamData
+const streamCloseT = `
+func (s *{{ .VarName }}) Close() error {
+	{{ comment "nothing to do here" }}
+	return nil
+}
+`
+
+// streamSetViewT renders the function implementing the SetView method in
+// server stream interface.
+// input: StreamData
+const streamSetViewT = `{{ printf "SetView sets the view." | comment }}
+func (s *{{ .VarName }}) SetView(view string) {
+	s.view = view
+}
+`
diff --git a/vendor/goa.design/goa/grpc/codegen/testing.go b/vendor/goa.design/goa/grpc/codegen/testing.go
new file mode 100644
index 000000000..3aa415fb4
--- /dev/null
+++ b/vendor/goa.design/goa/grpc/codegen/testing.go
@@ -0,0 +1,28 @@
+package codegen
+
+import (
+ "bytes"
+ "testing"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/codegen/service"
+ "goa.design/goa/expr"
+)
+
+// RunGRPCDSL returns the GRPC DSL root resulting from running the given DSL.
+// NOTE: it resets the package-level service and gRPC codegen state, so each
+// test invocation starts from a clean slate; do not rely on state surviving
+// across calls.
+func RunGRPCDSL(t *testing.T, dsl func()) *expr.RootExpr {
+	// reset all roots and codegen data structures
+	service.Services = make(service.ServicesData)
+	GRPCServices = make(ServicesData)
+	return expr.RunGRPCDSL(t, dsl)
+}
+
+// sectionCode renders the given section templates in order and returns the
+// concatenated output, failing the test immediately on any render error.
+func sectionCode(t *testing.T, section ...*codegen.SectionTemplate) string {
+	var buf bytes.Buffer
+	for _, st := range section {
+		if err := st.Write(&buf); err != nil {
+			t.Fatal(err)
+		}
+	}
+	return buf.String()
+}
diff --git a/vendor/goa.design/goa/http/client.go b/vendor/goa.design/goa/http/client.go
new file mode 100644
index 000000000..f37829153
--- /dev/null
+++ b/vendor/goa.design/goa/http/client.go
@@ -0,0 +1,215 @@
+package http
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net"
+ "net/http"
+ "sort"
+ "strings"
+)
+
+type (
+	// Doer is the HTTP client interface.
+	Doer interface {
+		Do(*http.Request) (*http.Response, error)
+	}
+
+	// DebugDoer is a Doer that can print the low level HTTP details.
+	DebugDoer interface {
+		Doer
+		// Fprint prints the HTTP request and response details.
+		Fprint(io.Writer)
+	}
+
+	// debugDoer wraps a doer and implements DebugDoer. Do captures the
+	// request and response; Fprint renders the captures.
+	debugDoer struct {
+		Doer
+		// Request is the captured request.
+		Request *http.Request
+		// Response is the captured response.
+		Response *http.Response
+	}
+
+	// ClientError is an error returned by a HTTP service client.
+	ClientError struct {
+		// Name is a name for this class of errors.
+		Name string
+		// Message contains the specific error details.
+		Message string
+		// Service is the name of the service.
+		Service string
+		// Method is the name of the service method.
+		Method string
+		// Temporary reports whether the error is temporary.
+		Temporary bool
+		// Timeout reports whether the error is a timeout.
+		Timeout bool
+		// Fault reports whether the error is a server-side fault.
+		Fault bool
+	}
+)
+
+// NewDebugDoer wraps the given doer and captures the request and response so
+// they can be printed.
+func NewDebugDoer(d Doer) DebugDoer {
+	dd := debugDoer{Doer: d}
+	return &dd
+}
+
+// Do sends the request using the wrapped doer and captures copies of both
+// the request and the response so Fprint can print them later. The request
+// and response bodies are read then restored so callers can still use them.
+func (dd *debugDoer) Do(req *http.Request) (*http.Response, error) {
+	var reqb []byte
+	if req.Body != nil {
+		// Read then restore the body so the wrapped doer can still send it.
+		// NOTE: this must assign (=), not declare (:=). The previous ":="
+		// shadowed reqb so the outer variable stayed nil and the restore
+		// below wiped the captured request body after the call.
+		reqb, _ = ioutil.ReadAll(req.Body)
+		req.Body = ioutil.NopCloser(bytes.NewBuffer(reqb))
+	}
+
+	resp, err := dd.Doer.Do(req)
+
+	if err != nil {
+		return nil, err
+	}
+
+	// Capture the response body and restore it for the caller.
+	respb, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		respb = []byte(fmt.Sprintf("!!failed to read response: %s", err))
+	}
+	resp.Body = ioutil.NopCloser(bytes.NewBuffer(respb))
+
+	dd.Response = resp
+
+	// Restore the request body once more so Fprint can re-read it.
+	req.Body = ioutil.NopCloser(bytes.NewBuffer(reqb))
+	dd.Request = req
+
+	return resp, err
+}
+
+// Fprint dumps the captured request and response details to w. It is a no-op
+// until Do has been called at least once (no capture available).
+func (dd *debugDoer) Fprint(w io.Writer) {
+	if dd.Request == nil {
+		return
+	}
+	buf := &bytes.Buffer{}
+	buf.WriteString(fmt.Sprintf("> %s %s", dd.Request.Method, dd.Request.URL.String()))
+
+	// Sort request header names for deterministic output.
+	keys := make([]string, len(dd.Request.Header))
+	i := 0
+	for k := range dd.Request.Header {
+		keys[i] = k
+		i++
+	}
+	sort.Strings(keys)
+	for _, k := range keys {
+		buf.WriteString(fmt.Sprintf("\n> %s: %s", k, strings.Join(dd.Request.Header[k], ", ")))
+	}
+
+	b, _ := ioutil.ReadAll(dd.Request.Body)
+	if len(b) > 0 {
+		buf.WriteByte('\n')
+		buf.Write(b)
+	}
+
+	if dd.Response == nil {
+		w.Write(buf.Bytes())
+		return
+	}
+	buf.WriteString(fmt.Sprintf("\n< %s", dd.Response.Status))
+
+	// Sort response header names for deterministic output.
+	keys = make([]string, len(dd.Response.Header))
+	i = 0
+	for k := range dd.Response.Header {
+		keys[i] = k
+		i++
+	}
+	sort.Strings(keys)
+	for _, k := range keys {
+		buf.WriteString(fmt.Sprintf("\n< %s: %s", k, strings.Join(dd.Response.Header[k], ", ")))
+	}
+
+	rb, _ := ioutil.ReadAll(dd.Response.Body) // this is reading from a memory buffer so safe to ignore errors
+	if len(rb) > 0 {
+		buf.WriteByte('\n')
+		buf.Write(rb)
+	}
+	w.Write(buf.Bytes())
+	w.Write([]byte{'\n'})
+}
+
+// Error builds an error message that identifies the service, the method and
+// the error details. It implements the error interface.
+func (c *ClientError) Error() string {
+	return fmt.Sprintf("[%s %s]: %s", c.Service, c.Method, c.Message)
+}
+
+// ErrInvalidType is the error returned when the wrong type is given to a
+// method function.
+func ErrInvalidType(svc, m, expected string, actual interface{}) error {
+	return &ClientError{
+		Name:    "invalid_type",
+		Message: fmt.Sprintf("invalid value expected %s, got %v", expected, actual),
+		Service: svc,
+		Method:  m,
+	}
+}
+
+// ErrEncodingError is the error returned when the encoder fails to encode the
+// request body.
+func ErrEncodingError(svc, m string, err error) error {
+	return &ClientError{
+		Name:    "encoding_error",
+		Message: fmt.Sprintf("failed to encode request body: %s", err),
+		Service: svc,
+		Method:  m,
+	}
+}
+
+// ErrInvalidURL is the error returned when the URL computed for an method is
+// invalid.
+func ErrInvalidURL(svc, m, u string, err error) error {
+	return &ClientError{
+		Name:    "invalid_url",
+		Message: fmt.Sprintf("invalid URL %s: %s", u, err),
+		Service: svc,
+		Method:  m,
+	}
+}
+
+// ErrDecodingError is the error returned when the decoder fails to decode the
+// response body.
+func ErrDecodingError(svc, m string, err error) error {
+	return &ClientError{
+		Name:    "decoding_error",
+		Message: fmt.Sprintf("failed to decode response body: %s", err),
+		Service: svc,
+		Method:  m,
+	}
+}
+
+// ErrValidationError is the error returned when the response body is properly
+// received and decoded but fails validation.
+func ErrValidationError(svc, m string, err error) error {
+	return &ClientError{
+		Name:    "validation_error",
+		Message: fmt.Sprintf("invalid response: %s", err),
+		Service: svc,
+		Method:  m,
+	}
+}
+
+// ErrInvalidResponse is the error returned when the service responded with an
+// unexpected response status code. The status code is also classified as
+// temporary, timeout and/or server fault.
+func ErrInvalidResponse(svc, m string, code int, body string) error {
+	var b string
+	if body != "" {
+		b = ", body: "
+	}
+	msg := fmt.Sprintf("invalid response code %#v"+b+"%s", code, body)
+
+	var temporary, timeout, fault bool
+	switch code {
+	case http.StatusServiceUnavailable, http.StatusConflict, http.StatusTooManyRequests:
+		temporary = true
+	case http.StatusGatewayTimeout:
+		// Gateway timeouts count as both temporary and timeout.
+		temporary = true
+		timeout = true
+	case http.StatusRequestTimeout:
+		timeout = true
+	case http.StatusInternalServerError, http.StatusNotImplemented, http.StatusBadGateway:
+		fault = true
+	}
+
+	return &ClientError{Name: "invalid_response", Message: msg, Service: svc, Method: m,
+		Temporary: temporary, Timeout: timeout, Fault: fault}
+}
+
+// ErrRequestError is the error returned when the request fails to be sent.
+func ErrRequestError(svc, m string, err error) error {
+ temporary := false
+ timeout := false
+ if nerr, ok := err.(net.Error); ok {
+ temporary = nerr.Temporary()
+ timeout = nerr.Timeout()
+ }
+ return &ClientError{Name: "request_error", Message: err.Error(), Service: svc, Method: m,
+ Temporary: temporary, Timeout: timeout}
+}
diff --git a/vendor/goa.design/goa/http/codegen/client.go b/vendor/goa.design/goa/http/codegen/client.go
new file mode 100644
index 000000000..0789450ef
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/client.go
@@ -0,0 +1,747 @@
+package codegen
+
+import (
+ "fmt"
+ "path/filepath"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/codegen/service"
+ "goa.design/goa/expr"
+)
+
+// ClientFiles returns the client HTTP transport files: for each HTTP service
+// first the client file, then (in a second pass) the encode/decode file.
+func ClientFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+	svcs := root.API.HTTP.Services
+	n := len(svcs)
+	files := make([]*codegen.File, 2*n)
+	for i, svc := range svcs {
+		files[i] = client(genpkg, svc)
+	}
+	for i, svc := range svcs {
+		files[n+i] = clientEncodeDecode(genpkg, svc)
+	}
+	return files
+}
+
+// client returns the client HTTP transport file for the given service. The
+// file holds the client struct, the per-endpoint stream and multipart types
+// when present, the client constructor and one endpoint constructor per
+// method (plus the stream Send/Recv/Close/SetView methods as needed).
+func client(genpkg string, svc *expr.HTTPServiceExpr) *codegen.File {
+	path := filepath.Join(codegen.Gendir, "http", codegen.SnakeCase(svc.Name()), "client", "client.go")
+	data := HTTPServices.Get(svc.Name())
+	title := fmt.Sprintf("%s client HTTP transport", svc.Name())
+	sections := []*codegen.SectionTemplate{
+		codegen.Header(title, "client", []*codegen.ImportSpec{
+			{Path: "context"},
+			{Path: "fmt"},
+			{Path: "io"},
+			{Path: "mime/multipart"},
+			{Path: "net/http"},
+			{Path: "strconv"},
+			{Path: "strings"},
+			{Path: "sync"},
+			{Path: "time"},
+			{Path: "github.com/gorilla/websocket"},
+			{Path: "goa.design/goa", Name: "goa"},
+			{Path: "goa.design/goa/http", Name: "goahttp"},
+			{Path: genpkg + "/" + codegen.SnakeCase(svc.Name()), Name: data.Service.PkgName},
+			{Path: genpkg + "/" + codegen.SnakeCase(svc.Name()) + "/" + "views", Name: data.Service.ViewsPkg},
+		}),
+	}
+	// Client struct with one Doer per endpoint.
+	sections = append(sections, &codegen.SectionTemplate{
+		Name:   "client-struct",
+		Source: clientStructT,
+		Data:   data,
+		FuncMap: map[string]interface{}{
+			"streamingEndpointExists": streamingEndpointExists,
+		},
+	})
+	// Stream struct types for streaming endpoints.
+	for _, e := range data.Endpoints {
+		if e.ClientStream != nil {
+			sections = append(sections, &codegen.SectionTemplate{
+				Name:   "client-stream-struct-type",
+				Source: streamStructTypeT,
+				Data:   e.ClientStream,
+			})
+		}
+	}
+
+	// Multipart encoder types for multipart endpoints.
+	for _, e := range data.Endpoints {
+		if e.MultipartRequestEncoder != nil {
+			sections = append(sections, &codegen.SectionTemplate{
+				Name:   "multipart-request-encoder-type",
+				Source: multipartRequestEncoderTypeT,
+				Data:   e.MultipartRequestEncoder,
+			})
+		}
+	}
+
+	// Client constructor.
+	sections = append(sections, &codegen.SectionTemplate{
+		Name:   "client-init",
+		Source: clientInitT,
+		Data:   data,
+		FuncMap: map[string]interface{}{
+			"streamingEndpointExists": streamingEndpointExists,
+		},
+	})
+
+	// Endpoint constructors and stream methods.
+	for _, e := range data.Endpoints {
+		sections = append(sections, &codegen.SectionTemplate{
+			Name:   "client-endpoint-init",
+			Source: endpointInitT,
+			Data:   e,
+		})
+		if e.ClientStream != nil {
+			if e.ClientStream.RecvTypeRef != "" {
+				sections = append(sections, &codegen.SectionTemplate{
+					Name:   "client-stream-recv",
+					Source: streamRecvT,
+					Data:   e.ClientStream,
+					FuncMap: map[string]interface{}{
+						"upgradeParams": upgradeParams,
+					},
+				})
+			}
+			switch e.ClientStream.Kind {
+			case expr.ClientStreamKind, expr.BidirectionalStreamKind:
+				// Send is only generated when the client sends on the stream.
+				sections = append(sections, &codegen.SectionTemplate{
+					Name:   "client-stream-send",
+					Source: streamSendT,
+					Data:   e.ClientStream,
+					FuncMap: map[string]interface{}{
+						"upgradeParams":    upgradeParams,
+						"viewedServerBody": viewedServerBody,
+					},
+				})
+			}
+			if e.ClientStream.MustClose {
+				sections = append(sections, &codegen.SectionTemplate{
+					Name:   "client-stream-close",
+					Source: streamCloseT,
+					Data:   e.ClientStream,
+					FuncMap: map[string]interface{}{
+						"upgradeParams": upgradeParams,
+					},
+				})
+			}
+			// SetView is only needed when the view is chosen at runtime.
+			if e.Method.ViewedResult != nil && e.Method.ViewedResult.ViewName == "" {
+				sections = append(sections, &codegen.SectionTemplate{
+					Name:   "client-stream-set-view",
+					Source: streamSetViewT,
+					Data:   e.ClientStream,
+				})
+			}
+		}
+	}
+
+	return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+// clientEncodeDecode returns the file containing the HTTP client encoding and
+// decoding logic: one request builder per endpoint plus, when applicable, the
+// request encoder, multipart encoder and response decoder, followed by the
+// shared transform helper functions.
+func clientEncodeDecode(genpkg string, svc *expr.HTTPServiceExpr) *codegen.File {
+	path := filepath.Join(codegen.Gendir, "http", codegen.SnakeCase(svc.Name()), "client", "encode_decode.go")
+	data := HTTPServices.Get(svc.Name())
+	title := fmt.Sprintf("%s HTTP client encoders and decoders", svc.Name())
+	sections := []*codegen.SectionTemplate{
+		codegen.Header(title, "client", []*codegen.ImportSpec{
+			{Path: "bytes"},
+			{Path: "context"},
+			{Path: "fmt"},
+			{Path: "io"},
+			{Path: "io/ioutil"},
+			{Path: "mime/multipart"},
+			{Path: "net/http"},
+			{Path: "net/url"},
+			{Path: "strconv"},
+			{Path: "strings"},
+			{Path: "unicode/utf8"},
+			{Path: "goa.design/goa", Name: "goa"},
+			{Path: "goa.design/goa/http", Name: "goahttp"},
+			{Path: genpkg + "/" + codegen.SnakeCase(svc.Name()), Name: data.Service.PkgName},
+			{Path: genpkg + "/" + codegen.SnakeCase(svc.Name()) + "/" + "views", Name: data.Service.ViewsPkg},
+		}),
+	}
+
+	for _, e := range data.Endpoints {
+		// Request builder is always generated.
+		sections = append(sections, &codegen.SectionTemplate{
+			Name:   "request-builder",
+			Source: requestBuilderT,
+			Data:   e,
+		})
+		if e.RequestEncoder != "" {
+			sections = append(sections, &codegen.SectionTemplate{
+				Name:   "request-encoder",
+				Source: requestEncoderT,
+				FuncMap: map[string]interface{}{
+					"typeConversionData": typeConversionData,
+					"isBearer":           isBearer,
+				},
+				Data: e,
+			})
+		}
+		if e.MultipartRequestEncoder != nil {
+			sections = append(sections, &codegen.SectionTemplate{
+				Name:   "multipart-request-encoder",
+				Source: multipartRequestEncoderT,
+				Data:   e.MultipartRequestEncoder,
+			})
+		}
+		// Decoder only makes sense when there is a result or errors to decode.
+		if e.Result != nil || len(e.Errors) > 0 {
+			sections = append(sections, &codegen.SectionTemplate{
+				Name:   "response-decoder",
+				Source: responseDecoderT,
+				Data:   e,
+			})
+		}
+	}
+	for _, h := range data.ClientTransformHelpers {
+		sections = append(sections, &codegen.SectionTemplate{
+			Name:   "client-transform-helper",
+			Source: transformHelperT,
+			Data:   h,
+		})
+	}
+
+	return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+// typeConversionData produces the template data suitable for executing the
+// "header_conversion" template.
+func typeConversionData(dt expr.DataType, varName string, target string) map[string]interface{} {
+	data := make(map[string]interface{}, 3)
+	data["Type"] = dt
+	data["VarName"] = varName
+	data["Target"] = target
+	return data
+}
+
+// isBearer returns true if the security scheme uses a Bearer scheme, that is
+// a JWT or OAuth2 scheme carried in the Authorization header.
+func isBearer(schemes []*service.SchemeData) bool {
+	for _, s := range schemes {
+		if s.Name == "Authorization" && (s.Type == "JWT" || s.Type == "OAuth2") {
+			return true
+		}
+	}
+	return false
+}
+
+// clientStructT renders the client struct holding one Doer per endpoint.
+// input: ServiceData
+const clientStructT = `{{ printf "%s lists the %s service endpoint HTTP clients." .ClientStruct .Service.Name | comment }}
+type {{ .ClientStruct }} struct {
+	{{- range .Endpoints }}
+	{{ printf "%s Doer is the HTTP client used to make requests to the %s endpoint." .Method.VarName .Method.Name | comment }}
+	{{ .Method.VarName }}Doer goahttp.Doer
+	{{ end }}
+	// RestoreResponseBody controls whether the response bodies are reset after
+	// decoding so they can be read again.
+	RestoreResponseBody bool
+
+	scheme  string
+	host    string
+	encoder func(*http.Request) goahttp.Encoder
+	decoder func(*http.Response) goahttp.Decoder
+	{{- if streamingEndpointExists . }}
+	dialer       goahttp.Dialer
+	connConfigFn goahttp.ConnConfigureFunc
+	{{- end }}
+}
+`
+
+// clientInitT renders the client constructor.
+// input: ServiceData
+const clientInitT = `{{ printf "New%s instantiates HTTP clients for all the %s service servers." .ClientStruct .Service.Name | comment }}
+func New{{ .ClientStruct }}(
+	scheme string,
+	host string,
+	doer goahttp.Doer,
+	enc func(*http.Request) goahttp.Encoder,
+	dec func(*http.Response) goahttp.Decoder,
+	restoreBody bool,
+	{{- if streamingEndpointExists . }}
+	dialer goahttp.Dialer,
+	connConfigFn goahttp.ConnConfigureFunc,
+	{{- end }}
+) *{{ .ClientStruct }} {
+	return &{{ .ClientStruct }}{
+		{{- range .Endpoints }}
+		{{ .Method.VarName }}Doer: doer,
+		{{- end }}
+		RestoreResponseBody: restoreBody,
+		scheme:              scheme,
+		host:                host,
+		decoder:             dec,
+		encoder:             enc,
+		{{- if streamingEndpointExists . }}
+		dialer:       dialer,
+		connConfigFn: connConfigFn,
+		{{- end }}
+	}
+}
+`
+
+// endpointInitT renders the endpoint constructor: it builds the request,
+// encodes it, then either dials a websocket stream or performs the HTTP
+// request and decodes the response.
+// input: EndpointData
+const endpointInitT = `{{ printf "%s returns an endpoint that makes HTTP requests to the %s service %s server." .EndpointInit .ServiceName .Method.Name | comment }}
+func (c *{{ .ClientStruct }}) {{ .EndpointInit }}({{ if .MultipartRequestEncoder }}{{ .MultipartRequestEncoder.VarName }} {{ .MultipartRequestEncoder.FuncName }}{{ end }}) goa.Endpoint {
+	var (
+		{{- if and .ClientStream .RequestEncoder }}
+		encodeRequest = {{ .RequestEncoder }}({{ if .MultipartRequestEncoder }}{{ .MultipartRequestEncoder.InitName }}({{ .MultipartRequestEncoder.VarName }}){{ else }}c.encoder{{ end }})
+		{{- else }}
+		{{- if .RequestEncoder }}
+		encodeRequest = {{ .RequestEncoder }}({{ if .MultipartRequestEncoder }}{{ .MultipartRequestEncoder.InitName }}({{ .MultipartRequestEncoder.VarName }}){{ else }}c.encoder{{ end }})
+		{{- end }}
+		{{- end }}
+		decodeResponse = {{ .ResponseDecoder }}(c.decoder, c.RestoreResponseBody)
+	)
+	return func(ctx context.Context, v interface{}) (interface{}, error) {
+		req, err := c.{{ .RequestInit.Name }}(ctx, {{ range .RequestInit.ClientArgs }}{{ .Ref }}{{ end }})
+		if err != nil {
+			return nil, err
+		}
+		{{- if .RequestEncoder }}
+		err = encodeRequest(req, v)
+		if err != nil {
+			return nil, err
+		}
+		{{- end }}
+
+		{{- if .ClientStream }}
+		conn, resp, err := c.dialer.Dial(req.URL.String(), req.Header)
+		if err != nil {
+			if resp != nil {
+				return decodeResponse(resp)
+			}
+			return nil, goahttp.ErrRequestError("{{ .ServiceName }}", "{{ .Method.Name }}", err)
+		}
+		if c.connConfigFn != nil {
+			conn = c.connConfigFn(conn)
+		}
+		stream := &{{ .ClientStream.VarName }}{conn: conn}
+		{{- if .Method.ViewedResult }}
+		{{- if not .Method.ViewedResult.ViewName }}
+		view := resp.Header.Get("goa-view")
+		stream.SetView(view)
+		{{- end }}
+		{{- end }}
+		return stream, nil
+		{{- else }}
+		resp, err := c.{{ .Method.VarName }}Doer.Do(req)
+
+		if err != nil {
+			return nil, goahttp.ErrRequestError("{{ .ServiceName }}", "{{ .Method.Name }}", err)
+		}
+		return decodeResponse(resp)
+		{{- end }}
+	}
+}
+`
+
+// requestBuilderT renders the function that builds the HTTP request for an
+// endpoint from its client arguments.
+// input: EndpointData
+const requestBuilderT = `{{ comment .RequestInit.Description }}
+func (c *{{ .ClientStruct }}) {{ .RequestInit.Name }}(ctx context.Context, {{ range .RequestInit.ClientArgs }}{{ .Name }} {{ .TypeRef }}{{ end }}) (*http.Request, error) {
+	{{- .RequestInit.ClientCode }}
+}
+`
+
+// requestEncoderT renders the request encoder: it maps the payload onto the
+// request headers, query string parameters, body and basic-auth credentials.
+// The embedded "type_conversion" template converts primitive values to their
+// string representation.
+// input: EndpointData
+const requestEncoderT = `{{ printf "%s returns an encoder for requests sent to the %s %s server." .RequestEncoder .ServiceName .Method.Name | comment }}
+func {{ .RequestEncoder }}(encoder func(*http.Request) goahttp.Encoder) func(*http.Request, interface{}) error {
+	return func(req *http.Request, v interface{}) error {
+		p, ok := v.({{ .Payload.Ref }})
+		if !ok {
+			return goahttp.ErrInvalidType("{{ .ServiceName }}", "{{ .Method.Name }}", "{{ .Payload.Ref }}", v)
+		}
+	{{- range .Payload.Request.Headers }}
+		{{- if .FieldName }}
+		{{- if .Pointer }}
+		if p.{{ .FieldName }} != nil {
+		{{- end }}
+		{{- if (and (eq .Name "Authorization") (isBearer $.HeaderSchemes)) }}
+		if !strings.Contains({{ if .Pointer }}*{{ end }}p.{{ .FieldName }}, " ") {
+			req.Header.Set({{ printf "%q" .Name }}, "Bearer "+{{ if .Pointer }}*{{ end }}p.{{ .FieldName }})
+		} else {
+		{{- end }}
+		req.Header.Set({{ printf "%q" .Name }}, {{ if .Pointer }}*{{ end }}p.{{ .FieldName }})
+		{{- if (and (eq .Name "Authorization") (isBearer $.HeaderSchemes)) }}
+		}
+		{{- end }}
+		{{- if .Pointer }}
+		}
+		{{- end }}
+		{{- end }}
+	{{- end }}
+	{{- if or .Payload.Request.QueryParams }}
+		values := req.URL.Query()
+	{{- end }}
+	{{- range .Payload.Request.QueryParams }}
+		{{- if .MapQueryParams }}
+		for key, value := range p{{ if .FieldName }}.{{ .FieldName }}{{ end }} {
+			{{ template "type_conversion" (typeConversionData .Type.KeyType.Type "keyStr" "key") }}
+			{{- if eq .Type.ElemType.Type.Name "array" }}
+			for _, val := range value {
+				{{ template "type_conversion" (typeConversionData .Type.ElemType.Type.ElemType.Type "valStr" "val") }}
+				values.Add(keyStr, valStr)
+			}
+			{{- else }}
+			{{ template "type_conversion" (typeConversionData .Type.ElemType.Type "valueStr" "value") }}
+			values.Add(keyStr, valueStr)
+			{{- end }}
+		}
+		{{- else if .StringSlice }}
+		for _, value := range p{{ if .FieldName }}.{{ .FieldName }}{{ end }} {
+			values.Add("{{ .Name }}", value)
+		}
+		{{- else if .Slice }}
+		for _, value := range p{{ if .FieldName }}.{{ .FieldName }}{{ end }} {
+			{{ template "type_conversion" (typeConversionData .Type.ElemType.Type "valueStr" "value") }}
+			values.Add("{{ .Name }}", valueStr)
+		}
+		{{- else if .FieldName }}
+		{{- if .Pointer }}
+		if p.{{ .FieldName }} != nil {
+		{{- end }}
+		values.Add("{{ .Name }}",
+			{{- if eq .Type.Name "bytes" }} string(
+			{{- else if not (eq .Type.Name "string") }} fmt.Sprintf("%v",
+			{{- end }}
+			{{- if .Pointer }}*{{ end }}p.{{ .FieldName }}
+			{{- if or (eq .Type.Name "bytes") (not (eq .Type.Name "string")) }})
+			{{- end }})
+		{{- if .Pointer }}
+		}
+		{{- end }}
+		{{- end }}
+	{{- end }}
+	{{- if .Payload.Request.QueryParams }}
+		req.URL.RawQuery = values.Encode()
+	{{- end }}
+	{{- if .MultipartRequestEncoder }}
+		if err := encoder(req).Encode(p); err != nil {
+			return goahttp.ErrEncodingError("{{ .ServiceName }}", "{{ .Method.Name }}", err)
+		}
+	{{- else if .Payload.Request.ClientBody }}
+		{{- if .Payload.Request.ClientBody.Init }}
+		body := {{ .Payload.Request.ClientBody.Init.Name }}({{ range .Payload.Request.ClientBody.Init.ClientArgs }}{{ if .Pointer }}&{{ end }}{{ .Name }}, {{ end }})
+		{{- else }}
+		body := p
+		{{- end }}
+		if err := encoder(req).Encode(&body); err != nil {
+			return goahttp.ErrEncodingError("{{ .ServiceName }}", "{{ .Method.Name }}", err)
+		}
+	{{- end }}
+	{{- if .BasicScheme }}{{ with .BasicScheme }}
+		{{- if not .UsernameRequired }}
+		if p.{{ .UsernameField }} != nil {
+		{{- end }}
+		{{- if not .PasswordRequired }}
+		if p.{{ .PasswordField }} != nil {
+		{{- end }}
+		req.SetBasicAuth({{ if .UsernamePointer }}*{{ end }}p.{{ .UsernameField }}, {{ if .PasswordPointer }}*{{ end }}p.{{ .PasswordField }})
+		{{- if not .UsernameRequired }}
+		}
+		{{- end }}
+		{{- if not .PasswordRequired }}
+		}
+		{{- end }}
+	{{- end }}{{ end }}
+		return nil
+	}
+}
+
+{{- define "type_conversion" }}
+	{{- if eq .Type.Name "boolean" -}}
+	{{ .VarName }} := strconv.FormatBool({{ .Target }})
+	{{- else if eq .Type.Name "int" -}}
+	{{ .VarName }} := strconv.Itoa({{ .Target }})
+	{{- else if eq .Type.Name "int32" -}}
+	{{ .VarName }} := strconv.FormatInt(int64({{ .Target }}), 10)
+	{{- else if eq .Type.Name "int64" -}}
+	{{ .VarName }} := strconv.FormatInt({{ .Target }}, 10)
+	{{- else if eq .Type.Name "uint" -}}
+	{{ .VarName }} := strconv.FormatUint(uint64({{ .Target }}), 10)
+	{{- else if eq .Type.Name "uint32" -}}
+	{{ .VarName }} := strconv.FormatUint(uint64({{ .Target }}), 10)
+	{{- else if eq .Type.Name "uint64" -}}
+	{{ .VarName }} := strconv.FormatUint({{ .Target }}, 10)
+	{{- else if eq .Type.Name "float32" -}}
+	{{ .VarName }} := strconv.FormatFloat(float64({{ .Target }}), 'f', -1, 32)
+	{{- else if eq .Type.Name "float64" -}}
+	{{ .VarName }} := strconv.FormatFloat({{ .Target }}, 'f', -1, 64)
+	{{- else if eq .Type.Name "string" -}}
+	{{ .VarName }} := {{ .Target }}
+	{{- else if eq .Type.Name "bytes" -}}
+	{{ .VarName }} := string({{ .Target }})
+	{{- else if eq .Type.Name "any" -}}
+	{{ .VarName }} := fmt.Sprintf("%v", {{ .Target }})
+	{{- else }}
+	// unsupported type {{ .Type.Name }} for header field {{ .FieldName }}
+	{{- end }}
+{{- end }}
+`
+
+// responseDecoderT renders the response decoder: it switches on the response
+// status code, decodes the success result (handling viewed results and view
+// headers) and maps error status codes to the endpoint error types. It embeds
+// singleResponseT to decode each individual response body and headers.
+// input: EndpointData
+const responseDecoderT = `{{ printf "%s returns a decoder for responses returned by the %s %s endpoint. restoreBody controls whether the response body should be restored after having been read." .ResponseDecoder .ServiceName .Method.Name | comment }}
+{{- if .Errors }}
+{{ printf "%s may return the following errors:" .ResponseDecoder | comment }}
+	{{- range $gerr := .Errors }}
+	{{- range $errors := .Errors }}
+//	- {{ printf "%q" .Name }} (type {{ .Ref }}): {{ .Response.StatusCode }}{{ if .Response.Description }}, {{ .Response.Description }}{{ end }}
+	{{- end }}
+	{{- end }}
+//	- error: internal error
+{{- end }}
+func {{ .ResponseDecoder }}(decoder func(*http.Response) goahttp.Decoder, restoreBody bool) func(*http.Response) (interface{}, error) {
+	return func(resp *http.Response) (interface{}, error) {
+		if restoreBody {
+			b, err := ioutil.ReadAll(resp.Body)
+			if err != nil {
+				return nil, err
+			}
+			resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+			defer func() {
+				resp.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+			}()
+		} else {
+			defer resp.Body.Close()
+		}
+		switch resp.StatusCode {
+	{{- range .Result.Responses }}
+		case {{ .StatusCode }}:
+` + singleResponseT + `
+		{{- if .ResultInit }}
+			{{- if .ViewedResult }}
+			p := {{ .ResultInit.Name }}({{ range .ResultInit.ClientArgs }}{{ .Ref }},{{ end }})
+			{{- if $.Method.ViewedResult.ViewName }}
+			view := {{ printf "%q" $.Method.ViewedResult.ViewName }}
+			{{- else }}
+			view := resp.Header.Get("goa-view")
+			{{- end }}
+			vres := {{ if not $.Method.ViewedResult.IsCollection }}&{{ end }}{{ $.Method.ViewedResult.ViewsPkg}}.{{ $.Method.ViewedResult.VarName }}{p, view}
+			{{- if .ClientBody }}
+			if err = {{ $.Method.ViewedResult.ViewsPkg}}.Validate{{ $.Method.Result }}(vres); err != nil {
+				return nil, goahttp.ErrValidationError("{{ $.ServiceName }}", "{{ $.Method.Name }}", err)
+			}
+			{{- end }}
+			res := {{ $.ServicePkgName }}.{{ $.Method.ViewedResult.ResultInit.Name }}(vres)
+			{{- else }}
+			res := {{ .ResultInit.Name }}({{ range .ResultInit.ClientArgs }}{{ .Ref }},{{ end }})
+			{{- end }}
+			{{- if .TagName }}
+			{{- if .TagPointer }}
+			tmp := {{ printf "%q" .TagValue }}
+			res.{{ .TagName }} = &tmp
+			{{- else }}
+			res.{{ .TagName }} = {{ printf "%q" .TagValue }}
+			{{- end }}
+			{{- end }}
+			return res, nil
+		{{- else if .ClientBody }}
+			return body, nil
+		{{- else }}
+			return nil, nil
+		{{- end }}
+	{{- end }}
+	{{- range .Errors }}
+		case {{ .StatusCode }}:
+		{{- if gt (len .Errors) 1 }}
+			en := resp.Header.Get("goa-error")
+			switch en {
+			{{- range .Errors }}
+			case {{ printf "%q" .Name }}:
+				{{- with .Response }}
+` + singleResponseT + `
+				{{- if .ResultInit }}
+				return nil, {{ .ResultInit.Name }}({{ range .ResultInit.ClientArgs }}{{ .Ref }},{{ end }})
+				{{- else if .ClientBody }}
+				return nil, body
+				{{- else }}
+				return nil, nil
+				{{- end }}
+				{{- end }}
+			{{- end }}
+			default:
+				body, _ := ioutil.ReadAll(resp.Body)
+				return nil, goahttp.ErrInvalidResponse({{ printf "%q" $.ServiceName }}, {{ printf "%q" $.Method.Name }}, resp.StatusCode, string(body))
+			}
+		{{- else }}
+			{{- with (index .Errors 0).Response }}
+` + singleResponseT + `
+			{{- if .ResultInit }}
+			return nil, {{ .ResultInit.Name }}({{ range .ResultInit.ClientArgs }}{{ .Ref }},{{ end }})
+			{{- else if .ClientBody }}
+			return nil, body
+			{{- else }}
+			return nil, nil
+			{{- end }}
+			{{- end }}
+		{{- end }}
+	{{- end }}
+		default:
+			body, _ := ioutil.ReadAll(resp.Body)
+			return nil, goahttp.ErrInvalidResponse({{ printf "%q" .ServiceName }}, {{ printf "%q" .Method.Name }}, resp.StatusCode, string(body))
+		}
+	}
+}
+`
+
+// singleResponseT generates the Go code that handles one HTTP response:
+// it decodes the response body (when one is defined), then extracts,
+// converts, defaults and validates the response headers. It is spliced
+// into the response-decoder templates that precede it.
+// input: ResponseData
+const singleResponseT = ` {{- if .ClientBody }}
+	var (
+		body {{ .ClientBody.VarName }}
+		err error
+	)
+	err = decoder(resp).Decode(&body)
+	if err != nil {
+		return nil, goahttp.ErrDecodingError("{{ $.ServiceName }}", "{{ $.Method.Name }}", err)
+	}
+	{{- if .ClientBody.ValidateRef }}
+	{{ .ClientBody.ValidateRef }}
+	if err != nil {
+		return nil, goahttp.ErrValidationError("{{ $.ServiceName }}", "{{ $.Method.Name }}", err)
+	}
+	{{- end }}
+	{{- end }}
+
+	{{- if .Headers }}
+	var (
+	{{- range .Headers }}
+		{{ .VarName }} {{ .TypeRef }}
+	{{- end }}
+	{{- if not .ClientBody }}
+	{{- if .MustValidate }}
+		err error
+	{{- end }}
+	{{- end }}
+	)
+	{{- range .Headers }}
+
+	{{- if (or (eq .Type.Name "string") (eq .Type.Name "any")) }}
+	{{ .VarName }}Raw := resp.Header.Get("{{ .Name }}")
+	{{- if .Required }}
+	if {{ .VarName }}Raw == "" {
+		err = goa.MergeErrors(err, goa.MissingFieldError("{{ .Name }}", "header"))
+	}
+	{{ .VarName }} = {{ if and (eq .Type.Name "string") .Pointer }}&{{ end }}{{ .VarName }}Raw
+	{{- else }}
+	if {{ .VarName }}Raw != "" {
+		{{ .VarName }} = {{ if and (eq .Type.Name "string") .Pointer }}&{{ end }}{{ .VarName }}Raw
+	}
+	{{- if .DefaultValue }} else {
+		{{ .VarName }} = {{ if eq .Type.Name "string" }}{{ printf "%q" .DefaultValue }}{{ else }}{{ printf "%#v" .DefaultValue }}{{ end }}
+	}
+	{{- end }}
+	{{- end }}
+
+	{{- else if .StringSlice }}
+	{{ .VarName }} = resp.Header["{{ .CanonicalName }}"]
+	{{ if .Required }}
+	if {{ .VarName }} == nil {
+		err = goa.MergeErrors(err, goa.MissingFieldError("{{ .Name }}", "header"))
+	}
+	{{- else if .DefaultValue }}
+	if {{ .VarName }} == nil {
+		{{ .VarName }} = {{ printf "%#v" .DefaultValue }}
+	}
+	{{- end }}
+
+	{{- else if .Slice }}
+	{{ .VarName }}Raw := resp.Header["{{ .CanonicalName }}"]
+	{{ if .Required }} if {{ .VarName }}Raw == nil {
+		return nil, goahttp.ErrValidationError("{{ $.ServiceName }}", "{{ $.Method.Name }}", goa.MissingFieldError("{{ .Name }}", "header"))
+	}
+	{{- else if .DefaultValue }}
+	if {{ .VarName }}Raw == nil {
+		{{ .VarName }} = {{ printf "%#v" .DefaultValue }}
+	}
+	{{- end }}
+
+	{{- if .DefaultValue }}else {
+	{{- else if not .Required }}
+	if {{ .VarName }}Raw != nil {
+	{{- end }}
+	{{- template "slice_conversion" . }}
+	{{- if or .DefaultValue (not .Required) }}
+	}
+	{{- end }}
+
+	{{- else }}{{/* not string, not any and not slice */}}
+	{{ .VarName }}Raw := resp.Header.Get("{{ .Name }}")
+	{{- if .Required }}
+	if {{ .VarName }}Raw == "" {
+		return nil, goahttp.ErrValidationError("{{ $.ServiceName }}", "{{ $.Method.Name }}", goa.MissingFieldError("{{ .Name }}", "header"))
+	}
+	{{- else if .DefaultValue }}
+	if {{ .VarName }}Raw == "" {
+		{{ .VarName }} = {{ printf "%#v" .DefaultValue }}
+	}
+	{{- end }}
+
+	{{- if .DefaultValue }}else {
+	{{- else if not .Required }}
+	if {{ .VarName }}Raw != "" {
+	{{- end }}
+	{{- template "type_conversion" . }}
+	{{- if or .DefaultValue (not .Required) }}
+	}
+	{{- end }}
+	{{- end }}
+	{{- if .Validate }}
+	{{ .Validate }}
+	{{- end }}
+	{{- end }}{{/* range .Headers */}}
+	{{- end }}
+
+	{{- if .MustValidate }}
+	if err != nil {
+		return nil, goahttp.ErrValidationError("{{ $.ServiceName }}", "{{ $.Method.Name }}", err)
+	}
+	{{- end }}
+`
+
+// multipartRequestEncoderTypeT generates the function type a user implements
+// to encode a multipart request body for an endpoint.
+// input: multipartData
+const multipartRequestEncoderTypeT = `{{ printf "%s is the type to encode multipart request for the %q service %q endpoint." .FuncName .ServiceName .MethodName | comment }}
+type {{ .FuncName }} func(*multipart.Writer, {{ .Payload.Ref }}) error
+`
+
+// multipartRequestEncoderT generates the constructor that adapts a user
+// provided multipart encoder function into a goahttp.Encoder which writes
+// the multipart body and sets the request Content-Type header.
+// input: multipartData
+const multipartRequestEncoderT = `{{ printf "%s returns an encoder to encode the multipart request for the %q service %q endpoint." .InitName .ServiceName .MethodName | comment }}
+func {{ .InitName }}(encoderFn {{ .FuncName }}) func(r *http.Request) goahttp.Encoder {
+	return func(r *http.Request) goahttp.Encoder {
+		body := &bytes.Buffer{}
+		mw := multipart.NewWriter(body)
+		return goahttp.EncodingFunc(func(v interface{}) error {
+			p := v.({{ .Payload.Ref }})
+			if err := encoderFn(mw, p); err != nil {
+				return err
+			}
+			r.Body = ioutil.NopCloser(body)
+			r.Header.Set("Content-Type", mw.FormDataContentType())
+			return mw.Close()
+		})
+	}
+}
+`
+
+// clientStreamRecvT generates the Recv method of the client-side websocket
+// stream: it reads one JSON message from the connection, maps a normal
+// close to io.EOF and, for viewed results, validates and converts the
+// view type before returning it.
+// input: EndpointData
+const clientStreamRecvT = `{{ printf "Recv receives a %s type from the %q endpoint websocket connection." .Result.Name .Method.Name | comment }}
+func (c *{{ .ClientStream.VarName }}) Recv() ({{ .Result.Ref }}, error) {
+	{{- if .Method.ViewedResult }}
+	var vres {{ .Method.ViewedResult.ViewsPkg }}.{{ .Result.Name }}
+	{{- else }}
+	var res {{ .Result.Name }}
+	{{- end }}
+	err := c.conn.ReadJSON(&{{ if .Method.ViewedResult }}vres{{ else }}res{{ end }})
+	if websocket.IsCloseError(err, goahttp.NormalSocketCloseErrors...) {
+		return nil, io.EOF
+	}
+	if err != nil {
+		return nil, err
+	}
+	{{- if .Method.ViewedResult }}
+	if err := {{ .Method.ViewedResult.ViewsPkg }}.Validate{{ .Result.Name }}(vres); err != nil {
+		return nil, goahttp.ErrValidationError("{{ $.ServiceName }}", "{{ $.Method.Name }}", err)
+	}
+	return {{ $.ServicePkgName }}.{{ .Method.ViewedResult.ResultInit.Name }}(vres), nil
+	{{- else }}
+	return &res, nil
+	{{- end }}
+}
+`
diff --git a/vendor/goa.design/goa/http/codegen/client_cli.go b/vendor/goa.design/goa/http/codegen/client_cli.go
new file mode 100644
index 000000000..754bd92da
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/client_cli.go
@@ -0,0 +1,875 @@
+package codegen
+
+import (
+ "encoding/json"
+ "fmt"
+ "path/filepath"
+ "reflect"
+ "strconv"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// Data structures fed to the CLI templates below to render the command
+// line parsing code and the payload build functions.
+type (
+	commandData struct {
+		// Name of command e.g. "cellar-storage"
+		Name string
+		// VarName is the name of the command variable e.g.
+		// "cellarStorage"
+		VarName string
+		// Description is the help text.
+		Description string
+		// Subcommands is the list of endpoint commands.
+		Subcommands []*subcommandData
+		// Example is a valid command invocation, starting with the
+		// command name.
+		Example string
+		// PkgName is the service HTTP client package import name,
+		// e.g. "storagec".
+		PkgName string
+		// NeedStream if true passes websocket specific arguments to the CLI.
+		NeedStream bool
+	}
+
+	subcommandData struct {
+		// Name is the subcommand name e.g. "add"
+		Name string
+		// FullName is the subcommand full name e.g. "storageAdd"
+		FullName string
+		// Description is the help text.
+		Description string
+		// Flags is the list of flags supported by the subcommand.
+		Flags []*flagData
+		// MethodVarName is the endpoint method name, e.g. "Add"
+		MethodVarName string
+		// BuildFunction contains the data for the payload build
+		// function if any. Exclusive with Conversion.
+		BuildFunction *buildFunctionData
+		// Conversion contains the flag value to payload conversion
+		// function if any. Exclusive with BuildFunction.
+		Conversion string
+		// Example is a valid command invocation, starting with the
+		// command name.
+		Example string
+		// MultipartRequestEncoder is the data necessary to render
+		// multipart request encoder.
+		MultipartRequestEncoder *MultipartData
+	}
+
+	flagData struct {
+		// Name is the name of the flag, e.g. "list-vintage"
+		Name string
+		// VarName is the name of the flag variable, e.g. "listVintage"
+		VarName string
+		// Type is the type of the flag, e.g. INT
+		Type string
+		// FullName is the flag full name e.g. "storageAddVintage"
+		FullName string
+		// Description is the flag help text.
+		Description string
+		// Required is true if the flag is required.
+		Required bool
+		// Example returns a JSON serialized example value.
+		Example string
+	}
+
+	buildFunctionData struct {
+		// Name is the build payload function name.
+		Name string
+		// ActualParams is the list of passed build function parameters.
+		ActualParams []string
+		// FormalParams is the list of build function formal parameter
+		// names.
+		FormalParams []string
+		// ServiceName is the name of the service.
+		ServiceName string
+		// MethodName is the name of the method.
+		MethodName string
+		// ResultType is the fully qualified payload type name.
+		ResultType string
+		// Fields describes the payload fields.
+		Fields []*fieldData
+		// PayloadInit contains the data needed to render the function
+		// body.
+		PayloadInit *InitData
+		// CheckErr is true if the payload initialization code requires
+		// an "err error" variable that must be checked.
+		CheckErr bool
+		// Args contains the data needed to build payload.
+		Args []*InitArgData
+	}
+
+	fieldData struct {
+		// Name is the field name, e.g. "Vintage"
+		Name string
+		// VarName is the name of the local variable holding the field
+		// value, e.g. "vintage"
+		VarName string
+		// TypeName is the name of the type.
+		TypeName string
+		// Init is the code initializing the variable.
+		Init string
+		// Pointer is true if the variable needs to be declared as a
+		// pointer.
+		Pointer bool
+	}
+)
+
+// ClientCLIFiles returns the client HTTP CLI support file.
+func ClientCLIFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+	var (
+		data []*commandData
+		svcs []*expr.HTTPServiceExpr
+	)
+	// Only services exposing at least one HTTP endpoint get a CLI command.
+	for _, svc := range root.API.HTTP.Services {
+		sd := HTTPServices.Get(svc.Name())
+		if len(sd.Endpoints) > 0 {
+			data = append(data, buildCommandData(sd))
+			svcs = append(svcs, svc)
+		}
+	}
+	if len(svcs) == 0 {
+		return nil
+	}
+
+	// One endpoint parser file per declared server and one payload
+	// builder file per service.
+	var files []*codegen.File
+	for _, svr := range root.API.Servers {
+		files = append(files, endpointParser(genpkg, root, svr, data))
+	}
+	for i, svc := range svcs {
+		files = append(files, payloadBuilders(genpkg, svc, data[i]))
+	}
+	return files
+}
+
+// endpointParser returns the file that implements the command line parser that
+// builds the client endpoint and payload necessary to perform a request.
+func endpointParser(genpkg string, root *expr.RootExpr, svr *expr.ServerExpr, data []*commandData) *codegen.File {
+	pkg := codegen.SnakeCase(codegen.Goify(svr.Name, true))
+	path := filepath.Join(codegen.Gendir, "http", "cli", pkg, "cli.go")
+	title := fmt.Sprintf("%s HTTP client CLI support package", svr.Name)
+	specs := []*codegen.ImportSpec{
+		{Path: "encoding/json"},
+		{Path: "flag"},
+		{Path: "fmt"},
+		{Path: "net/http"},
+		{Path: "os"},
+		{Path: "strconv"},
+		{Path: "unicode/utf8"},
+		{Path: "goa.design/goa", Name: "goa"},
+		{Path: "goa.design/goa/http", Name: "goahttp"},
+	}
+	// Import the HTTP client package of every service hosted by the server.
+	for _, sv := range svr.Services {
+		svc := root.Service(sv)
+		sd := HTTPServices.Get(svc.Name)
+		specs = append(specs, &codegen.ImportSpec{
+			Path: genpkg + "/http/" + codegen.SnakeCase(sd.Service.Name) + "/client",
+			Name: sd.Service.PkgName + "c",
+		})
+	}
+	// Build the usage lines and example invocations rendered by --help.
+	usages := make([]string, len(data))
+	var examples []string
+	for i, cmd := range data {
+		subs := make([]string, len(cmd.Subcommands))
+		for i, s := range cmd.Subcommands {
+			subs[i] = s.Name
+		}
+		var lp, rp string
+		if len(subs) > 1 {
+			lp = "("
+			rp = ")"
+		}
+		usages[i] = fmt.Sprintf("%s %s%s%s", cmd.Name, lp, strings.Join(subs, "|"), rp)
+		// Only the first five commands contribute usage examples.
+		if i < 5 {
+			examples = append(examples, cmd.Example)
+		}
+	}
+
+	sections := []*codegen.SectionTemplate{
+		codegen.Header(title, "cli", specs),
+		{Source: usageT, Data: usages},
+		{Source: exampleT, Data: examples},
+	}
+	sections = append(sections, &codegen.SectionTemplate{
+		Name:   "parse-endpoint",
+		Source: parseT,
+		Data:   data,
+		FuncMap: map[string]interface{}{
+			"streamingCmdExists": streamingCmdExists,
+		},
+	})
+	for _, cmd := range data {
+		sections = append(sections, &codegen.SectionTemplate{
+			Name:    "cli-command-usage",
+			Source:  commandUsageT,
+			Data:    cmd,
+			FuncMap: map[string]interface{}{"printDescription": printDescription},
+		})
+	}
+
+	return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+// printDescription makes a description safe to embed in a generated Go raw
+// string literal by escaping backticks, and indents continuation lines for
+// the help output.
+func printDescription(desc string) string {
+	res := strings.Replace(desc, "`", "`+\"`\"+`", -1)
+	res = strings.Replace(res, "\n", "\n\t", -1)
+	return res
+}
+
+// payloadBuilders returns the file that contains the payload constructors that
+// use flag values as arguments.
+func payloadBuilders(genpkg string, svc *expr.HTTPServiceExpr, data *commandData) *codegen.File {
+	path := filepath.Join(codegen.Gendir, "http", codegen.SnakeCase(svc.Name()), "client", "cli.go")
+	title := fmt.Sprintf("%s HTTP client CLI support package", svc.Name())
+	sd := HTTPServices.Get(svc.Name())
+	specs := []*codegen.ImportSpec{
+		{Path: "encoding/json"},
+		{Path: "fmt"},
+		{Path: "net/http"},
+		{Path: "os"},
+		{Path: "strconv"},
+		{Path: "unicode/utf8"},
+		{Path: "goa.design/goa", Name: "goa"},
+		{Path: "goa.design/goa/http", Name: "goahttp"},
+		{Path: genpkg + "/" + codegen.SnakeCase(svc.Name()), Name: sd.Service.PkgName},
+	}
+	sections := []*codegen.SectionTemplate{
+		codegen.Header(title, "client", specs),
+	}
+	// Emit one build function per subcommand that defines one.
+	for _, sub := range data.Subcommands {
+		if sub.BuildFunction != nil {
+			sections = append(sections, &codegen.SectionTemplate{
+				Name:   "cli-build-payload",
+				Source: buildPayloadT,
+				Data:   sub.BuildFunction,
+			})
+		}
+	}
+
+	return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+// buildCommandData builds the data needed by the templates to render the CLI
+// parsing of the service command.
+func buildCommandData(svc *ServiceData) *commandData {
+	var (
+		name        string
+		description string
+		subcommands []*subcommandData
+		example     string
+	)
+	{
+		name = svc.Service.Name
+		description = svc.Service.Description
+		if description == "" {
+			description = fmt.Sprintf("Make requests to the %q service", name)
+		}
+		subcommands = make([]*subcommandData, len(svc.Endpoints))
+		for i, e := range svc.Endpoints {
+			subcommands[i] = buildSubcommandData(svc, e)
+		}
+		// The first subcommand example doubles as the command example.
+		if len(subcommands) > 0 {
+			example = subcommands[0].Example
+		}
+	}
+	return &commandData{
+		Name:        codegen.KebabCase(name),
+		VarName:     codegen.Goify(name, false),
+		Description: description,
+		Subcommands: subcommands,
+		Example:     example,
+		PkgName:     svc.Service.PkgName + "c",
+		NeedStream:  streamingEndpointExists(svc),
+	}
+}
+
+// buildSubcommandData builds the data used to render the CLI parsing of a
+// single service endpoint (subcommand): its flags and either a payload
+// build function or an inline flag-to-payload conversion snippet — the two
+// are exclusive.
+func buildSubcommandData(svc *ServiceData, e *EndpointData) *subcommandData {
+	var (
+		name          string
+		fullName      string
+		description   string
+		flags         []*flagData
+		buildFunction *buildFunctionData
+		conversion    string
+	)
+	{
+		svcn := svc.Service.Name
+		en := e.Method.Name
+		name = codegen.KebabCase(en)
+		fullName = goify(svcn, en)
+		description = e.Method.Description
+		if description == "" {
+			description = fmt.Sprintf("Make request to the %q endpoint", e.Method.Name)
+		}
+		if e.Payload != nil {
+			if e.Payload.Request.PayloadInit != nil {
+				args := e.Payload.Request.PayloadInit.ClientArgs
+				args = append(args, e.Payload.Request.PayloadInit.CLIArgs...)
+				flags, buildFunction = makeFlags(e, args)
+			} else if e.Payload.Ref != "" {
+				// No constructor: expose the whole payload as a single "p" flag.
+				ex := jsonExample(e.Method.PayloadEx)
+				fn := goify(svcn, en, "p")
+				flags = append(flags, &flagData{
+					Name:        "p",
+					Type:        flagType(e.Method.PayloadRef),
+					FullName:    fn,
+					Description: e.Method.PayloadDesc,
+					Required:    true,
+					Example:     ex,
+				})
+			}
+			if buildFunction == nil && len(flags) > 0 {
+				// No build function, just convert the arg to the body type
+				var convPre, convSuff string
+				target := "data"
+				if flagType(e.Method.Payload) == "JSON" {
+					target = "val"
+					convPre = fmt.Sprintf("var val %s\n", e.Method.Payload)
+					convSuff = "\ndata = val"
+				}
+				conv, check := conversionCode(
+					"*"+flags[0].FullName+"Flag",
+					target,
+					e.Method.Payload,
+					false,
+				)
+				conversion = convPre + conv + convSuff
+				if check {
+					conversion = "var err error\n" + conversion
+					conversion += "\nif err != nil {\n"
+					if flagType(e.Method.Payload) == "JSON" {
+						conversion += fmt.Sprintf(`return nil, nil, fmt.Errorf("invalid JSON for %s, example of valid JSON:\n%%s", %q)`,
+							flags[0].FullName+"Flag", flags[0].Example)
+					} else {
+						conversion += fmt.Sprintf(`return nil, nil, fmt.Errorf("invalid value for %s, must be %s")`,
+							flags[0].FullName+"Flag", flags[0].Type)
+					}
+					conversion += "\n}"
+				}
+			}
+		}
+	}
+	sub := &subcommandData{
+		Name:          name,
+		FullName:      fullName,
+		Description:   description,
+		Flags:         flags,
+		MethodVarName: e.Method.VarName,
+		BuildFunction: buildFunction,
+		Conversion:    conversion,
+	}
+	if e.MultipartRequestEncoder != nil {
+		sub.MultipartRequestEncoder = e.MultipartRequestEncoder
+	}
+	generateExample(sub, svc.Service.Name)
+
+	return sub
+}
+
+// generateExample sets the subcommand example to a sample invocation built
+// from the kebab-cased service name, the subcommand name and each flag
+// example value.
+func generateExample(sub *subcommandData, svc string) {
+	ex := codegen.KebabCase(svc) + " " + codegen.KebabCase(sub.Name)
+	for _, f := range sub.Flags {
+		ex += " --" + f.Name + " " + f.Example
+	}
+	sub.Example = ex
+}
+
+// makeFlags returns the flag definitions for the given payload initializer
+// arguments together with the data used to render the payload build
+// function. Arguments mapping neither to a payload field nor to the body
+// still contribute a flag but no field initializer.
+func makeFlags(e *EndpointData, args []*InitArgData) ([]*flagData, *buildFunctionData) {
+	var (
+		fdata  []*fieldData
+		flags  = make([]*flagData, len(args))
+		params = make([]string, len(args))
+		check  bool
+	)
+	for i, arg := range args {
+		f := argToFlag(e.ServiceName, e.Method.Name, arg)
+		flags[i] = f
+		params[i] = f.FullName
+		if arg.FieldName == "" && arg.Name != "body" {
+			continue
+		}
+		code, chek := fieldLoadCode(f.FullName, f.Type, arg)
+		check = check || chek
+		tn := arg.TypeRef
+		if f.Type == "JSON" {
+			// We need to declare the variable without
+			// a pointer to be able to unmarshal the JSON
+			// using its address.
+			tn = arg.TypeName
+		}
+		fdata = append(fdata, &fieldData{
+			Name:     arg.Name,
+			VarName:  arg.Name,
+			TypeName: tn,
+			Init:     code,
+			Pointer:  arg.Pointer,
+		})
+	}
+	return flags, &buildFunctionData{
+		Name:         "Build" + e.Method.VarName + "Payload",
+		ActualParams: params,
+		FormalParams: params,
+		ServiceName:  e.ServiceName,
+		MethodName:   e.Method.Name,
+		ResultType:   e.Payload.Ref,
+		Fields:       fdata,
+		PayloadInit:  e.Payload.Request.PayloadInit,
+		CheckErr:     check,
+		Args:         args,
+	}
+}
+
+// jsonExample returns the JSON serialization of v for display in the CLI
+// help. Multi-line output is wrapped in single quotes so it can be pasted
+// in a shell; "?" is returned when v cannot be marshaled.
+func jsonExample(v interface{}) string {
+	// In JSON, keys must be a string. But goa allows map keys to be anything.
+	r := reflect.ValueOf(v)
+	if r.Kind() == reflect.Map {
+		keys := r.MapKeys()
+		// NOTE(review): keys[0] assumes the example map is non-empty —
+		// an empty map example would panic here; confirm upstream.
+		if keys[0].Kind() != reflect.String {
+			a := make(map[string]interface{}, len(keys))
+			var kstr string
+			for _, k := range keys {
+				switch t := k.Interface().(type) {
+				case bool:
+					kstr = strconv.FormatBool(t)
+				case int32:
+					kstr = strconv.FormatInt(int64(t), 10)
+				case int64:
+					kstr = strconv.FormatInt(t, 10)
+				case int:
+					kstr = strconv.Itoa(t)
+				case float32:
+					kstr = strconv.FormatFloat(float64(t), 'f', -1, 32)
+				case float64:
+					kstr = strconv.FormatFloat(t, 'f', -1, 64)
+				default:
+					kstr = k.String()
+				}
+				a[kstr] = r.MapIndex(k).Interface()
+			}
+			v = a
+		}
+	}
+	b, err := json.MarshalIndent(v, "   ", "   ")
+	ex := "?"
+	if err == nil {
+		ex = string(b)
+	}
+	if strings.Contains(ex, "\n") {
+		ex = "'" + strings.Replace(ex, "'", "\\'", -1) + "'"
+	}
+	return ex
+}
+
+// goify camel-cases and concatenates the given terms, lower-casing the
+// first term, e.g. goify("storage", "add") yields "storageAdd".
+func goify(terms ...string) string {
+	res := codegen.Goify(terms[0], false)
+	if len(terms) == 1 {
+		return res
+	}
+	for _, t := range terms[1:] {
+		res += codegen.Goify(t, true)
+	}
+	return res
+}
+
+// fieldLoadCode returns the code of the build payload function that initializes
+// one of the payload object fields. It returns the initialization code and a
+// boolean indicating whether the code requires an "err" variable.
+func fieldLoadCode(actual, fType string, arg *InitArgData) (string, bool) {
+	var (
+		code    string
+		check   bool
+		startIf string
+		endIf   string
+	)
+	{
+		// Optional flags are only converted when a value was supplied.
+		if !arg.Required {
+			startIf = fmt.Sprintf("if %s != \"\" {\n", actual)
+			endIf = "\n}"
+		}
+		if arg.TypeName == stringN {
+			ref := "&"
+			if arg.Required || arg.DefaultValue != nil {
+				ref = ""
+			}
+			code = arg.Name + " = " + ref + actual
+		} else {
+			ex := jsonExample(arg.Example)
+			code, check = conversionCode(actual, arg.Name, arg.TypeName, !arg.Required && arg.DefaultValue == nil)
+			if check {
+				code += "\nif err != nil {\n"
+				if flagType(arg.TypeName) == "JSON" {
+					code += fmt.Sprintf(`return nil, fmt.Errorf("invalid JSON for %s, example of valid JSON:\n%%s", %q)`,
+						arg.Name, ex)
+				} else {
+					code += fmt.Sprintf(`err = fmt.Errorf("invalid value for %s, must be %s")`,
+						arg.Name, fType)
+				}
+				code += "\n}"
+			}
+			if arg.Validate != "" {
+				code += "\n" + arg.Validate + "\n" + "if err != nil {\n\treturn nil, err\n}"
+			}
+		}
+	}
+	return fmt.Sprintf("%s%s%s", startIf, code, endIf), check
+}
+
+// Go native type names for the goa primitive types, used by conversionCode
+// and flagType to pick the flag parsing strategy.
+var (
+	boolN    = codegen.GoNativeTypeName(expr.Boolean)
+	intN     = codegen.GoNativeTypeName(expr.Int)
+	int32N   = codegen.GoNativeTypeName(expr.Int32)
+	int64N   = codegen.GoNativeTypeName(expr.Int64)
+	uintN    = codegen.GoNativeTypeName(expr.UInt)
+	uint32N  = codegen.GoNativeTypeName(expr.UInt32)
+	uint64N  = codegen.GoNativeTypeName(expr.UInt64)
+	float32N = codegen.GoNativeTypeName(expr.Float32)
+	float64N = codegen.GoNativeTypeName(expr.Float64)
+	stringN  = codegen.GoNativeTypeName(expr.String)
+	bytesN   = codegen.GoNativeTypeName(expr.Bytes)
+)
+
+// conversionCode produces the code that converts the string stored in the
+// variable "from" to the value stored in the variable "to" of type typeName.
+// The returned boolean reports whether the generated code assigns an "err"
+// variable that the caller must check.
+func conversionCode(from, to, typeName string, pointer bool) (string, bool) {
+	var (
+		parse    string
+		cast     string
+		checkErr bool
+	)
+	target := to
+	needCast := typeName != stringN && typeName != bytesN && flagType(typeName) != "JSON"
+	decl := ""
+	if needCast && pointer {
+		// Parse into a temporary "val" first; its address is taken below.
+		target = "val"
+		decl = ":"
+	}
+	switch typeName {
+	case boolN:
+		parse = fmt.Sprintf("%s, err %s= strconv.ParseBool(%s)", target, decl, from)
+		checkErr = true
+	case intN:
+		parse = fmt.Sprintf("var v int64\nv, err = strconv.ParseInt(%s, 10, 64)", from)
+		cast = fmt.Sprintf("%s %s= int(v)", target, decl)
+		checkErr = true
+	case int32N:
+		parse = fmt.Sprintf("var v int64\nv, err = strconv.ParseInt(%s, 10, 32)", from)
+		cast = fmt.Sprintf("%s %s= int32(v)", target, decl)
+		checkErr = true
+	case int64N:
+		parse = fmt.Sprintf("%s, err %s= strconv.ParseInt(%s, 10, 64)", target, decl, from)
+		// NOTE(review): unlike every other numeric case this one does not
+		// set checkErr = true, so callers never emit a check for the
+		// strconv.ParseInt error — confirm against upstream goa and add
+		// the flag if missing.
+	case uintN:
+		parse = fmt.Sprintf("var v uint64\nv, err = strconv.ParseUint(%s, 10, 64)", from)
+		cast = fmt.Sprintf("%s %s= uint(v)", target, decl)
+		checkErr = true
+	case uint32N:
+		parse = fmt.Sprintf("var v uint64\nv, err = strconv.ParseUint(%s, 10, 32)", from)
+		cast = fmt.Sprintf("%s %s= uint32(v)", target, decl)
+		checkErr = true
+	case uint64N:
+		parse = fmt.Sprintf("%s, err %s= strconv.ParseUint(%s, 10, 64)", target, decl, from)
+		checkErr = true
+	case float32N:
+		parse = fmt.Sprintf("var v float64\nv, err = strconv.ParseFloat(%s, 32)", from)
+		cast = fmt.Sprintf("%s %s= float32(v)", target, decl)
+		checkErr = true
+	case float64N:
+		parse = fmt.Sprintf("%s, err %s= strconv.ParseFloat(%s, 64)", target, decl, from)
+		checkErr = true
+	case stringN:
+		parse = fmt.Sprintf("%s %s= %s", target, decl, from)
+	case bytesN:
+		parse = fmt.Sprintf("%s %s= string(%s)", target, decl, from)
+	default:
+		// Anything else (arrays, maps, objects) is decoded from JSON.
+		parse = fmt.Sprintf("err = json.Unmarshal([]byte(%s), &%s)", from, target)
+		checkErr = true
+	}
+	if !needCast {
+		return parse, checkErr
+	}
+	if cast != "" {
+		parse = parse + "\n" + cast
+	}
+	if to != target {
+		ref := ""
+		if pointer {
+			ref = "&"
+		}
+		parse = parse + fmt.Sprintf("\n%s = %s%s", to, ref, target)
+	}
+	return parse, checkErr
+}
+
+// flagType returns the user facing type name displayed in the CLI help for
+// the given Go type name. Bytes render as STRING; everything non-primitive
+// renders as JSON.
+func flagType(tname string) string {
+	switch tname {
+	case boolN, intN, int32N, int64N, uintN, uint32N, uint64N, float32N, float64N, stringN:
+		return strings.ToUpper(tname)
+	case bytesN:
+		return "STRING"
+	default: // Any, Array, Map, Object, User
+		return "JSON"
+	}
+}
+
+// argToFlag returns the flag definition for the given payload initializer
+// argument of the given service and endpoint.
+func argToFlag(svcn, en string, arg *InitArgData) *flagData {
+	ex := jsonExample(arg.Example)
+	fn := goify(svcn, en, arg.Name)
+	return &flagData{
+		Name:        codegen.KebabCase(arg.Name),
+		VarName:     codegen.Goify(arg.Name, false),
+		Type:        flagType(arg.TypeName),
+		FullName:    fn,
+		Description: arg.Description,
+		Required:    arg.Required,
+		Example:     ex,
+	}
+}
+
+// streamingCmdExists returns true if at least one command in the list of
+// commands uses a websocket stream for sending its payload or result.
+func streamingCmdExists(data []*commandData) bool {
+	for _, c := range data {
+		if c.NeedStream {
+			return true
+		}
+	}
+	return false
+}
+
+// usageT generates the UsageCommands helper listing each command and its
+// subcommands.
+// input: []string
+const usageT = `// UsageCommands returns the set of commands and sub-commands using the format
+//
+//    command (subcommand1|subcommand2|...)
+//
+func UsageCommands() string {
+	return ` + "`" + `{{ range . }}{{ . }}
+{{ end }}` + "`" + `
+}
+`
+
+// exampleT generates the UsageExamples helper that prints sample CLI
+// invocations.
+// input: []string
+const exampleT = `// UsageExamples produces an example of a valid invocation of the CLI tool.
+func UsageExamples() string {
+	return {{ range . }}os.Args[0] + ` + "`" + ` {{ . }}` + "`" + ` + "\n" +
+	{{ end }}""
+}
+`
+
+// parseT generates ParseEndpoint, which parses the global, service and
+// endpoint flag sets from os.Args and returns the selected client endpoint
+// together with its payload.
+// input: []commandData
+const parseT = `// ParseEndpoint returns the endpoint and payload as specified on the command
+// line.
+func ParseEndpoint(
+	scheme, host string,
+	doer goahttp.Doer,
+	enc func(*http.Request) goahttp.Encoder,
+	dec func(*http.Response) goahttp.Decoder,
+	restore bool,
+	{{- if streamingCmdExists . }}
+	dialer goahttp.Dialer,
+	connConfigFn goahttp.ConnConfigureFunc,
+	{{- end }}
+	{{- range $c := . }}
+	{{- range .Subcommands }}
+	{{- if .MultipartRequestEncoder }}
+	{{ .MultipartRequestEncoder.VarName }} {{ $c.PkgName }}.{{ .MultipartRequestEncoder.FuncName }},
+	{{- end }}
+	{{- end }}
+{{- end }}
+) (goa.Endpoint, interface{}, error) {
+	var (
+		{{- range . }}
+		{{ .VarName }}Flags = flag.NewFlagSet("{{ .Name }}", flag.ContinueOnError)
+		{{ range .Subcommands }}
+		{{ .FullName }}Flags = flag.NewFlagSet("{{ .Name }}", flag.ExitOnError)
+		{{- $sub := . }}
+		{{- range .Flags }}
+		{{ .FullName }}Flag = {{ $sub.FullName }}Flags.String("{{ .Name }}", "{{ if .Required }}REQUIRED{{ end }}", {{ printf "%q" .Description }})
+		{{- end }}
+		{{ end }}
+		{{- end }}
+	)
+	{{ range . -}}
+	{{ $cmd := . -}}
+	{{ .VarName }}Flags.Usage = {{ .VarName }}Usage
+	{{ range .Subcommands -}}
+	{{ .FullName }}Flags.Usage = {{ .FullName }}Usage
+	{{ end }}
+	{{ end }}
+	if err := flag.CommandLine.Parse(os.Args[1:]); err != nil {
+		return nil, nil, err
+	}
+
+	if len(os.Args) < flag.NFlag()+3 {
+		return nil, nil, fmt.Errorf("not enough arguments")
+	}
+
+	var (
+		svcn string
+		svcf *flag.FlagSet
+	)
+	{
+		svcn = os.Args[1+flag.NFlag()]
+		switch svcn {
+	{{- range . }}
+		case "{{ .Name }}":
+			svcf = {{ .VarName }}Flags
+	{{- end }}
+		default:
+			return nil, nil, fmt.Errorf("unknown service %q", svcn)
+		}
+	}
+	if err := svcf.Parse(os.Args[2+flag.NFlag():]); err != nil {
+		return nil, nil, err
+	}
+
+	var (
+		epn string
+		epf *flag.FlagSet
+	)
+	{
+		epn = os.Args[2+flag.NFlag()+svcf.NFlag()]
+		switch svcn {
+	{{- range . }}
+		case "{{ .Name }}":
+			switch epn {
+		{{- range .Subcommands }}
+			case "{{ .Name }}":
+				epf = {{ .FullName }}Flags
+			{{ end }}
+			}
+	{{ end }}
+		}
+	}
+	if epf == nil {
+		return nil, nil, fmt.Errorf("unknown %q endpoint %q", svcn, epn)
+	}
+
+	// Parse endpoint flags if any
+	if len(os.Args) > 2+flag.NFlag()+svcf.NFlag() {
+		if err := epf.Parse(os.Args[3+flag.NFlag()+svcf.NFlag():]); err != nil {
+			return nil, nil, err
+		}
+	}
+
+	var (
+		data     interface{}
+		endpoint goa.Endpoint
+		err      error
+	)
+	{
+		switch svcn {
+	{{- range . }}
+		case "{{ .Name }}":
+			c := {{ .PkgName }}.NewClient(scheme, host, doer, enc, dec, restore{{ if .NeedStream }}, dialer, connConfigFn{{- end }})
+			switch epn {
+		{{- $pkgName := .PkgName }}{{ range .Subcommands }}
+			case "{{ .Name }}":
+				endpoint = c.{{ .MethodVarName }}({{ if .MultipartRequestEncoder }}{{ .MultipartRequestEncoder.VarName }}{{ end }})
+			{{- if .BuildFunction }}
+				data, err = {{ $pkgName}}.{{ .BuildFunction.Name }}({{ range .BuildFunction.ActualParams }}*{{ . }}Flag, {{ end }})
+			{{- else if .Conversion }}
+				{{ .Conversion }}
+			{{- else }}
+				data = nil
+			{{- end }}
+		{{- end }}
+			}
+	{{- end }}
+		}
+	}
+	if err != nil {
+		return nil, nil, err
+	}
+
+	return endpoint, data, nil
+}
+`
+
+// buildPayloadT generates a Build<Method>Payload function that converts the
+// CLI flag string values into the endpoint payload value.
+// input: buildFunctionData
+const buildPayloadT = `{{ printf "%s builds the payload for the %s %s endpoint from CLI flags." .Name .ServiceName .MethodName | comment }}
+func {{ .Name }}({{ range .FormalParams }}{{ . }} string, {{ end }}) ({{ .ResultType }}, error) {
+	{{- if .CheckErr }}
+	var err error
+	{{- end }}
+	{{- range .Fields }}
+	{{- if .VarName }}
+	var {{ .VarName }} {{ if .Pointer }}*{{ end }}{{ .TypeName }}
+	{
+		{{ .Init }}
+	}
+	{{- end }}
+	{{- end }}
+	{{- if .CheckErr }}
+	if err != nil {
+		return nil, err
+	}
+	{{- end }}
+	{{- with .PayloadInit }}
+
+	{{- if .ClientCode }}
+	{{ .ClientCode }}
+	{{- if .ReturnTypeAttribute }}
+	res := &{{ .ReturnTypeName }}{
+		{{ .ReturnTypeAttribute }}: v,
+	}
+	{{- end }}
+	{{- if .ReturnIsStruct }}
+		{{- range $.Args }}
+		{{- if .FieldName }}
+	{{ if $.PayloadInit.ReturnTypeAttribute }}res{{ else }}v{{ end }}.{{ .FieldName }} = {{ .Name }}
+		{{- end }}
+		{{- end }}
+	{{- end }}
+	return {{ if .ReturnTypeAttribute }}res{{ else }}v{{ end }}, nil
+
+	{{- else }}
+
+	{{- if .ReturnIsStruct }}
+	payload := &{{ .ReturnTypeName }}{
+		{{- range $.Args }}
+		{{- if .FieldName }}
+		{{ .FieldName }}: {{ .Name }},
+		{{- end }}
+		{{- end }}
+	}
+	return payload, nil
+	{{- end }}
+
+	{{- end }}
+	{{- end }}
+}
+`
+
+// commandUsageT generates the <cmd>Usage functions invoked by --help.
+// NOTE(review): the generated doc comment interpolates .Name (kebab-case)
+// while the function is named after .VarName — the generated comment may
+// not match the function name; confirm against upstream goa.
+// input: commandData
+const commandUsageT = `{{ printf "%sUsage displays the usage of the %s command and its subcommands." .Name .Name | comment }}
+func {{ .VarName }}Usage() {
+	fmt.Fprintf(os.Stderr, ` + "`" + `{{ printDescription .Description }}
+Usage:
+    %s [globalflags] {{ .Name }} COMMAND [flags]
+
+COMMAND:
+    {{- range .Subcommands }}
+    {{ .Name }}: {{ printDescription .Description }}
+    {{- end }}
+
+Additional help:
+    %s {{ .Name }} COMMAND --help
+` + "`" + `, os.Args[0], os.Args[0])
+}
+
+{{- range .Subcommands }}
+func {{ .FullName }}Usage() {
+	fmt.Fprintf(os.Stderr, ` + "`" + `%s [flags] {{ $.Name }} {{ .Name }}{{range .Flags }} -{{ .Name }} {{ .Type }}{{ end }}
+
+{{ printDescription .Description}}
+	{{- range .Flags }}
+    -{{ .Name }} {{ .Type }}: {{ .Description }}
+	{{- end }}
+
+Example:
+    ` + "`+os.Args[0]+" + "`" + ` {{ .Example }}
+` + "`" + `, os.Args[0])
+}
+{{ end }}
+`
diff --git a/vendor/goa.design/goa/http/codegen/client_types.go b/vendor/goa.design/goa/http/codegen/client_types.go
new file mode 100644
index 000000000..b6ed193cc
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/client_types.go
@@ -0,0 +1,251 @@
+package codegen
+
+import (
+ "path/filepath"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// ClientTypeFiles returns the HTTP transport client types files.
+func ClientTypeFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ fw := make([]*codegen.File, len(root.API.HTTP.Services))
+ seen := make(map[string]struct{})
+ for i, svc := range root.API.HTTP.Services {
+ fw[i] = clientType(genpkg, svc, seen)
+ }
+ return fw
+}
+
+// clientType return the file containing the type definitions used by the HTTP
+// transport for the given service client. seen keeps track of the names of the
+// types that have already been generated to prevent duplicate code generation.
+//
+// Below are the rules governing whether values are pointers or not. Note that
+// the rules only applies to values that hold primitive types, values that hold
+// slices, maps or objects always use pointers either implicitly - slices and
+// maps - or explicitly - objects.
+//
+// * The payload struct fields (if a struct) hold pointers when not required
+// and have no default value.
+//
+// * Request and response body fields (if the body is a struct) always hold
+// pointers to allow for explicit validation.
+//
+// * Request header, path and query string parameter variables hold pointers
+// when not required. Request header, body fields and param variables that
+// have default values are never required (enforced by DSL engine).
+//
+// * The result struct fields (if a struct) hold pointers when not required
+// or have a default value (so generated code can set when null).
+//
+// * Response header variables hold pointers when not required and have no
+// default value.
+//
+func clientType(genpkg string, svc *expr.HTTPServiceExpr, seen map[string]struct{}) *codegen.File {
+ var (
+ path string
+ rdata = HTTPServices.Get(svc.Name())
+ )
+ path = filepath.Join(codegen.Gendir, "http", codegen.SnakeCase(svc.Name()), "client", "types.go")
+ sd := HTTPServices.Get(svc.Name())
+ header := codegen.Header(svc.Name()+" HTTP client types", "client",
+ []*codegen.ImportSpec{
+ {Path: "unicode/utf8"},
+ {Path: genpkg + "/" + codegen.SnakeCase(svc.Name()), Name: sd.Service.PkgName},
+ {Path: genpkg + "/" + codegen.SnakeCase(svc.Name()) + "/" + "views", Name: sd.Service.ViewsPkg},
+ {Path: "goa.design/goa", Name: "goa"},
+ },
+ )
+
+ var (
+ initData []*InitData
+ validatedTypes []*TypeData
+
+ sections = []*codegen.SectionTemplate{header}
+ )
+
+ // request body types
+ for _, a := range svc.HTTPEndpoints {
+ adata := rdata.Endpoint(a.Name())
+ if data := adata.Payload.Request.ClientBody; data != nil {
+ if data.Def != "" {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-request-body",
+ Source: typeDeclT,
+ Data: data,
+ })
+ }
+ if data.Init != nil {
+ initData = append(initData, data.Init)
+ }
+ if data.ValidateDef != "" {
+ validatedTypes = append(validatedTypes, data)
+ }
+ }
+ if adata.ClientStream != nil {
+ if data := adata.ClientStream.Payload; data != nil {
+ if data.Def != "" {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-request-body",
+ Source: typeDeclT,
+ Data: data,
+ })
+ }
+ if data.Init != nil {
+ initData = append(initData, data.Init)
+ }
+ if data.ValidateDef != "" {
+ validatedTypes = append(validatedTypes, data)
+ }
+ }
+ }
+ }
+
+ // response body types
+ for _, a := range svc.HTTPEndpoints {
+ adata := rdata.Endpoint(a.Name())
+ for _, resp := range adata.Result.Responses {
+ if data := resp.ClientBody; data != nil {
+ if data.Def != "" {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-response-body",
+ Source: typeDeclT,
+ Data: data,
+ })
+ }
+ if data.ValidateDef != "" {
+ validatedTypes = append(validatedTypes, data)
+ }
+ }
+ }
+ }
+
+ // error body types
+ for _, a := range svc.HTTPEndpoints {
+ adata := rdata.Endpoint(a.Name())
+ for _, gerr := range adata.Errors {
+ for _, herr := range gerr.Errors {
+ if data := herr.Response.ClientBody; data != nil {
+ if data.Def != "" {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-error-body",
+ Source: typeDeclT,
+ Data: data,
+ })
+ }
+ if data.ValidateDef != "" {
+ validatedTypes = append(validatedTypes, data)
+ }
+ }
+ }
+ }
+ }
+
+ for _, data := range rdata.ClientBodyAttributeTypes {
+ if data.Def != "" {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-body-attributes",
+ Source: typeDeclT,
+ Data: data,
+ })
+ }
+
+ if data.ValidateDef != "" {
+ validatedTypes = append(validatedTypes, data)
+ }
+ }
+
+ // body constructors
+ for _, init := range initData {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-body-init",
+ Source: clientBodyInitT,
+ Data: init,
+ })
+ }
+
+ for _, adata := range rdata.Endpoints {
+ // response to method result (client)
+ for _, resp := range adata.Result.Responses {
+ if init := resp.ResultInit; init != nil {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-result-init",
+ Source: clientTypeInitT,
+ Data: init,
+ })
+ }
+ }
+
+ // error response to method result (client)
+ for _, gerr := range adata.Errors {
+ for _, herr := range gerr.Errors {
+ if init := herr.Response.ResultInit; init != nil {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-error-result-init",
+ Source: clientTypeInitT,
+ Data: init,
+ })
+ }
+ }
+ }
+ }
+
+ // body attribute types
+ // validate methods
+ for _, data := range validatedTypes {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "client-validate",
+ Source: validateT,
+ Data: data,
+ })
+ }
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
// input: InitData
// clientBodyInitT generates the constructor that builds a request body value
// from the method payload before the client encodes the request; the emitted
// ClientCode is expected to populate a local named body.
const clientBodyInitT = `{{ comment .Description }}
func {{ .Name }}({{ range .ClientArgs }}{{ .Name }} {{.TypeRef }}, {{ end }}) {{ .ReturnTypeRef }} {
	{{ .ClientCode }}
	return body
}
`
+
// input: InitData
// clientTypeInitT generates the constructor that builds a method result (or
// error result) from a decoded HTTP response on the client side. When
// ClientCode is present it produces a value v which is either returned
// directly or wrapped in the return type under ReturnTypeAttribute.
const clientTypeInitT = `{{ comment .Description }}
func {{ .Name }}({{- range .ClientArgs }}{{ .Name }} {{ .TypeRef }}, {{ end }}) {{ .ReturnTypeRef }} {
	{{- if .ClientCode }}
	{{ .ClientCode }}
	{{- if .ReturnTypeAttribute }}
	res := &{{ .ReturnTypeName }}{
		{{ .ReturnTypeAttribute }}: {{ if .ReturnIsPrimitivePointer }}&{{ end }}v,
	}
	{{- end }}
	{{- if .ReturnIsStruct }}
		{{- range .ClientArgs }}
			{{- if .FieldName }}
	{{ if $.ReturnTypeAttribute }}res{{ else }}v{{ end }}.{{ .FieldName }} = {{ if .Pointer }}&{{ end }}{{ .Name }}
			{{- end }}
		{{- end }}
	{{- end }}
	return {{ if .ReturnTypeAttribute }}res{{ else }}v{{ end }}
	{{- else }}
	{{- if .ReturnIsStruct }}
	return &{{ .ReturnTypeName }}{
		{{- range .ClientArgs }}
			{{- if .FieldName }}
		{{ .FieldName }}: {{ if .Pointer }}&{{ end }}{{ .Name }},
			{{- end }}
		{{- end }}
	}
	{{- end }}
	{{ end -}}
}
`
+
// input: service.InitData
// viewedResultTypeInitT generates the constructor converting between a
// service result type and its viewed (projected) counterpart.
// NOTE(review): not referenced in this file — presumably used by a sibling
// file of the package; confirm before removing.
const viewedResultTypeInitT = `{{ comment .Description }}
func {{ .Name }}({{ range .Args }}{{ .Name }} {{ .Ref }}, {{ end }}) {{ .ReturnRef }} {
	{{ .Code }}
}
`
diff --git a/vendor/goa.design/goa/http/codegen/example_cli.go b/vendor/goa.design/goa/http/codegen/example_cli.go
new file mode 100644
index 000000000..80ea8c6c7
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/example_cli.go
@@ -0,0 +1,160 @@
+package codegen
+
+import (
+ "os"
+ "path/filepath"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// ExampleCLIFiles returns an example client tool HTTP implementation for each
+// server expression.
+func ExampleCLIFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ var files []*codegen.File
+ for _, svr := range root.API.Servers {
+ if f := exampleCLI(genpkg, root, svr); f != nil {
+ files = append(files, f)
+ }
+ }
+ return files
+}
+
// exampleCLI returns an example client tool HTTP implementation for the given
// server expression. The generated cmd/<server>-cli/http.go is a user-editable
// scaffold: it is skipped (nil returned) when it already exists on disk.
func exampleCLI(genpkg string, root *expr.RootExpr, svr *expr.ServerExpr) *codegen.File {
	pkg := codegen.SnakeCase(codegen.Goify(svr.Name, true))
	apiPkg := strings.ToLower(codegen.Goify(root.API.Name, false))
	path := filepath.Join("cmd", pkg+"-cli", "http.go")
	if _, err := os.Stat(path); !os.IsNotExist(err) {
		return nil // file already exists, skip it.
	}
	// Compute the import path of the user (root) package: the parent of genpkg.
	// NOTE(review): genpkg is a Go import path which always uses "/";
	// string(os.PathSeparator) is `\` on Windows — confirm this is intended.
	idx := strings.LastIndex(genpkg, string(os.PathSeparator))
	rootPath := "."
	if idx > 0 {
		rootPath = genpkg[:idx]
	}
	specs := []*codegen.ImportSpec{
		{Path: "context"},
		{Path: "encoding/json"},
		{Path: "flag"},
		{Path: "fmt"},
		{Path: "net/http"},
		{Path: "net/url"},
		{Path: "os"},
		{Path: "strings"},
		{Path: "time"},
		{Path: "github.com/gorilla/websocket"},
		{Path: "goa.design/goa"},
		{Path: "goa.design/goa/http", Name: "goahttp"},
		{Path: genpkg + "/http/cli/" + pkg, Name: "cli"},
		{Path: rootPath, Name: apiPkg},
	}

	// Collect the HTTP data for every service hosted by this server.
	svcData := make([]*ServiceData, len(svr.Services))
	for i, svc := range svr.Services {
		svcData[i] = HTTPServices.Get(svc)
	}
	// Sections are emitted in order: header, doHTTP prologue, optional
	// websocket streaming setup, ParseEndpoint epilogue, usage helpers.
	sections := []*codegen.SectionTemplate{
		codegen.Header("", "main", specs),
		&codegen.SectionTemplate{Name: "cli-http-start", Source: httpCLIStartT},
		&codegen.SectionTemplate{
			Name:   "cli-http-streaming",
			Source: httpCLIStreamingT,
			Data: map[string]interface{}{
				"Services": svcData,
			},
			FuncMap: map[string]interface{}{
				"needStreaming": needStreaming,
			},
		},
		&codegen.SectionTemplate{
			Name:   "cli-http-end",
			Source: httpCLIEndT,
			Data: map[string]interface{}{
				"Services": svcData,
				"APIPkg":   apiPkg,
			},
			FuncMap: map[string]interface{}{
				"needStreaming": needStreaming,
			},
		},
		&codegen.SectionTemplate{Name: "cli-http-usage", Source: httpCLIUsageT},
	}
	return &codegen.File{
		Path:             path,
		SectionTemplates: sections,
		SkipExist:        true,
	}
}
+
+// needStreaming returns true if at least one endpoint in the service
+// uses stream for sending payload/result.
+func needStreaming(data []*ServiceData) bool {
+ for _, s := range data {
+ if streamingEndpointExists(s) {
+ return true
+ }
+ }
+ return false
+}
+
+const (
+ httpCLIStartT = `func doHTTP(scheme, host string, timeout int, debug bool) (goa.Endpoint, interface{}, error) {
+ var (
+ doer goahttp.Doer
+ )
+ {
+ doer = &http.Client{Timeout: time.Duration(timeout) * time.Second}
+ if debug {
+ doer = goahttp.NewDebugDoer(doer)
+ doer.(goahttp.DebugDoer).Fprint(os.Stderr)
+ }
+ }
+`
+
+ // input: map[string]interface{}{"Services": []*ServiceData}
+ httpCLIStreamingT = `{{- if needStreaming .Services }}
+ var (
+ dialer *websocket.Dialer
+ connConfigFn goahttp.ConnConfigureFunc
+ )
+ {
+ dialer = websocket.DefaultDialer
+ }
+ {{- end }}
+`
+
+ // input: map[string]interface{}{"Services": []*ServiceData}
+ httpCLIEndT = `return cli.ParseEndpoint(
+ scheme,
+ host,
+ doer,
+ goahttp.RequestEncoder,
+ goahttp.ResponseDecoder,
+ debug,
+ {{- if needStreaming .Services }}
+ dialer,
+ connConfigFn,
+ {{- end }}
+ {{- range .Services }}
+ {{- range .Endpoints }}
+ {{- if .MultipartRequestDecoder }}
+ {{ $.APIPkg }}.{{ .MultipartRequestEncoder.FuncName }},
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ )
+}
+`
+
+ httpCLIUsageT = `func httpUsageCommands() string {
+ return cli.UsageCommands()
+}
+
+func httpUsageExamples() string {
+ return cli.UsageExamples()
+}
+`
+)
diff --git a/vendor/goa.design/goa/http/codegen/example_server.go b/vendor/goa.design/goa/http/codegen/example_server.go
new file mode 100644
index 000000000..5948ad024
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/example_server.go
@@ -0,0 +1,318 @@
+package codegen
+
+import (
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// ExampleServerFiles returns an example http service implementation.
+func ExampleServerFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ var fw []*codegen.File
+ for _, svr := range root.API.Servers {
+ if m := exampleServer(genpkg, root, svr); m != nil {
+ fw = append(fw, m)
+ }
+ }
+ for _, svc := range root.API.HTTP.Services {
+ if f := dummyMultipartFile(genpkg, root, svc); f != nil {
+ fw = append(fw, f)
+ }
+ }
+ return fw
+}
+
// exampleServer returns an example HTTP server implementation. The generated
// cmd/<server>/http.go wires every HTTP service of the API into a single
// handleHTTPServer function; SkipExist preserves user edits on regeneration.
func exampleServer(genpkg string, root *expr.RootExpr, svr *expr.ServerExpr) *codegen.File {
	pkg := codegen.SnakeCase(codegen.Goify(svr.Name, true))
	fpath := filepath.Join("cmd", pkg, "http.go")
	// Compute the import path of the user (root) package: the parent of genpkg.
	// NOTE(review): genpkg is a Go import path ("/" separated); using
	// os.PathSeparator is suspicious on Windows — confirm intended.
	idx := strings.LastIndex(genpkg, string(os.PathSeparator))
	rootPath := "."
	if idx > 0 {
		rootPath = genpkg[:idx]
	}
	apiPkg := strings.ToLower(codegen.Goify(root.API.Name, false))
	specs := []*codegen.ImportSpec{
		{Path: "context"},
		{Path: "log"},
		{Path: "net/http"},
		{Path: "net/url"},
		{Path: "os"},
		{Path: "sync"},
		{Path: "time"},
		{Path: "goa.design/goa/http", Name: "goahttp"},
		{Path: "goa.design/goa/http/middleware"},
		{Path: "github.com/gorilla/websocket"},
		{Path: rootPath, Name: apiPkg},
	}

	// Import both the generated server package (aliased <pkg>svr) and the
	// service package for every HTTP service of the API.
	for _, svc := range root.API.HTTP.Services {
		pkgName := HTTPServices.Get(svc.Name()).Service.PkgName
		specs = append(specs, &codegen.ImportSpec{
			Path: path.Join(genpkg, "http", codegen.SnakeCase(svc.Name()), "server"),
			Name: pkgName + "svr",
		})
		specs = append(specs, &codegen.ImportSpec{
			Path: path.Join(genpkg, codegen.SnakeCase(svc.Name())),
			Name: pkgName,
		})
	}

	svcdata := make([]*ServiceData, len(svr.Services))
	for i, svc := range svr.Services {
		svcdata[i] = HTTPServices.Get(svc)
	}

	// Section order matters: it is the order in which code is emitted.
	sections := []*codegen.SectionTemplate{
		codegen.Header("", "main", specs),
		&codegen.SectionTemplate{
			Name:   "server-http-start",
			Source: httpSvrStartT,
			Data: map[string]interface{}{
				"Services": svcdata,
			},
		},
		&codegen.SectionTemplate{Name: "server-http-logger", Source: httpSvrLoggerT},
		&codegen.SectionTemplate{Name: "server-http-encoding", Source: httpSvrEncodingT},
		&codegen.SectionTemplate{Name: "server-http-mux", Source: httpSvrMuxT},
		&codegen.SectionTemplate{
			Name:   "server-http-init",
			Source: httpSvrInitT,
			Data: map[string]interface{}{
				"Services": svcdata,
				"APIPkg":   apiPkg,
			},
			FuncMap: map[string]interface{}{"needStream": needStream},
		},
		&codegen.SectionTemplate{Name: "server-http-middleware", Source: httpSvrMiddlewareT},
		&codegen.SectionTemplate{
			Name:   "server-http-end",
			Source: httpSvrEndT,
			Data: map[string]interface{}{
				"Services": svcdata,
			},
		},
		&codegen.SectionTemplate{Name: "server-http-errorhandler", Source: httpSvrErrorHandlerT},
	}

	return &codegen.File{Path: fpath, SectionTemplates: sections, SkipExist: true}
}
+
// dummyMultipartFile returns a dummy implementation of the multipart decoders
// and encoders: one user-editable stub per multipart request decoder/encoder
// of the given service, written to multipart.go in the current directory.
// It returns nil when the file already exists or no endpoint uses multipart.
func dummyMultipartFile(genpkg string, root *expr.RootExpr, svc *expr.HTTPServiceExpr) *codegen.File {
	mpath := "multipart.go"
	if _, err := os.Stat(mpath); !os.IsNotExist(err) {
		return nil // file already exists, skip it.
	}
	var (
		sections []*codegen.SectionTemplate
		mustGen  bool // set once at least one stub section is added

		apiPkg = strings.ToLower(codegen.Goify(root.API.Name, false))
	)
	{
		specs := []*codegen.ImportSpec{
			{Path: "mime/multipart"},
		}
		data := HTTPServices.Get(svc.Name())
		pkgName := data.Service.PkgName
		specs = append(specs, &codegen.ImportSpec{
			Path: path.Join(genpkg, codegen.SnakeCase(svc.Name())),
			Name: pkgName,
		})
		sections = []*codegen.SectionTemplate{codegen.Header("", apiPkg, specs)}
		for _, e := range data.Endpoints {
			if e.MultipartRequestDecoder != nil {
				mustGen = true
				sections = append(sections, &codegen.SectionTemplate{
					Name:   "dummy-multipart-request-decoder",
					Source: dummyMultipartRequestDecoderImplT,
					Data:   e.MultipartRequestDecoder,
				})
			}
			if e.MultipartRequestEncoder != nil {
				mustGen = true
				sections = append(sections, &codegen.SectionTemplate{
					Name:   "dummy-multipart-request-encoder",
					Source: dummyMultipartRequestEncoderImplT,
					Data:   e.MultipartRequestEncoder,
				})
			}
		}
	}
	if !mustGen {
		return nil
	}
	// NOTE(review): every service shares the same "multipart.go" path; with
	// SkipExist only the first such service generates stubs — confirm intended.
	return &codegen.File{
		Path:             mpath,
		SectionTemplates: sections,
		SkipExist:        true,
	}
}
+
+// needStream returns true if at least one method in the defined services
+// uses stream for sending payload/result.
+func needStream(data []*ServiceData) bool {
+ for _, svc := range data {
+ if streamingEndpointExists(svc) {
+ return true
+ }
+ }
+ return false
+}
+
const (
	// input: MultipartData
	// Stub body for a multipart request decoder; the user fills in the logic.
	dummyMultipartRequestDecoderImplT = `{{ printf "%s implements the multipart decoder for service %q endpoint %q. The decoder must populate the argument p after encoding." .FuncName .ServiceName .MethodName | comment }}
func {{ .FuncName }}(mr *multipart.Reader, p *{{ .Payload.Ref }}) error {
	// Add multipart request decoder logic here
	return nil
}
`

	// input: MultipartData
	// Stub body for a multipart request encoder; the user fills in the logic.
	dummyMultipartRequestEncoderImplT = `{{ printf "%s implements the multipart encoder for service %q endpoint %q." .FuncName .ServiceName .MethodName | comment }}
func {{ .FuncName }}(mw *multipart.Writer, p {{ .Payload.Ref }}) error {
	// Add multipart request encoder logic here
	return nil
}
`

	// input: map[string]interface{}{"Services":[]*ServiceData}
	// Opens handleHTTPServer with one endpoints argument per service that has methods.
	httpSvrStartT = `{{ comment "handleHTTPServer starts configures and starts a HTTP server on the given URL. It shuts down the server if any error is received in the error channel." }}
func handleHTTPServer(ctx context.Context, u *url.URL{{ range $.Services }}{{ if .Service.Methods }}, {{ .Service.VarName }}Endpoints *{{ .Service.PkgName }}.Endpoints{{ end }}{{ end }}, wg *sync.WaitGroup, errc chan error, logger *log.Logger, debug bool) {
`

	// Sets up the goa logger adapter around the user-provided *log.Logger.
	httpSvrLoggerT = `
	// Setup logger and goa log adapter. Replace logger with your own using
	// your log package of choice.
	var (
		adapter middleware.Logger
	)
	{
		adapter = middleware.NewLogger(logger)
	}
`

	// Declares the default JSON/XML/gob request decoder and response encoder.
	httpSvrEncodingT = `
	// Provide the transport specific request decoder and response encoder.
	// The goa http package has built-in support for JSON, XML and gob.
	// Other encodings can be used by providing the corresponding functions,
	// see goa.design/encoding.
	var (
		dec = goahttp.RequestDecoder
		enc = goahttp.ResponseEncoder
	)
`

	// Builds the HTTP request multiplexer.
	httpSvrMuxT = `
	// Build the service HTTP request multiplexer and configure it to serve
	// HTTP requests to the service endpoints.
	var mux goahttp.Muxer
	{
		mux = goahttp.NewMuxer()
	}
`

	// input: map[string]interface{}{"APIPkg":string, "Services":[]*ServiceData}
	// Instantiates one generated server per service (passing the websocket
	// upgrader only when some service streams) and mounts them on the mux.
	httpSvrInitT = `
	// Wrap the endpoints with the transport specific layers. The generated
	// server packages contains code generated from the design which maps
	// the service input and output data structures to HTTP requests and
	// responses.
	var (
	{{- range .Services }}
		{{ .Service.VarName }}Server *{{.Service.PkgName}}svr.Server
	{{- end }}
	)
	{
		eh := errorHandler(logger)
		{{- if needStream .Services }}
		upgrader := &websocket.Upgrader{}
		{{- end }}
		{{- range .Services }}
		{{- if .Endpoints }}
		{{ .Service.VarName }}Server = {{ .Service.PkgName }}svr.New({{ .Service.VarName }}Endpoints, mux, dec, enc, eh{{ if needStream $.Services }}, upgrader, nil{{ end }}{{ range .Endpoints }}{{ if .MultipartRequestDecoder }}, {{ $.APIPkg }}.{{ .MultipartRequestDecoder.FuncName }}{{ end }}{{ end }})
		{{- else }}
		{{ .Service.VarName }}Server = {{ .Service.PkgName }}svr.New(nil, mux, dec, enc, eh)
		{{- end }}
		{{- end }}
	}
	// Configure the mux.
	{{- range .Services }}
	{{ .Service.PkgName }}svr.Mount(mux{{ if .Endpoints }}, {{ .Service.VarName }}Server{{ end }})
	{{- end }}
`

	// Wraps the mux with the debug, logging and request-ID middlewares.
	httpSvrMiddlewareT = `
	// Wrap the multiplexer with additional middlewares. Middlewares mounted
	// here apply to all the service endpoints.
	var handler http.Handler = mux
	{
		if debug {
			handler = middleware.Debug(mux, os.Stdout)(handler)
		}
		handler = middleware.Log(adapter)(handler)
		handler = middleware.RequestID()(handler)
	}
`

	// input: map[string]interface{}{"Services":[]*ServiceData}
	// Starts the http.Server in a goroutine, logs the mounted routes and
	// shuts down gracefully (30s timeout) when the context is canceled.
	httpSvrEndT = `
	// Start HTTP server using default configuration, change the code to
	// configure the server as required by your service.
	srv := &http.Server{Addr: u.Host, Handler: handler}

	(*wg).Add(1)
	go func() {
		defer (*wg).Done()

		{{ comment "Start HTTP server in a separate goroutine." }}
		go func() {
		{{- range .Services }}
			for _, m := range {{ .Service.VarName }}Server.Mounts {
			{{- if .FileServers }}
				logger.Printf("file %q mounted on %s %s", m.Method, m.Verb, m.Pattern)
			{{- else }}
				logger.Printf("method %q mounted on %s %s", m.Method, m.Verb, m.Pattern)
			{{- end }}
			}
		{{- end }}

			logger.Printf("HTTP server listening on %q", u.Host)
			errc <- srv.ListenAndServe()
		}()

		select {
		case <-ctx.Done():
			logger.Printf("shutting down HTTP server at %q", u.Host)

			{{ comment "Shutdown gracefully with a 30s timeout." }}
			ctx, cancel := context.WithTimeout(ctx, 30*time.Second)
			defer cancel()

			srv.Shutdown(ctx)
			return
		}
	}()
}
`

	// Emits errorHandler which writes the request ID and error to the
	// response and log.
	// NOTE(review): the generated code type-asserts the request ID context
	// value to string without an ok-check — panics if the RequestID
	// middleware is not mounted; confirm acceptable for example code.
	httpSvrErrorHandlerT = `
// errorHandler returns a function that writes and logs the given error.
// The function also writes and logs the error unique ID so that it's possible
// to correlate.
func errorHandler(logger *log.Logger) func(context.Context, http.ResponseWriter, error) {
	return func(ctx context.Context, w http.ResponseWriter, err error) {
		id := ctx.Value(middleware.RequestIDKey).(string)
		w.Write([]byte("[" + id + "] encoding: " + err.Error()))
		logger.Printf("[%s] ERROR: %s", id, err.Error())
	}
}
`
)
diff --git a/vendor/goa.design/goa/http/codegen/funcs.go b/vendor/goa.design/goa/http/codegen/funcs.go
new file mode 100644
index 000000000..d3c7c43fd
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/funcs.go
@@ -0,0 +1,78 @@
+package codegen
+
+import (
+ "fmt"
+ "net/http"
+)
+
// statusCodeToHTTPConst produces the standard name for the given HTTP status
// code. If no standard name exists then the string consisting of the code
// integer value is returned.
func statusCodeToHTTPConst(statusCode int) string {
	v, ok := statusCodeToConst[statusCode]
	if !ok {
		return fmt.Sprintf("%d", statusCode)
	}
	return fmt.Sprintf("http.%s", v)
}

// statusCodeToConst maps each status code defined by net/http to the name of
// the corresponding exported constant.
var statusCodeToConst = map[int]string{
	http.StatusContinue:                      "StatusContinue",
	http.StatusSwitchingProtocols:            "StatusSwitchingProtocols",
	http.StatusProcessing:                    "StatusProcessing",
	http.StatusOK:                            "StatusOK",
	http.StatusCreated:                       "StatusCreated",
	http.StatusAccepted:                      "StatusAccepted",
	http.StatusNonAuthoritativeInfo:          "StatusNonAuthoritativeInfo",
	http.StatusNoContent:                     "StatusNoContent",
	http.StatusResetContent:                  "StatusResetContent",
	http.StatusPartialContent:                "StatusPartialContent",
	http.StatusMultiStatus:                   "StatusMultiStatus",
	http.StatusAlreadyReported:               "StatusAlreadyReported",
	http.StatusIMUsed:                        "StatusIMUsed",
	http.StatusMultipleChoices:               "StatusMultipleChoices",
	http.StatusMovedPermanently:              "StatusMovedPermanently",
	http.StatusFound:                         "StatusFound",
	http.StatusSeeOther:                      "StatusSeeOther",
	http.StatusNotModified:                   "StatusNotModified",
	http.StatusUseProxy:                      "StatusUseProxy",
	http.StatusTemporaryRedirect:             "StatusTemporaryRedirect",
	http.StatusPermanentRedirect:             "StatusPermanentRedirect",
	http.StatusBadRequest:                    "StatusBadRequest",
	http.StatusUnauthorized:                  "StatusUnauthorized",
	http.StatusPaymentRequired:               "StatusPaymentRequired",
	http.StatusForbidden:                     "StatusForbidden",
	http.StatusNotFound:                      "StatusNotFound",
	http.StatusMethodNotAllowed:              "StatusMethodNotAllowed",
	http.StatusNotAcceptable:                 "StatusNotAcceptable",
	http.StatusProxyAuthRequired:             "StatusProxyAuthRequired",
	http.StatusRequestTimeout:                "StatusRequestTimeout",
	http.StatusConflict:                      "StatusConflict",
	http.StatusGone:                          "StatusGone",
	http.StatusLengthRequired:                "StatusLengthRequired",
	http.StatusPreconditionFailed:            "StatusPreconditionFailed",
	http.StatusRequestEntityTooLarge:         "StatusRequestEntityTooLarge",
	http.StatusRequestURITooLong:             "StatusRequestURITooLong",
	http.StatusUnsupportedMediaType:          "StatusUnsupportedMediaType",
	http.StatusRequestedRangeNotSatisfiable:  "StatusRequestedRangeNotSatisfiable",
	http.StatusExpectationFailed:             "StatusExpectationFailed",
	http.StatusTeapot:                        "StatusTeapot",
	http.StatusUnprocessableEntity:           "StatusUnprocessableEntity",
	http.StatusLocked:                        "StatusLocked",
	http.StatusFailedDependency:              "StatusFailedDependency",
	http.StatusUpgradeRequired:               "StatusUpgradeRequired",
	http.StatusPreconditionRequired:          "StatusPreconditionRequired",
	http.StatusTooManyRequests:               "StatusTooManyRequests",
	http.StatusRequestHeaderFieldsTooLarge:   "StatusRequestHeaderFieldsTooLarge",
	http.StatusUnavailableForLegalReasons:    "StatusUnavailableForLegalReasons",
	http.StatusInternalServerError:           "StatusInternalServerError",
	http.StatusNotImplemented:                "StatusNotImplemented",
	http.StatusBadGateway:                    "StatusBadGateway",
	http.StatusServiceUnavailable:            "StatusServiceUnavailable",
	http.StatusGatewayTimeout:                "StatusGatewayTimeout",
	http.StatusHTTPVersionNotSupported:       "StatusHTTPVersionNotSupported",
	http.StatusVariantAlsoNegotiates:         "StatusVariantAlsoNegotiates",
	http.StatusInsufficientStorage:           "StatusInsufficientStorage",
	http.StatusLoopDetected:                  "StatusLoopDetected",
	http.StatusNotExtended:                   "StatusNotExtended",
	http.StatusNetworkAuthenticationRequired: "StatusNetworkAuthenticationRequired",
}
diff --git a/vendor/goa.design/goa/http/codegen/openapi.go b/vendor/goa.design/goa/http/codegen/openapi.go
new file mode 100644
index 000000000..f69cfd09d
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/openapi.go
@@ -0,0 +1,75 @@
+package codegen
+
+import (
+ "encoding/json"
+ "path/filepath"
+ "text/template"
+
+ "gopkg.in/yaml.v2"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+ "goa.design/goa/http/codegen/openapi"
+)
+
type (
	// openAPI is the OpenAPI spec file implementation.
	// NOTE(review): the spec field is not referenced in this file —
	// presumably kept for parity with other spec writers; confirm.
	openAPI struct {
		spec *openapi.V2
	}
)
+
// OpenAPIFiles returns the files for the OpenAPIFile spec of the given HTTP
// API: gen/http/openapi.json and gen/http/openapi.yaml, both rendered from
// the same OpenAPI v2 spec value.
func OpenAPIFiles(root *expr.RootExpr) ([]*codegen.File, error) {
	jsonPath := filepath.Join(codegen.Gendir, "http", "openapi.json")
	yamlPath := filepath.Join(codegen.Gendir, "http", "openapi.yaml")
	var (
		jsonSection *codegen.SectionTemplate
		yamlSection *codegen.SectionTemplate
	)
	{
		// NOTE(review): indexes Servers[0].Hosts[0] without a length check —
		// panics on a design with no server or host; confirm callers
		// guarantee at least one of each.
		spec, err := openapi.NewV2(root, root.API.Servers[0].Hosts[0])
		if err != nil {
			return nil, err
		}
		jsonSection = &codegen.SectionTemplate{
			Name:    "openapi",
			FuncMap: template.FuncMap{"toJSON": toJSON},
			Source:  "{{ toJSON .}}",
			Data:    spec,
		}
		yamlSection = &codegen.SectionTemplate{
			Name:    "openapi",
			FuncMap: template.FuncMap{"toYAML": toYAML},
			Source:  "{{ toYAML .}}",
			Data:    spec,
		}
	}

	return []*codegen.File{
		{
			Path:             jsonPath,
			SectionTemplates: []*codegen.SectionTemplate{jsonSection},
		},
		{
			Path:             yamlPath,
			SectionTemplates: []*codegen.SectionTemplate{yamlSection},
		},
	}, nil
}
+
// toJSON renders the given value as JSON. A marshaling failure can only be
// caused by a bug in the spec generation code, hence the panic.
func toJSON(d interface{}) string {
	b, err := json.Marshal(d)
	if err == nil {
		return string(b)
	}
	panic("openapi: " + err.Error()) // bug
}
+
+func toYAML(d interface{}) string {
+ b, err := yaml.Marshal(d)
+ if err != nil {
+ panic("openapi: " + err.Error()) // bug
+ }
+ return string(b)
+}
diff --git a/vendor/goa.design/goa/http/codegen/openapi/json_schema.go b/vendor/goa.design/goa/http/codegen/openapi/json_schema.go
new file mode 100644
index 000000000..eaed17604
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/openapi/json_schema.go
@@ -0,0 +1,592 @@
+package openapi
+
+import (
+ "encoding/json"
+ "fmt"
+ "reflect"
+ "strconv"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
type (
	// Schema represents an instance of a JSON schema.
	// See http://json-schema.org/documentation.html
	Schema struct {
		// Schema is the URI of the meta-schema ($schema); set by JSON() when
		// the schema is not a pure reference.
		Schema string `json:"$schema,omitempty" yaml:"$schema,omitempty"`
		// Core schema
		ID          string             `json:"id,omitempty" yaml:"id,omitempty"`
		Title       string             `json:"title,omitempty" yaml:"title,omitempty"`
		Type        Type               `json:"type,omitempty" yaml:"type,omitempty"`
		Items       *Schema            `json:"items,omitempty" yaml:"items,omitempty"`
		Properties  map[string]*Schema `json:"properties,omitempty" yaml:"properties,omitempty"`
		Definitions map[string]*Schema `json:"definitions,omitempty" yaml:"definitions,omitempty"`
		Description string             `json:"description,omitempty" yaml:"description,omitempty"`
		DefaultValue interface{}       `json:"default,omitempty" yaml:"default,omitempty"`
		Example     interface{}        `json:"example,omitempty" yaml:"example,omitempty"`

		// Hyper schema
		Media     *Media  `json:"media,omitempty" yaml:"media,omitempty"`
		ReadOnly  bool    `json:"readOnly,omitempty" yaml:"readOnly,omitempty"`
		PathStart string  `json:"pathStart,omitempty" yaml:"pathStart,omitempty"`
		Links     []*Link `json:"links,omitempty" yaml:"links,omitempty"`
		// Ref is a JSON reference ($ref) to another definition; when set it is
		// exclusive with the other fields (see buildAttributeSchema).
		Ref string `json:"$ref,omitempty" yaml:"$ref,omitempty"`

		// Validation
		Enum      []interface{} `json:"enum,omitempty" yaml:"enum,omitempty"`
		Format    string        `json:"format,omitempty" yaml:"format,omitempty"`
		Pattern   string        `json:"pattern,omitempty" yaml:"pattern,omitempty"`
		Minimum   *float64      `json:"minimum,omitempty" yaml:"minimum,omitempty"`
		Maximum   *float64      `json:"maximum,omitempty" yaml:"maximum,omitempty"`
		MinLength *int          `json:"minLength,omitempty" yaml:"minLength,omitempty"`
		MaxLength *int          `json:"maxLength,omitempty" yaml:"maxLength,omitempty"`
		MinItems  *int          `json:"minItems,omitempty" yaml:"minItems,omitempty"`
		MaxItems  *int          `json:"maxItems,omitempty" yaml:"maxItems,omitempty"`
		Required  []string      `json:"required,omitempty" yaml:"required,omitempty"`
		AdditionalProperties bool `json:"additionalProperties,omitempty" yaml:"additionalProperties,omitempty"`

		// Union
		// AnyOf lists alternative schemas when an endpoint can produce more
		// than one result type (see GenerateServiceDefinition).
		AnyOf []*Schema `json:"anyOf,omitempty" yaml:"anyOf,omitempty"`
	}

	// Type is the JSON type enum.
	Type string

	// Media represents a "media" field in a JSON hyper schema.
	Media struct {
		BinaryEncoding string `json:"binaryEncoding,omitempty" yaml:"binaryEncoding,omitempty"`
		Type           string `json:"type,omitempty" yaml:"type,omitempty"`
	}

	// Link represents a "link" field in a JSON hyper schema.
	Link struct {
		Title        string  `json:"title,omitempty" yaml:"title,omitempty"`
		Description  string  `json:"description,omitempty" yaml:"description,omitempty"`
		Rel          string  `json:"rel,omitempty" yaml:"rel,omitempty"`
		Href         string  `json:"href,omitempty" yaml:"href,omitempty"`
		Method       string  `json:"method,omitempty" yaml:"method,omitempty"`
		Schema       *Schema `json:"schema,omitempty" yaml:"schema,omitempty"`
		TargetSchema *Schema `json:"targetSchema,omitempty" yaml:"targetSchema,omitempty"`
		// ResultType is serialized as "mediaType" per the hyper-schema spec.
		ResultType string `json:"mediaType,omitempty" yaml:"mediaType,omitempty"`
		EncType    string `json:"encType,omitempty" yaml:"encType,omitempty"`
	}
)
+
// JSON schema type enum values.
//
// NOTE(review): only Array carries an explicit Type; the remaining constants
// are untyped string constants that implicitly convert where a Type is
// expected — confirm this is intentional before adding explicit types.
const (
	// Array represents a JSON array.
	Array Type = "array"
	// Boolean represents a JSON boolean.
	Boolean = "boolean"
	// Integer represents a JSON number without a fraction or exponent part.
	Integer = "integer"
	// Number represents any JSON number. Number includes integer.
	Number = "number"
	// Null represents the JSON null value.
	Null = "null"
	// Object represents a JSON object.
	Object = "object"
	// String represents a JSON string.
	String = "string"
	// File is an extension used by Swagger to represent a file download.
	File = "file"
)
+
+// SchemaRef is the JSON Hyper-schema standard href.
+const SchemaRef = "http://json-schema.org/draft-04/hyper-schema"
+
+var (
+ // Definitions contains the generated JSON schema definitions
+ Definitions map[string]*Schema
+)
+
+// Initialize the global variables
+func init() {
+ Definitions = make(map[string]*Schema)
+}
+
+// NewSchema instantiates a new JSON schema.
+func NewSchema() *Schema {
+ js := Schema{
+ Properties: make(map[string]*Schema),
+ Definitions: make(map[string]*Schema),
+ }
+ return &js
+}
+
// JSON serializes the schema into JSON.
// It makes sure the "$schema" standard field is set if needed prior to
// delegating to the standard JSON marshaler. Pure references ($ref set) do
// not carry the meta-schema URI.
func (s *Schema) JSON() ([]byte, error) {
	if s.Ref == "" {
		s.Schema = SchemaRef
	}
	return json.Marshal(s)
}
+
// APISchema produces the API JSON hyper schema.
func APISchema(api *expr.APIExpr, r *expr.RootExpr) *Schema {
	// Ensure the package Definitions map holds a schema for every HTTP
	// service before referencing them below.
	for _, res := range r.API.HTTP.Services {
		GenerateServiceDefinition(api, res)
	}
	// NOTE(review): assumes at least one server/host/URI is declared —
	// confirm api.Servers[0].Hosts[0].URIs[0] is always populated.
	href := string(api.Servers[0].Hosts[0].URIs[0])
	links := []*Link{
		{
			Href: href,
			Rel:  "self",
		},
		{
			Href:   "/schema",
			Method: "GET",
			// NOTE(review): rel "self" for the schema link mirrors the code as
			// written; verify it should not be "describedby".
			Rel: "self",
			TargetSchema: &Schema{
				Schema:               SchemaRef,
				AdditionalProperties: true,
			},
		},
	}
	s := Schema{
		ID:          fmt.Sprintf("%s/schema", href),
		Title:       api.Title,
		Description: api.Description,
		Type:        Object,
		Definitions: Definitions,
		Properties:  propertiesFromDefs(Definitions, "#/definitions/"),
		Links:       links,
	}
	return &s
}
+
+// GenerateServiceDefinition produces the JSON schema corresponding to the given
+// service. It stores the results in cachedSchema.
+func GenerateServiceDefinition(api *expr.APIExpr, res *expr.HTTPServiceExpr) {
+ s := NewSchema()
+ s.Description = res.Description()
+ s.Type = Object
+ s.Title = res.Name()
+ Definitions[res.Name()] = s
+ for _, a := range res.HTTPEndpoints {
+ var requestSchema *Schema
+ if a.MethodExpr.Payload.Type != expr.Empty {
+ requestSchema = AttributeTypeSchema(api, a.MethodExpr.Payload)
+ requestSchema.Description = a.Name() + " payload"
+ }
+ var targetSchema *Schema
+ var identifier string
+ for _, resp := range a.Responses {
+ dt := resp.Body.Type
+ if mt := dt.(*expr.ResultTypeExpr); mt != nil {
+ if identifier == "" {
+ identifier = mt.Identifier
+ } else {
+ identifier = ""
+ }
+ if targetSchema == nil {
+ targetSchema = TypeSchemaWithPrefix(api, mt, a.Name())
+ } else if targetSchema.AnyOf == nil {
+ firstSchema := targetSchema
+ targetSchema = NewSchema()
+ targetSchema.AnyOf = []*Schema{firstSchema, TypeSchemaWithPrefix(api, mt, a.Name())}
+ } else {
+ targetSchema.AnyOf = append(targetSchema.AnyOf, TypeSchemaWithPrefix(api, mt, a.Name()))
+ }
+ }
+ }
+ for i, r := range a.Routes {
+ for j, href := range toSchemaHrefs(r) {
+ link := Link{
+ Title: a.Name(),
+ Rel: a.Name(),
+ Href: href,
+ Method: r.Method,
+ Schema: requestSchema,
+ TargetSchema: targetSchema,
+ ResultType: identifier,
+ }
+ if i == 0 && j == 0 {
+ if ca := a.Service.CanonicalEndpoint(); ca != nil {
+ if ca.Name() == a.Name() {
+ link.Rel = "self"
+ }
+ }
+ }
+ s.Links = append(s.Links, &link)
+ }
+ }
+ }
+}
+
// ResultTypeRef produces the JSON reference to the media type definition with
// the given view. It is a convenience wrapper around ResultTypeRefWithPrefix
// with an empty prefix.
func ResultTypeRef(api *expr.APIExpr, mt *expr.ResultTypeExpr, view string) string {
	return ResultTypeRefWithPrefix(api, mt, view, "")
}
+
+// ResultTypeRefWithPrefix produces the JSON reference to the media type definition with
+// the given view and adds the provided prefix to the type name
+func ResultTypeRefWithPrefix(api *expr.APIExpr, mt *expr.ResultTypeExpr, view string, prefix string) string {
+ projected, err := expr.Project(mt, view)
+ if err != nil {
+ panic(fmt.Sprintf("failed to project media type %#v: %s", mt.Identifier, err)) // bug
+ }
+ if _, ok := Definitions[projected.TypeName]; !ok {
+ projected.TypeName = codegen.Goify(prefix, true) + projected.TypeName
+ GenerateResultTypeDefinition(api, projected, "default")
+ }
+ ref := fmt.Sprintf("#/definitions/%s", projected.TypeName)
+ return ref
+}
+
+// TypeRef produces the JSON reference to the type definition.
+func TypeRef(api *expr.APIExpr, ut *expr.UserTypeExpr) string {
+ if _, ok := Definitions[ut.TypeName]; !ok {
+ GenerateTypeDefinition(api, ut)
+ }
+ return fmt.Sprintf("#/definitions/%s", ut.TypeName)
+}
+
+// TypeRefWithPrefix produces the JSON reference to the type definition and adds the provided prefix
+// to the type name
+func TypeRefWithPrefix(api *expr.APIExpr, ut *expr.UserTypeExpr, prefix string) string {
+ typeName := ut.TypeName
+ if prefix != "" {
+ typeName = codegen.Goify(prefix, true) + codegen.Goify(ut.TypeName, true)
+ }
+ if _, ok := Definitions[typeName]; !ok {
+ GenerateTypeDefinitionWithName(api, ut, typeName)
+ }
+
+ return fmt.Sprintf("#/definitions/%s", typeName)
+}
+
+// GenerateResultTypeDefinition produces the JSON schema corresponding to the
+// given media type and given view.
+func GenerateResultTypeDefinition(api *expr.APIExpr, mt *expr.ResultTypeExpr, view string) {
+ if _, ok := Definitions[mt.TypeName]; ok {
+ return
+ }
+ s := NewSchema()
+ s.Title = fmt.Sprintf("Mediatype identifier: %s", mt.Identifier)
+ Definitions[mt.TypeName] = s
+ buildResultTypeSchema(api, mt, view, s)
+}
+
// GenerateTypeDefinition produces the JSON schema corresponding to the given
// type. It is a convenience wrapper around GenerateTypeDefinitionWithName
// using the type's own name.
func GenerateTypeDefinition(api *expr.APIExpr, ut *expr.UserTypeExpr) {
	GenerateTypeDefinitionWithName(api, ut, ut.TypeName)
}
+
+// GenerateTypeDefinitionWithName produces the JSON schema corresponding to the given
+// type with provided type name.
+func GenerateTypeDefinitionWithName(api *expr.APIExpr, ut *expr.UserTypeExpr, typeName string) {
+ if _, ok := Definitions[typeName]; ok {
+ return
+ }
+ s := NewSchema()
+
+ s.Title = typeName
+ Definitions[typeName] = s
+ buildAttributeSchema(api, s, ut.AttributeExpr)
+}
+
// TypeSchema produces the JSON schema corresponding to the given data type.
// It is a convenience wrapper around TypeSchemaWithPrefix with an empty
// prefix.
func TypeSchema(api *expr.APIExpr, t expr.DataType) *Schema {
	return TypeSchemaWithPrefix(api, t, "")
}
+
// TypeSchemaWithPrefix produces the JSON schema corresponding to the given data type
// and adds the provided prefix to the type name
func TypeSchemaWithPrefix(api *expr.APIExpr, t expr.DataType, prefix string) *Schema {
	s := NewSchema()
	switch actual := t.(type) {
	case expr.Primitive:
		// "any" maps to a schema with no type constraint.
		if name := actual.Name(); name != "any" {
			s.Type = Type(actual.Name())
		}
		// Refine the generic name with the JSON Schema type/format pair
		// matching the primitive's kind (overrides the assignment above).
		switch actual.Kind() {
		case expr.IntKind, expr.Int64Kind,
			expr.UIntKind, expr.UInt64Kind:
			s.Type = Type("integer")
			s.Format = "int64"
		case expr.Int32Kind, expr.UInt32Kind:
			s.Type = Type("integer")
			s.Format = "int32"
		case expr.Float32Kind:
			s.Type = Type("number")
			s.Format = "float"
		case expr.Float64Kind:
			s.Type = Type("number")
			s.Format = "double"
		case expr.BytesKind:
			s.Type = Type("string")
			s.Format = "byte"
		}
	case *expr.Array:
		s.Type = Array
		s.Items = NewSchema()
		buildAttributeSchema(api, s.Items, actual.ElemType)
	case *expr.Object:
		s.Type = Object
		// One property schema per object attribute.
		for _, nat := range *actual {
			prop := NewSchema()
			buildAttributeSchema(api, prop, nat.Attribute)
			s.Properties[nat.Name] = prop
		}
	case *expr.Map:
		// Maps are rendered as free-form objects: the key and element types
		// are not reflected in the schema.
		s.Type = Object
		s.AdditionalProperties = true
	case *expr.UserTypeExpr:
		// User and result types become $ref pointers into Definitions.
		s.Ref = TypeRefWithPrefix(api, actual, prefix)
	case *expr.ResultTypeExpr:
		// Use "default" view by default
		s.Ref = ResultTypeRefWithPrefix(api, actual, expr.DefaultView, prefix)
	}
	return s
}
+
// mergeItems is the list of scalar fields processed by Schema.Merge: each
// entry pairs a pointer to the destination field (a) with the source value
// (b) and a flag telling whether the copy should be performed.
type mergeItems []struct {
	a, b interface{}
	needed bool
}
+
// createMergeItems builds the merge plan used by Merge: for each scalar field
// it records a pointer to s's field, other's value, and whether the copy is
// needed — generally when s's field is unset, or for the numeric bounds when
// other's bound is set and looser than s's.
func (s *Schema) createMergeItems(other *Schema) mergeItems {
	// Each helper returns true when b is set and either a is unset or a is
	// stricter than b (greater for minima, smaller for maxima).
	minInt := func(a, b *int) bool { return (a == nil && b != nil) || (a != nil && b != nil && *a > *b) }
	maxInt := func(a, b *int) bool { return (a == nil && b != nil) || (a != nil && b != nil && *a < *b) }
	minFloat64 := func(a, b *float64) bool { return (a == nil && b != nil) || (a != nil && b != nil && *a > *b) }
	maxFloat64 := func(a, b *float64) bool { return (a == nil && b != nil) || (a != nil && b != nil && *a < *b) }

	return mergeItems{
		{&s.ID, other.ID, s.ID == ""},
		{&s.Type, other.Type, s.Type == ""},
		{&s.Ref, other.Ref, s.Ref == ""},
		{&s.Items, other.Items, s.Items == nil},
		{&s.DefaultValue, other.DefaultValue, s.DefaultValue == nil},
		{&s.Title, other.Title, s.Title == ""},
		{&s.Media, other.Media, s.Media == nil},
		{&s.ReadOnly, other.ReadOnly, s.ReadOnly == false},
		{&s.PathStart, other.PathStart, s.PathStart == ""},
		{&s.Enum, other.Enum, s.Enum == nil},
		{&s.Format, other.Format, s.Format == ""},
		{&s.Pattern, other.Pattern, s.Pattern == ""},
		{&s.AdditionalProperties, other.AdditionalProperties, s.AdditionalProperties == false},
		{&s.Minimum, other.Minimum, minFloat64(s.Minimum, other.Minimum)},
		{&s.Maximum, other.Maximum, maxFloat64(s.Maximum, other.Maximum)},
		{&s.MinLength, other.MinLength, minInt(s.MinLength, other.MinLength)},
		{&s.MaxLength, other.MaxLength, maxInt(s.MaxLength, other.MaxLength)},
		{&s.MinItems, other.MinItems, minInt(s.MinItems, other.MinItems)},
		{&s.MaxItems, other.MaxItems, maxInt(s.MaxItems, other.MaxItems)},
	}
}
+
+// Merge does a two level deep merge of other into s.
+func (s *Schema) Merge(other *Schema) {
+ items := s.createMergeItems(other)
+ for _, v := range items {
+ if v.needed && v.b != nil {
+ reflect.Indirect(reflect.ValueOf(v.a)).Set(reflect.ValueOf(v.b))
+ }
+ }
+
+ for n, p := range other.Properties {
+ if _, ok := s.Properties[n]; !ok {
+ if s.Properties == nil {
+ s.Properties = make(map[string]*Schema)
+ }
+ s.Properties[n] = p
+ }
+ }
+
+ for n, d := range other.Definitions {
+ if _, ok := s.Definitions[n]; !ok {
+ s.Definitions[n] = d
+ }
+ }
+
+ for _, l := range other.Links {
+ s.Links = append(s.Links, l)
+ }
+
+ for _, r := range other.Required {
+ s.Required = append(s.Required, r)
+ }
+}
+
+// Dup creates a shallow clone of the given schema.
+func (s *Schema) Dup() *Schema {
+ js := Schema{
+ ID: s.ID,
+ Description: s.Description,
+ Schema: s.Schema,
+ Type: s.Type,
+ DefaultValue: s.DefaultValue,
+ Title: s.Title,
+ Media: s.Media,
+ ReadOnly: s.ReadOnly,
+ PathStart: s.PathStart,
+ Links: s.Links,
+ Ref: s.Ref,
+ Enum: s.Enum,
+ Format: s.Format,
+ Pattern: s.Pattern,
+ Minimum: s.Minimum,
+ Maximum: s.Maximum,
+ MinLength: s.MinLength,
+ MaxLength: s.MaxLength,
+ MinItems: s.MinItems,
+ MaxItems: s.MaxItems,
+ Required: s.Required,
+ AdditionalProperties: s.AdditionalProperties,
+ }
+ for n, p := range s.Properties {
+ js.Properties[n] = p.Dup()
+ }
+ if s.Items != nil {
+ js.Items = s.Items.Dup()
+ }
+ for n, d := range s.Definitions {
+ js.Definitions[n] = d.Dup()
+ }
+ return &js
+}
+
// buildAttributeSchema initializes the given JSON schema that corresponds to
// the given attribute: it merges in the schema of the attribute's type then
// layers the attribute-specific metadata (default, description, example,
// validations) on top. Returns s for chaining.
func buildAttributeSchema(api *expr.APIExpr, s *Schema, at *expr.AttributeExpr) *Schema {
	s.Merge(TypeSchema(api, at.Type))
	if s.Ref != "" {
		// Ref is exclusive with other fields
		return s
	}
	s.DefaultValue = toStringMap(at.DefaultValue)
	s.Description = at.Description
	s.Example = at.Example(api.Random())
	initAttributeValidation(s, at)

	return s
}
+
// initAttributeValidation initializes validation rules for an attribute.
// No-op when the attribute declares no validations.
func initAttributeValidation(s *Schema, at *expr.AttributeExpr) {
	val := at.Validation
	if val == nil {
		return
	}
	s.Enum = val.Values
	s.Format = string(val.Format)
	s.Pattern = val.Pattern
	if val.Minimum != nil {
		s.Minimum = val.Minimum
	}
	if val.Maximum != nil {
		s.Maximum = val.Maximum
	}
	// For arrays, MinLength/MaxLength constrain the number of items rather
	// than the string length.
	if val.MinLength != nil {
		if _, ok := at.Type.(*expr.Array); ok {
			s.MinItems = val.MinLength
		} else {
			s.MinLength = val.MinLength
		}
	}
	if val.MaxLength != nil {
		if _, ok := at.Type.(*expr.Array); ok {
			s.MaxItems = val.MaxLength
		} else {
			s.MaxLength = val.MaxLength
		}
	}
	s.Required = val.Required
}
+
// AttributeTypeSchema produces the JSON schema corresponding to the given
// attribute. It is a convenience wrapper around
// AttributeTypeSchemaWithPrefix with an empty prefix.
func AttributeTypeSchema(api *expr.APIExpr, at *expr.AttributeExpr) *Schema {
	return AttributeTypeSchemaWithPrefix(api, at, "")
}
+
// AttributeTypeSchemaWithPrefix produces the JSON schema corresponding to the given attribute
// and adds the provided prefix to the type name. The attribute's validation
// rules are applied on top of the type schema.
func AttributeTypeSchemaWithPrefix(api *expr.APIExpr, at *expr.AttributeExpr, prefix string) *Schema {
	s := TypeSchemaWithPrefix(api, at.Type, prefix)
	initAttributeValidation(s, at)
	return s
}
+
// toStringMap converts map[interface{}]interface{} to a map[string]interface{}
// when possible, recursing into nested maps and slices. Any other value is
// returned unchanged.
func toStringMap(val interface{}) interface{} {
	switch v := val.(type) {
	case map[interface{}]interface{}:
		res := make(map[string]interface{}, len(v))
		for key, elem := range v {
			res[toString(key)] = toStringMap(elem)
		}
		return res
	case []interface{}:
		res := make([]interface{}, len(v))
		for i, elem := range v {
			res[i] = toStringMap(elem)
		}
		return res
	}
	return val
}

// toString returns the string representation of the given type. It panics on
// key types other than string, int, float64 and bool.
func toString(val interface{}) string {
	switch v := val.(type) {
	case string:
		return v
	case int:
		return strconv.Itoa(v)
	case float64:
		return strconv.FormatFloat(v, 'f', -1, 64)
	case bool:
		return strconv.FormatBool(v)
	}
	panic("unexpected key type")
}
+
// toSchemaHrefs produces hrefs that replace the path wildcards with JSON
// schema references when appropriate: each route wildcard is rewritten as
// "/{param}" in URI-template style.
func toSchemaHrefs(r *expr.RouteExpr) []string {
	paths := r.FullPaths()
	res := make([]string, len(paths))
	for i, path := range paths {
		params := expr.ExtractRouteWildcards(path)
		args := make([]interface{}, len(params))
		for j, p := range params {
			args[j] = fmt.Sprintf("/{%s}", p)
		}
		// Turn each wildcard into a %s verb then substitute the "/{param}"
		// fragments positionally.
		tmpl := expr.HTTPWildcardRegex.ReplaceAllLiteralString(path, "%s")
		res[i] = fmt.Sprintf(tmpl, args...)
	}
	return res
}
+
+// propertiesFromDefs creates a Properties map referencing the given definitions
+// under the given path.
+func propertiesFromDefs(definitions map[string]*Schema, path string) map[string]*Schema {
+ res := make(map[string]*Schema, len(definitions))
+ for n := range definitions {
+ if n == "identity" {
+ continue
+ }
+ s := NewSchema()
+ s.Ref = path + n
+ res[n] = s
+ }
+ return res
+}
+
// buildResultTypeSchema initializes s as the JSON schema representing mt for the
// given view: the media type identifier is recorded in the "media" field and
// the schema of the projected attribute is merged in.
func buildResultTypeSchema(api *expr.APIExpr, mt *expr.ResultTypeExpr, view string, s *Schema) {
	s.Media = &Media{Type: mt.Identifier}
	projected, err := expr.Project(mt, view)
	if err != nil {
		panic(fmt.Sprintf("failed to project media type %#v: %s", mt.Identifier, err)) // bug
	}
	buildAttributeSchema(api, s, projected.AttributeExpr)
}
diff --git a/vendor/goa.design/goa/http/codegen/openapi/openapi_v2.go b/vendor/goa.design/goa/http/codegen/openapi/openapi_v2.go
new file mode 100644
index 000000000..f2a7b16d8
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/openapi/openapi_v2.go
@@ -0,0 +1,409 @@
+package openapi
+
+import (
+ "encoding/json"
+
+ "goa.design/goa/expr"
+ yaml "gopkg.in/yaml.v2"
+)
+
+type (
+ // V2 represents an instance of a Swagger object.
+ // See https://github.com/OAI/OpenAPI-Specification
+ V2 struct {
+ Swagger string `json:"swagger,omitempty" yaml:"swagger,omitempty"`
+ Info *Info `json:"info,omitempty" yaml:"info,omitempty"`
+ Host string `json:"host,omitempty" yaml:"host,omitempty"`
+ BasePath string `json:"basePath,omitempty" yaml:"basePath,omitempty"`
+ Schemes []string `json:"schemes,omitempty" yaml:"schemes,omitempty"`
+ Consumes []string `json:"consumes,omitempty" yaml:"consumes,omitempty"`
+ Produces []string `json:"produces,omitempty" yaml:"produces,omitempty"`
+ Paths map[string]interface{} `json:"paths" yaml:"paths"`
+ Definitions map[string]*Schema `json:"definitions,omitempty" yaml:"definitions,omitempty"`
+ Parameters map[string]*Parameter `json:"parameters,omitempty" yaml:"parameters,omitempty"`
+ Responses map[string]*Response `json:"responses,omitempty" yaml:"responses,omitempty"`
+ SecurityDefinitions map[string]*SecurityDefinition `json:"securityDefinitions,omitempty" yaml:"securityDefinitions,omitempty"`
+ Tags []*Tag `json:"tags,omitempty" yaml:"tags,omitempty"`
+ ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"`
+ }
+
+ // Info provides metadata about the API. The metadata can be used by the clients if needed,
+ // and can be presented in the OpenAPI UI for convenience.
+ Info struct {
+ Title string `json:"title,omitempty" yaml:"title,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ TermsOfService string `json:"termsOfService,omitempty" yaml:"termsOfService,omitempty"`
+ Contact *expr.ContactExpr `json:"contact,omitempty" yaml:"contact,omitempty"`
+ License *expr.LicenseExpr `json:"license,omitempty" yaml:"license,omitempty"`
+ Version string `json:"version" yaml:"version"`
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+ }
+
+ // Path holds the relative paths to the individual endpoints.
+ Path struct {
+ // Ref allows for an external definition of this path item.
+ Ref string `json:"$ref,omitempty" yaml:"$ref,omitempty"`
+ // Get defines a GET operation on this path.
+ Get *Operation `json:"get,omitempty" yaml:"get,omitempty"`
+ // Put defines a PUT operation on this path.
+ Put *Operation `json:"put,omitempty" yaml:"put,omitempty"`
+ // Post defines a POST operation on this path.
+ Post *Operation `json:"post,omitempty" yaml:"post,omitempty"`
+ // Delete defines a DELETE operation on this path.
+ Delete *Operation `json:"delete,omitempty" yaml:"delete,omitempty"`
+ // Options defines a OPTIONS operation on this path.
+ Options *Operation `json:"options,omitempty" yaml:"options,omitempty"`
+ // Head defines a HEAD operation on this path.
+ Head *Operation `json:"head,omitempty" yaml:"head,omitempty"`
+ // Patch defines a PATCH operation on this path.
+ Patch *Operation `json:"patch,omitempty" yaml:"patch,omitempty"`
+ // Parameters is the list of parameters that are applicable for all the operations
+ // described under this path.
+ Parameters []*Parameter `json:"parameters,omitempty" yaml:"parameters,omitempty"`
+ // Extensions defines the swagger extensions.
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+ }
+
+ // Operation describes a single API operation on a path.
+ Operation struct {
+ // Tags is a list of tags for API documentation control. Tags
+ // can be used for logical grouping of operations by services or
+ // any other qualifier.
+ Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"`
+ // Summary is a short summary of what the operation does. For maximum readability
+ // in the swagger-ui, this field should be less than 120 characters.
+ Summary string `json:"summary,omitempty" yaml:"summary,omitempty"`
+ // Description is a verbose explanation of the operation behavior.
+ // GFM syntax can be used for rich text representation.
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ // ExternalDocs points to additional external documentation for this operation.
+ ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"`
+ // OperationID is a unique string used to identify the operation.
+ OperationID string `json:"operationId,omitempty" yaml:"operationId,omitempty"`
+ // Consumes is a list of MIME types the operation can consume.
+ Consumes []string `json:"consumes,omitempty" yaml:"consumes,omitempty"`
+ // Produces is a list of MIME types the operation can produce.
+ Produces []string `json:"produces,omitempty" yaml:"produces,omitempty"`
+ // Parameters is a list of parameters that are applicable for this operation.
+ Parameters []*Parameter `json:"parameters,omitempty" yaml:"parameters,omitempty"`
+ // Responses is the list of possible responses as they are returned from executing
+ // this operation.
+ Responses map[string]*Response `json:"responses,omitempty" yaml:"responses,omitempty"`
+ // Schemes is the transfer protocol for the operation.
+ Schemes []string `json:"schemes,omitempty" yaml:"schemes,omitempty"`
+ // Deprecated declares this operation to be deprecated.
+ Deprecated bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"`
+ // Security is a declaration of which security schemes are applied for this operation.
+ Security []map[string][]string `json:"security,omitempty" yaml:"security,omitempty"`
+ // Extensions defines the swagger extensions.
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+ }
+
+ // Parameter describes a single operation parameter.
+ Parameter struct {
+ // Name of the parameter. Parameter names are case sensitive.
+ Name string `json:"name" yaml:"name"`
+ // In is the location of the parameter.
+ // Possible values are "query", "header", "path", "formData" or "body".
+ In string `json:"in" yaml:"in"`
+ // Description is a brief description of the parameter.
+ // GFM syntax can be used for rich text representation.
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ // Required determines whether this parameter is mandatory.
+ Required bool `json:"required" yaml:"required"`
+ // Schema defining the type used for the body parameter, only if "in" is body
+ Schema *Schema `json:"schema,omitempty" yaml:"schema,omitempty"`
+
+ // properties below only apply if "in" is not body
+
+ // Type of the parameter. Since the parameter is not located at the request body,
+ // it is limited to simple types (that is, not an object).
+ Type string `json:"type,omitempty" yaml:"type,omitempty"`
+ // Format is the extending format for the previously mentioned type.
+ Format string `json:"format,omitempty" yaml:"format,omitempty"`
+ // AllowEmptyValue sets the ability to pass empty-valued parameters.
+ // This is valid only for either query or formData parameters and allows you to
+ // send a parameter with a name only or an empty value. Default value is false.
+ AllowEmptyValue bool `json:"allowEmptyValue,omitempty" yaml:"allowEmptyValue,omitempty"`
+ // Items describes the type of items in the array if type is "array".
+ Items *Items `json:"items,omitempty" yaml:"items,omitempty"`
+ // CollectionFormat determines the format of the array if type array is used.
+ // Possible values are csv, ssv, tsv, pipes and multi.
+ CollectionFormat string `json:"collectionFormat,omitempty" yaml:"collectionFormat,omitempty"`
+ // Default declares the value of the parameter that the server will use if none is
+ // provided, for example a "count" to control the number of results per page might
+ // default to 100 if not supplied by the client in the request.
+ Default interface{} `json:"default,omitempty" yaml:"default,omitempty"`
+ Maximum *float64 `json:"maximum,omitempty" yaml:"maximum,omitempty"`
+ ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty" yaml:"exclusiveMaximum,omitempty"`
+ Minimum *float64 `json:"minimum,omitempty" yaml:"minimum,omitempty"`
+ ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty" yaml:"exclusiveMinimum,omitempty"`
+ MaxLength *int `json:"maxLength,omitempty" yaml:"maxLength,omitempty"`
+ MinLength *int `json:"minLength,omitempty" yaml:"minLength,omitempty"`
+ Pattern string `json:"pattern,omitempty" yaml:"pattern,omitempty"`
+ MaxItems *int `json:"maxItems,omitempty" yaml:"maxItems,omitempty"`
+ MinItems *int `json:"minItems,omitempty" yaml:"minItems,omitempty"`
+ UniqueItems bool `json:"uniqueItems,omitempty" yaml:"uniqueItems,omitempty"`
+ Enum []interface{} `json:"enum,omitempty" yaml:"enum,omitempty"`
+ MultipleOf float64 `json:"multipleOf,omitempty" yaml:"multipleOf,omitempty"`
+ // Extensions defines the swagger extensions.
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+ }
+
+ // Response describes an operation response.
+ Response struct {
+ // Description of the response. GFM syntax can be used for rich text representation.
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ // Schema is a definition of the response structure. It can be a primitive,
+ // an array or an object. If this field does not exist, it means no content is
+ // returned as part of the response. As an extension to the Schema Object, its root
+ // type value may also be "file".
+ Schema *Schema `json:"schema,omitempty" yaml:"schema,omitempty"`
+ // Headers is a list of headers that are sent with the response.
+ Headers map[string]*Header `json:"headers,omitempty" yaml:"headers,omitempty"`
+ // Ref references a global API response.
+ // This field is exclusive with the other fields of Response.
+ Ref string `json:"$ref,omitempty" yaml:"$ref,omitempty"`
+ // Extensions defines the swagger extensions.
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+ }
+
+ // Header represents a header parameter.
+ Header struct {
+ // Description is a brief description of the parameter.
+ // GFM syntax can be used for rich text representation.
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ // Type of the header. it is limited to simple types (that is, not an object).
+ Type string `json:"type,omitempty" yaml:"type,omitempty"`
+ // Format is the extending format for the previously mentioned type.
+ Format string `json:"format,omitempty" yaml:"format,omitempty"`
+ // Items describes the type of items in the array if type is "array".
+ Items *Items `json:"items,omitempty" yaml:"items,omitempty"`
+ // CollectionFormat determines the format of the array if type array is used.
+ // Possible values are csv, ssv, tsv, pipes and multi.
+ CollectionFormat string `json:"collectionFormat,omitempty" yaml:"collectionFormat,omitempty"`
+ // Default declares the value of the parameter that the server will use if none is
+ // provided, for example a "count" to control the number of results per page might
+ // default to 100 if not supplied by the client in the request.
+ Default interface{} `json:"default,omitempty" yaml:"default,omitempty"`
+ Maximum *float64 `json:"maximum,omitempty" yaml:"maximum,omitempty"`
+ ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty" yaml:"exclusiveMaximum,omitempty"`
+ Minimum *float64 `json:"minimum,omitempty" yaml:"minimum,omitempty"`
+ ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty" yaml:"exclusiveMinimum,omitempty"`
+ MaxLength *int `json:"maxLength,omitempty" yaml:"maxLength,omitempty"`
+ MinLength *int `json:"minLength,omitempty" yaml:"minLength,omitempty"`
+ Pattern string `json:"pattern,omitempty" yaml:"pattern,omitempty"`
+ MaxItems *int `json:"maxItems,omitempty" yaml:"maxItems,omitempty"`
+ MinItems *int `json:"minItems,omitempty" yaml:"minItems,omitempty"`
+ UniqueItems bool `json:"uniqueItems,omitempty" yaml:"uniqueItems,omitempty"`
+ Enum []interface{} `json:"enum,omitempty" yaml:"enum,omitempty"`
+ MultipleOf float64 `json:"multipleOf,omitempty" yaml:"multipleOf,omitempty"`
+ }
+
+ // SecurityDefinition allows the definition of a security scheme that can be used by the
+ // operations. Supported schemes are basic authentication, an API key (either as a header or
+ // as a query parameter) and OAuth2's common flows (implicit, password, application and
+ // access code).
+ SecurityDefinition struct {
+ // Type of the security scheme. Valid values are "basic", "apiKey" or "oauth2".
+ Type string `json:"type" yaml:"type"`
+ // Description for security scheme
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ // Name of the header or query parameter to be used when type is "apiKey".
+ Name string `json:"name,omitempty" yaml:"name,omitempty"`
+ // In is the location of the API key when type is "apiKey".
+ // Valid values are "query" or "header".
+ In string `json:"in,omitempty" yaml:"in,omitempty"`
+ // Flow is the flow used by the OAuth2 security scheme when type is "oauth2"
+ // Valid values are "implicit", "password", "application" or "accessCode".
+ Flow string `json:"flow,omitempty" yaml:"flow,omitempty"`
+ // The oauth2 authorization URL to be used for this flow.
+ AuthorizationURL string `json:"authorizationUrl,omitempty" yaml:"authorizationUrl,omitempty"`
+ // TokenURL is the token URL to be used for this flow.
+ TokenURL string `json:"tokenUrl,omitempty" yaml:"tokenUrl,omitempty"`
+ // Scopes list the available scopes for the OAuth2 security scheme.
+ Scopes map[string]string `json:"scopes,omitempty" yaml:"scopes,omitempty"`
+ // Extensions defines the swagger extensions.
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+ }
+
+ // Scope corresponds to an available scope for an OAuth2 security scheme.
+ Scope struct {
+ // Description for scope
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ }
+
+ // ExternalDocs allows referencing an external document for extended
+ // documentation.
+ ExternalDocs struct {
+ // Description is a short description of the target documentation.
+ // GFM syntax can be used for rich text representation.
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ // URL for the target documentation.
+ URL string `json:"url" yaml:"url"`
+ }
+
+ // Items is a limited subset of JSON-Schema's items object. It is used by parameter
+ // definitions that are not located in "body".
+ Items struct {
+ // Type of the items. it is limited to simple types (that is, not an object).
+ Type string `json:"type,omitempty" yaml:"type,omitempty"`
+ // Format is the extending format for the previously mentioned type.
+ Format string `json:"format,omitempty" yaml:"format,omitempty"`
+ // Items describes the type of items in the array if type is "array".
+ Items *Items `json:"items,omitempty" yaml:"items,omitempty"`
+ // CollectionFormat determines the format of the array if type array is used.
+ // Possible values are csv, ssv, tsv, pipes and multi.
+ CollectionFormat string `json:"collectionFormat,omitempty" yaml:"collectionFormat,omitempty"`
+ // Default declares the value of the parameter that the server will use if none is
+ // provided, for example a "count" to control the number of results per page might
+ // default to 100 if not supplied by the client in the request.
+ Default interface{} `json:"default,omitempty" yaml:"default,omitempty"`
+ Maximum *float64 `json:"maximum,omitempty" yaml:"maximum,omitempty"`
+ ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty" yaml:"exclusiveMaximum,omitempty"`
+ Minimum *float64 `json:"minimum,omitempty" yaml:"minimum,omitempty"`
+ ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty" yaml:"exclusiveMinimum,omitempty"`
+ MaxLength *int `json:"maxLength,omitempty" yaml:"maxLength,omitempty"`
+ MinLength *int `json:"minLength,omitempty" yaml:"minLength,omitempty"`
+ Pattern string `json:"pattern,omitempty" yaml:"pattern,omitempty"`
+ MaxItems *int `json:"maxItems,omitempty" yaml:"maxItems,omitempty"`
+ MinItems *int `json:"minItems,omitempty" yaml:"minItems,omitempty"`
+ UniqueItems bool `json:"uniqueItems,omitempty" yaml:"uniqueItems,omitempty"`
+ Enum []interface{} `json:"enum,omitempty" yaml:"enum,omitempty"`
+ MultipleOf float64 `json:"multipleOf,omitempty" yaml:"multipleOf,omitempty"`
+ }
+
+ // Tag allows adding meta data to a single tag that is used by the Operation Object. It is
+ // not mandatory to have a Tag Object per tag used there.
+ Tag struct {
+ // Name of the tag.
+ Name string `json:"name,omitempty" yaml:"name,omitempty"`
+ // Description is a short description of the tag.
+ // GFM syntax can be used for rich text representation.
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ // ExternalDocs is additional external documentation for this tag.
+ ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"`
+ // Extensions defines the swagger extensions.
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+ }
+
+ // These types are used in marshalJSON() to avoid recursive call of json.Marshal().
+ _Info Info
+ _Path Path
+ _Operation Operation
+ _Parameter Parameter
+ _Response Response
+ _SecurityDefinition SecurityDefinition
+ _Tag Tag
+)
+
+func marshalJSON(v interface{}, extensions map[string]interface{}) ([]byte, error) { // encode v to JSON, then splice the swagger extensions into the top-level object
+	marshaled, err := json.Marshal(v)
+	if err != nil {
+		return nil, err
+	}
+	if len(extensions) == 0 {
+		return marshaled, nil // fast path: nothing to merge
+	}
+	var unmarshaled interface{}
+	if err := json.Unmarshal(marshaled, &unmarshaled); err != nil {
+		return nil, err
+	}
+	asserted := unmarshaled.(map[string]interface{}) // NOTE(review): panics if v does not encode to a JSON object — confirm callers only pass the _Xxx struct aliases
+	for k, v := range extensions {
+		asserted[k] = v
+	}
+	merged, err := json.Marshal(asserted)
+	if err != nil {
+		return nil, err
+	}
+	return merged, nil
+}
+
+// MarshalJSON returns the JSON encoding of i.
+func (i Info) MarshalJSON() ([]byte, error) {
+ return marshalJSON(_Info(i), i.Extensions)
+}
+
+// MarshalJSON returns the JSON encoding of p.
+func (p Path) MarshalJSON() ([]byte, error) {
+ return marshalJSON(_Path(p), p.Extensions)
+}
+
+// MarshalJSON returns the JSON encoding of o.
+func (o Operation) MarshalJSON() ([]byte, error) {
+ return marshalJSON(_Operation(o), o.Extensions)
+}
+
+// MarshalJSON returns the JSON encoding of p.
+func (p Parameter) MarshalJSON() ([]byte, error) {
+ return marshalJSON(_Parameter(p), p.Extensions)
+}
+
+// MarshalJSON returns the JSON encoding of r.
+func (r Response) MarshalJSON() ([]byte, error) {
+ return marshalJSON(_Response(r), r.Extensions)
+}
+
+// MarshalJSON returns the JSON encoding of s.
+func (s SecurityDefinition) MarshalJSON() ([]byte, error) {
+ return marshalJSON(_SecurityDefinition(s), s.Extensions)
+}
+
+// MarshalJSON returns the JSON encoding of t.
+func (t Tag) MarshalJSON() ([]byte, error) {
+ return marshalJSON(_Tag(t), t.Extensions)
+}
+
+func marshalYAML(v interface{}, extensions map[string]interface{}) (interface{}, error) { // return the value to encode to YAML: v itself, or a map merging v with the extensions
+	if len(extensions) == 0 {
+		return v, nil // fast path: encode v directly
+	}
+	marshaled, err := yaml.Marshal(v)
+	if err != nil {
+		return nil, err
+	}
+	var unmarshaled map[string]interface{}
+	if err := yaml.Unmarshal(marshaled, &unmarshaled); err != nil {
+		return nil, err // returns an error (rather than panicking) when v is not a YAML mapping — asymmetric with marshalJSON
+	}
+	for k, v := range extensions {
+		unmarshaled[k] = v
+	}
+	return unmarshaled, nil
+}
+
+// MarshalYAML returns the value to be marshaled in place of the original value.
+func (i Info) MarshalYAML() (interface{}, error) {
+	return marshalYAML(_Info(i), i.Extensions)
+}
+
+// MarshalYAML returns the value to be marshaled in place of the original value.
+func (p Path) MarshalYAML() (interface{}, error) {
+	return marshalYAML(_Path(p), p.Extensions)
+}
+
+// MarshalYAML returns the value to be marshaled in place of the original value.
+func (o Operation) MarshalYAML() (interface{}, error) {
+	return marshalYAML(_Operation(o), o.Extensions)
+}
+
+// MarshalYAML returns the value to be marshaled in place of the original value.
+func (p Parameter) MarshalYAML() (interface{}, error) {
+	return marshalYAML(_Parameter(p), p.Extensions)
+}
+
+// MarshalYAML returns the value to be marshaled in place of the original value.
+func (r Response) MarshalYAML() (interface{}, error) {
+	return marshalYAML(_Response(r), r.Extensions)
+}
+
+// MarshalYAML returns the value to be marshaled in place of the original value.
+func (s SecurityDefinition) MarshalYAML() (interface{}, error) {
+	return marshalYAML(_SecurityDefinition(s), s.Extensions)
+}
+
+// MarshalYAML returns the value to be marshaled in place of the original value.
+func (t Tag) MarshalYAML() (interface{}, error) {
+	return marshalYAML(_Tag(t), t.Extensions)
+}
diff --git a/vendor/goa.design/goa/http/codegen/openapi/openapi_v2_builder.go b/vendor/goa.design/goa/http/codegen/openapi/openapi_v2_builder.go
new file mode 100644
index 000000000..eb27fca9e
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/openapi/openapi_v2_builder.go
@@ -0,0 +1,863 @@
+package openapi
+
+// This file builds an OpenAPI v2 specification from an HTTP root expression.
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "net/url"
+ "sort"
+ "strconv"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// NewV2 returns the OpenAPI v2 specification for the given API. The spec is
+// built from the HTTP expressions rooted at root; the spec host is derived
+// from the first URI of h. NewV2 returns nil when root is nil and an error
+// when the host URI cannot be parsed or a response spec cannot be built.
+func NewV2(root *expr.RootExpr, h *expr.HostExpr) (*V2, error) {
+	if root == nil {
+		return nil, nil
+	}
+	tags := tagsFromExpr(root.Meta)
+	u, err := url.Parse(string(h.URIs[0])) // NOTE(review): assumes h.URIs is non-empty — confirm callers guarantee at least one URI
+	if err != nil {
+		return nil, fmt.Errorf("failed to parse server URL: %s", err)
+	}
+	host := u.Host
+
+	basePath := root.API.HTTP.Path
+	if hasAbsoluteRoutes(root) {
+		basePath = ""
+	}
+	params, err := paramsFromExpr(root.API.HTTP.Params, basePath)
+	if err != nil {
+		return nil, err
+	}
+	var paramMap map[string]*Parameter
+	if len(params) > 0 {
+		paramMap = make(map[string]*Parameter, len(params))
+		for _, p := range params {
+			paramMap[p.Name] = p
+		}
+	}
+	s := &V2{
+		Swagger: "2.0",
+		Info: &Info{
+			Title:          root.API.Title,
+			Description:    root.API.Description,
+			TermsOfService: root.API.TermsOfService,
+			Contact:        root.API.Contact,
+			License:        root.API.License,
+			Version:        root.API.Version,
+			Extensions:     ExtensionsFromExpr(root.Meta),
+		},
+		Host:                host,
+		BasePath:            basePath,
+		Paths:               make(map[string]interface{}),
+		Consumes:            root.API.HTTP.Consumes,
+		Produces:            root.API.HTTP.Produces,
+		Parameters:          paramMap,
+		Tags:                tags,
+		SecurityDefinitions: securitySpecFromExpr(root),
+		ExternalDocs:        docsFromExpr(root.API.Docs),
+	}
+
+	for _, he := range root.API.HTTP.Errors {
+		res, err := responseSpecFromExpr(s, root, he.Response, "")
+		if err != nil {
+			return nil, err
+		}
+		if s.Responses == nil {
+			s.Responses = make(map[string]*Response)
+		}
+		s.Responses[he.Name] = res
+	}
+
+	for _, res := range root.API.HTTP.Services {
+		if !mustGenerate(res.Meta) || !mustGenerate(res.ServiceExpr.Meta) {
+			continue
+		}
+		for k, v := range ExtensionsFromExpr(res.Meta) {
+			s.Paths[k] = v
+		}
+		for _, fs := range res.FileServers {
+			if !mustGenerate(fs.Meta) || !mustGenerate(fs.Service.Meta) {
+				continue
+			}
+			if err := buildPathFromFileServer(s, root, fs); err != nil {
+				return nil, err
+			}
+		}
+		for _, a := range res.HTTPEndpoints {
+			if !mustGenerate(a.Meta) || !mustGenerate(a.MethodExpr.Meta) {
+				continue
+			}
+			for _, route := range a.Routes {
+				if err := buildPathFromExpr(s, root, h, route, basePath); err != nil {
+					return nil, err
+				}
+			}
+		}
+	}
+	if len(Definitions) > 0 {
+		s.Definitions = make(map[string]*Schema)
+		for n, d := range Definitions {
+			// sad but swagger doesn't support these
+			d.Media = nil
+			d.Links = nil
+			s.Definitions[n] = d
+		}
+	}
+	return s, nil
+}
+
+// ExtensionsFromExpr generates swagger extensions from the given meta
+// expression (keys of the form "swagger:extension:x-*").
+func ExtensionsFromExpr(mdata expr.MetaExpr) map[string]interface{} {
+	extensions := make(map[string]interface{})
+	for key, value := range mdata {
+		chunks := strings.Split(key, ":")
+		if len(chunks) != 3 {
+			continue
+		}
+		if chunks[0] != "swagger" || chunks[1] != "extension" {
+			continue
+		}
+		if !strings.HasPrefix(chunks[2], "x-") {
+			continue
+		}
+		val := value[0] // NOTE(review): assumes the meta value is non-empty — confirm
+		ival := interface{}(val)
+		if err := json.Unmarshal([]byte(val), &ival); err != nil {
+			extensions[chunks[2]] = val // not valid JSON: keep the raw string
+			continue
+		}
+		extensions[chunks[2]] = ival
+	}
+	if len(extensions) == 0 {
+		return nil
+	}
+	return extensions
+}
+
+// mustGenerate returns true if the meta indicates that an OpenAPI specification
+// should be generated, false otherwise.
+func mustGenerate(meta expr.MetaExpr) bool {
+	if m, ok := meta["swagger:generate"]; ok {
+		if len(m) > 0 && m[0] == "false" {
+			return false // generation explicitly disabled for this expression
+		}
+	}
+	return true
+}
+
+// securitySpecFromExpr generates the OpenAPI security definitions from the
+// security design.
+func securitySpecFromExpr(root *expr.RootExpr) map[string]*SecurityDefinition {
+ sds := make(map[string]*SecurityDefinition)
+ for _, s := range root.Schemes {
+ sd := SecurityDefinition{
+ Description: s.Description,
+ Extensions: ExtensionsFromExpr(s.Meta),
+ }
+ switch s.Kind {
+ case expr.BasicAuthKind:
+ sd.Type = "basic"
+ case expr.APIKeyKind:
+ sd.Type = "apiKey"
+ sd.In = s.In
+ sd.Name = s.Name
+ case expr.JWTKind:
+ sd.Type = "apiKey"
+ // OpenAPI V2 spec does not support JWT scheme. Hence we add the scheme
+ // information to the description.
+ lines := []string{}
+ for _, scope := range s.Scopes {
+ lines = append(lines, fmt.Sprintf(" * `%s`: %s", scope.Name, scope.Description))
+ }
+ sd.In = s.In
+ sd.Name = s.Name
+ sd.Description += fmt.Sprintf("\n**Security Scopes**:\n%s", strings.Join(lines, "\n"))
+ case expr.OAuth2Kind:
+ sd.Type = "oauth2"
+ if scopesLen := len(s.Scopes); scopesLen > 0 {
+ scopes := make(map[string]string, scopesLen)
+ for _, scope := range s.Scopes {
+ scopes[scope.Name] = scope.Description
+ }
+ sd.Scopes = scopes
+ }
+ }
+ if len(s.Flows) > 0 {
+ switch s.Flows[0].Kind {
+ case expr.AuthorizationCodeFlowKind:
+ sd.Flow = "accessCode"
+ case expr.ImplicitFlowKind:
+ sd.Flow = "implicit"
+ case expr.PasswordFlowKind:
+ sd.Flow = "password"
+ case expr.ClientCredentialsFlowKind:
+ sd.Flow = "application"
+ }
+ sd.AuthorizationURL = s.Flows[0].AuthorizationURL
+ sd.TokenURL = s.Flows[0].TokenURL
+ }
+ sds[s.SchemeName] = &sd
+ }
+ return sds
+}
+
+// hasAbsoluteRoutes returns true if any endpoint exposed by the API uses an
+// absolute route or if the API has file servers. This is needed as OpenAPI does
+// not support exceptions to the base path so if the API has any absolute route
+// the base path must be "/" and all routes must be absolutes.
+func hasAbsoluteRoutes(root *expr.RootExpr) bool {
+	hasAbsoluteRoutes := false
+	for _, res := range root.API.HTTP.Services {
+		if !mustGenerate(res.Meta) || !mustGenerate(res.ServiceExpr.Meta) {
+			continue
+		}
+		for _, fs := range res.FileServers {
+			if !mustGenerate(fs.Meta) || !mustGenerate(fs.Service.Meta) {
+				continue
+			}
+			hasAbsoluteRoutes = true // any generated file server forces absolute routes
+			break
+		}
+		for _, a := range res.HTTPEndpoints {
+			if !mustGenerate(a.Meta) || !mustGenerate(a.MethodExpr.Meta) {
+				continue
+			}
+			for _, ro := range a.Routes {
+				if ro.IsAbsolute() {
+					hasAbsoluteRoutes = true
+					break
+				}
+			}
+			if hasAbsoluteRoutes {
+				break
+			}
+		}
+		if hasAbsoluteRoutes {
+			break
+		}
+	}
+	return hasAbsoluteRoutes
+}
+
+func tagsFromExpr(mdata expr.MetaExpr) (tags []*Tag) { // builds OpenAPI tags from "swagger:tag:*" meta keys, in sorted key order; does not mutate mdata
+	var keys []string
+	for k := range mdata {
+		keys = append(keys, k)
+	}
+	sort.Strings(keys)
+	for _, key := range keys {
+		chunks := strings.Split(key, ":")
+		if len(chunks) != 3 {
+			continue
+		}
+		if chunks[0] != "swagger" || chunks[1] != "tag" {
+			continue
+		}
+
+		tag := &Tag{Name: chunks[2]}
+
+		desc := mdata[fmt.Sprintf("%s:desc", key)]
+		if len(desc) != 0 {
+			tag.Description = desc[0]
+		}
+
+		hasDocs := false
+		docs := &ExternalDocs{}
+
+		docsURL := mdata[fmt.Sprintf("%s:url", key)]
+		if len(docsURL) != 0 {
+			docs.URL = docsURL[0]
+			hasDocs = true
+		}
+
+		urlDesc := mdata[fmt.Sprintf("%s:url:desc", key)]
+		if len(urlDesc) != 0 {
+			docs.Description = urlDesc[0]
+			hasDocs = true
+		}
+
+		if hasDocs {
+			tag.ExternalDocs = docs
+		}
+
+		tag.Extensions = ExtensionsFromExpr(mdata) // NOTE(review): extensions come from the whole meta map, not just this tag — confirm intended
+
+		tags = append(tags, tag)
+	}
+
+	return
+}
+
+func tagNamesFromExpr(mdatas ...expr.MetaExpr) (tagNames []string) { // collects the names of all swagger tags defined in the given meta expressions
+	for _, mdata := range mdatas {
+		tags := tagsFromExpr(mdata)
+		for _, tag := range tags {
+			tagNames = append(tagNames, tag.Name)
+		}
+	}
+	return
+}
+
+func summaryFromExpr(name string, e *expr.HTTPEndpointExpr) string { // returns the swagger:summary meta (endpoint meta first, then method meta) or name as fallback
+	for n, mdata := range e.Meta {
+		if n == "swagger:summary" && len(mdata) > 0 {
+			return mdata[0]
+		}
+	}
+	for n, mdata := range e.MethodExpr.Meta {
+		if n == "swagger:summary" && len(mdata) > 0 {
+			return mdata[0]
+		}
+	}
+	return name
+}
+
+func summaryFromMeta(name string, meta expr.MetaExpr) string { // returns the swagger:summary meta value or name when absent
+	for n, mdata := range meta {
+		if n == "swagger:summary" && len(mdata) > 0 {
+			return mdata[0]
+		}
+	}
+	return name
+}
+
+// paramsFromExpr returns the OpenAPI parameter objects built from the given
+// mapped attribute expression. Attributes matching a wildcard of path are
+// emitted as required "path" parameters; all others are "query" parameters.
+func paramsFromExpr(params *expr.MappedAttributeExpr, path string) ([]*Parameter, error) {
+	if params == nil {
+		return nil, nil
+	}
+	var (
+		res       []*Parameter
+		wildcards = expr.ExtractHTTPWildcards(path)
+	)
+	codegen.WalkMappedAttr(params, func(n, pn string, required bool, at *expr.AttributeExpr) error {
+		in := "query"
+		for _, w := range wildcards {
+			if n == w {
+				in = "path"
+				required = true
+				break
+			}
+		}
+		param := paramFor(at, pn, in, required)
+		res = append(res, param)
+		return nil
+	})
+	return res, nil
+}
+
+func paramsFromHeaders(endpoint *expr.HTTPEndpointExpr) []*Parameter { // builds "header" parameters from service-level params merged with endpoint headers
+	params := []*Parameter{}
+	var (
+		rma = endpoint.Service.Params
+		ma  = endpoint.Headers
+
+		merged *expr.MappedAttributeExpr
+	)
+	{
+		if rma == nil {
+			merged = ma
+		} else if ma == nil {
+			merged = rma
+		} else {
+			merged = expr.DupMappedAtt(rma) // duplicate so the merge does not mutate the service-level params
+			merged.Merge(ma)
+		}
+	}
+
+	for _, n := range *expr.AsObject(merged.Type) { // NOTE(review): panics if both rma and ma are nil (merged is nil) — confirm callers guarantee one exists
+		header := n.Attribute
+		required := merged.IsRequiredNoDefault(n.Name)
+		p := paramFor(header, merged.ElemName(n.Name), "header", required)
+		params = append(params, p)
+	}
+	return params
+}
+
+func paramFor(at *expr.AttributeExpr, name, in string, required bool) *Parameter {
+ p := &Parameter{
+ In: in,
+ Name: name,
+ Default: toStringMap(at.DefaultValue),
+ Description: at.Description,
+ Required: required,
+ Type: at.Type.Name(),
+ }
+ if expr.IsArray(at.Type) {
+ p.Items = itemsFromExpr(expr.AsArray(at.Type).ElemType)
+ p.CollectionFormat = "multi"
+ }
+ switch at.Type {
+ case expr.Int, expr.UInt, expr.UInt32, expr.UInt64:
+ p.Type = "integer"
+ case expr.Int32, expr.Int64:
+ p.Type = "integer"
+ p.Format = at.Type.Name()
+ case expr.Float32:
+ p.Type = "number"
+ p.Format = "float"
+ case expr.Float64:
+ p.Type = "number"
+ p.Format = "double"
+ case expr.Bytes:
+ p.Type = "string"
+ p.Format = "byte"
+ }
+ p.Extensions = ExtensionsFromExpr(at.Meta)
+ initValidations(at, p)
+ return p
+}
+
+func itemsFromExpr(at *expr.AttributeExpr) *Items { // converts an attribute into an OpenAPI items object, recursing into array element types
+	items := &Items{Type: at.Type.Name()}
+	initValidations(at, items)
+	if expr.IsArray(at.Type) {
+		items.Items = itemsFromExpr(expr.AsArray(at.Type).ElemType)
+	}
+	return items
+}
+
+func responseSpecFromExpr(s *V2, root *expr.RootExpr, r *expr.HTTPResponseExpr, typeNamePrefix string) (*Response, error) { // NOTE(review): parameter s is unused — confirm whether it can be dropped
+	var schema *Schema
+	if mt, ok := r.Body.Type.(*expr.ResultTypeExpr); ok {
+		view := expr.DefaultView
+		if v, ok := r.Body.Meta["view"]; ok {
+			view = v[0]
+		}
+		schema = NewSchema()
+		schema.Ref = ResultTypeRefWithPrefix(root.API, mt, view, typeNamePrefix)
+	} else if r.Body.Type != expr.Empty {
+		schema = AttributeTypeSchemaWithPrefix(root.API, r.Body, typeNamePrefix)
+	}
+	headers, err := headersFromExpr(r.Headers)
+	if err != nil {
+		return nil, err
+	}
+	desc := r.Description
+	if desc == "" {
+		desc = fmt.Sprintf("%s response.", http.StatusText(r.StatusCode)) // default description derived from the standard status text
+	}
+	return &Response{
+		Description: desc,
+		Schema:      schema,
+		Headers:     headers,
+		Extensions:  ExtensionsFromExpr(r.Meta),
+	}, nil
+}
+
+func headersFromExpr(headers *expr.MappedAttributeExpr) (map[string]*Header, error) {
+ if headers == nil {
+ return nil, nil
+ }
+ obj := expr.AsObject(headers.Type)
+ if obj == nil {
+ return nil, fmt.Errorf("invalid headers definition, not an object")
+ }
+ res := make(map[string]*Header)
+ codegen.WalkMappedAttr(headers, func(_, n string, required bool, at *expr.AttributeExpr) error {
+ header := &Header{
+ Default: at.DefaultValue,
+ Description: at.Description,
+ Type: at.Type.Name(),
+ }
+ initValidations(at, header)
+ res[n] = header
+ return nil
+ })
+ if len(res) == 0 {
+ return nil, nil
+ }
+ return res, nil
+}
+
+// buildPathFromFileServer adds a GET path item to s for each request path of
+// the given file server expression.
+func buildPathFromFileServer(s *V2, root *expr.RootExpr, fs *expr.HTTPFileServerExpr) error {
+	for _, path := range fs.RequestPaths {
+		wcs := expr.ExtractHTTPWildcards(path)
+		var param []*Parameter
+		if len(wcs) > 0 {
+			param = []*Parameter{{
+				In:          "path",
+				Name:        wcs[0],
+				Description: "Relative file path",
+				Required:    true,
+				Type:        "string",
+			}}
+		}
+
+		responses := map[string]*Response{
+			"200": {
+				Description: "File downloaded",
+				Schema:      &Schema{Type: File},
+			},
+		}
+		if len(wcs) > 0 {
+			schema := TypeSchema(root.API, expr.ErrorResult)
+			responses["404"] = &Response{Description: "File not found", Schema: schema}
+		}
+
+		operationID := fmt.Sprintf("%s#%s", fs.Service.Name(), path)
+		schemes := root.API.Schemes()
+		// remove grpc and grpcs from schemes since they are not valid schemes
+		// in openapi.
+		for i := len(schemes) - 1; i >= 0; i-- {
+			if schemes[i] == "grpc" || schemes[i] == "grpcs" {
+				schemes = append(schemes[:i], schemes[i+1:]...)
+			}
+		}
+
+		operation := &Operation{
+			Description:  fs.Description,
+			Summary:      summaryFromMeta(fmt.Sprintf("Download %s", fs.FilePath), fs.Meta),
+			ExternalDocs: docsFromExpr(fs.Docs),
+			OperationID:  operationID,
+			Parameters:   param,
+			Responses:    responses,
+			Schemes:      schemes,
+		}
+
+		key := expr.HTTPWildcardRegex.ReplaceAllStringFunc(
+			path,
+			func(w string) string {
+				return fmt.Sprintf("/{%s}", w[2:])
+			},
+		)
+		if key == "" {
+			key = "/"
+		}
+		var pathItem interface{}
+		var ok bool
+		if pathItem, ok = s.Paths[key]; !ok {
+			pathItem = new(Path)
+			s.Paths[key] = pathItem
+		}
+		p := pathItem.(*Path)
+		p.Get = operation
+		p.Extensions = ExtensionsFromExpr(fs.Meta)
+	}
+	return nil
+}
+
+func buildPathFromExpr(s *V2, root *expr.RootExpr, h *expr.HostExpr, route *expr.RouteExpr, basePath string) error { // adds one OpenAPI path item per full path of the given endpoint route
+	endpoint := route.Endpoint
+
+	tagNames := tagNamesFromExpr(endpoint.Service.Meta, endpoint.Meta)
+	if len(tagNames) == 0 {
+		// By default tag with service name
+		tagNames = []string{route.Endpoint.Service.Name()}
+	}
+	for _, key := range route.FullPaths() {
+		params, err := paramsFromExpr(endpoint.Params, key)
+		if err != nil {
+			return err
+		}
+		params = append(params, paramsFromHeaders(endpoint)...)
+		produces := []string{}
+		responses := make(map[string]*Response, len(endpoint.Responses))
+		for _, r := range endpoint.Responses {
+			if endpoint.MethodExpr.IsStreaming() {
+				// A streaming endpoint allows at most one successful response
+				// definition. So it is okay to change the first successful
+				// response to a HTTP 101 response for openapi docs.
+				if _, ok := responses[strconv.Itoa(expr.StatusSwitchingProtocols)]; !ok {
+					r = r.Dup()
+					r.StatusCode = expr.StatusSwitchingProtocols
+				}
+			}
+			resp, err := responseSpecFromExpr(s, root, r, endpoint.Service.Name())
+			if err != nil {
+				return err
+			}
+			responses[strconv.Itoa(r.StatusCode)] = resp
+			if r.ContentType != "" { // record each distinct response content type in Produces
+				foundCT := false
+				for _, ct := range produces {
+					if ct == r.ContentType {
+						foundCT = true
+						break
+					}
+				}
+				if !foundCT {
+					produces = append(produces, r.ContentType)
+				}
+			}
+		}
+		for _, er := range endpoint.HTTPErrors {
+			resp, err := responseSpecFromExpr(s, root, er.Response, endpoint.Service.Name())
+			if err != nil {
+				return err
+			}
+			responses[strconv.Itoa(er.Response.StatusCode)] = resp
+		}
+
+		if endpoint.Body.Type != expr.Empty {
+			pp := &Parameter{
+				Name:        endpoint.Body.Type.Name(),
+				In:          "body",
+				Description: endpoint.Body.Description,
+				Required:    true,
+				Schema:      AttributeTypeSchemaWithPrefix(root.API, endpoint.Body, codegen.Goify(endpoint.Service.Name(), true)),
+			}
+			params = append(params, pp)
+		}
+
+		operationID := fmt.Sprintf("%s#%s", endpoint.Service.Name(), endpoint.Name())
+		index := 0
+		for i, rt := range endpoint.Routes {
+			if rt == route {
+				index = i
+				break
+			}
+		}
+		if index > 0 { // disambiguate the operation ID for secondary routes of the same endpoint
+			operationID = fmt.Sprintf("%s#%d", operationID, index)
+		}
+
+		schemes := h.Schemes()
+		// remove grpc and grpcs from schemes since it is not a valid scheme in
+		// openapi.
+		for i := len(schemes) - 1; i >= 0; i-- {
+			if schemes[i] == "grpc" || schemes[i] == "grpcs" {
+				schemes = append(schemes[:i], schemes[i+1:]...)
+			}
+		}
+
+		// replace http with ws for streaming endpoints
+		if endpoint.MethodExpr.IsStreaming() {
+			for i := len(schemes) - 1; i >= 0; i-- {
+				if schemes[i] == "http" {
+					news := append([]string{"ws"}, schemes[i+1:]...)
+					schemes = append(schemes[:i], news...)
+				}
+				if schemes[i] == "https" {
+					news := append([]string{"wss"}, schemes[i+1:]...)
+					schemes = append(schemes[:i], news...)
+				}
+			}
+		}
+
+		description := endpoint.Description()
+
+		reqs := endpoint.MethodExpr.Requirements
+		requirements := make([]map[string][]string, len(reqs))
+		for i, req := range reqs {
+			requirement := make(map[string][]string)
+			for _, s := range req.Schemes {
+				requirement[s.SchemeName] = []string{}
+				switch s.Kind {
+				case expr.OAuth2Kind:
+					for _, scope := range req.Scopes {
+						requirement[s.SchemeName] = append(requirement[s.SchemeName], scope)
+					}
+				case expr.JWTKind:
+					// JWT scopes are not representable in OpenAPI v2; document
+					// them in the operation description instead.
+					lines := make([]string, 0, len(req.Scopes))
+					for _, scope := range req.Scopes {
+						lines = append(lines, fmt.Sprintf(" * `%s`", scope))
+					}
+					if description != "" {
+						description += "\n"
+					}
+					description += fmt.Sprintf("\nRequired security scopes:\n%s", strings.Join(lines, "\n"))
+				}
+			}
+			requirements[i] = requirement
+		}
+
+		operation := &Operation{
+			Tags:         tagNames,
+			Description:  description,
+			Summary:      summaryFromExpr(endpoint.Name()+" "+endpoint.Service.Name(), endpoint),
+			ExternalDocs: docsFromExpr(endpoint.MethodExpr.Docs),
+			OperationID:  operationID,
+			Parameters:   params,
+			Produces:     produces,
+			Responses:    responses,
+			Schemes:      schemes,
+			Deprecated:   false,
+			Extensions:   ExtensionsFromExpr(route.Meta),
+			Security:     requirements,
+		}
+
+		if key == "" {
+			key = "/"
+		}
+		bp := expr.HTTPWildcardRegex.ReplaceAllStringFunc(
+			basePath,
+			func(w string) string {
+				return fmt.Sprintf("/{%s}", w[2:])
+			},
+		)
+		if bp != "/" {
+			key = strings.TrimPrefix(key, bp) // paths are relative to the spec base path
+		}
+		var path interface{}
+		var ok bool
+		if path, ok = s.Paths[key]; !ok {
+			path = new(Path)
+			s.Paths[key] = path
+		}
+		p := path.(*Path)
+		switch route.Method {
+		case "GET":
+			p.Get = operation
+		case "PUT":
+			p.Put = operation
+		case "POST":
+			p.Post = operation
+		case "DELETE":
+			p.Delete = operation
+		case "OPTIONS":
+			p.Options = operation
+		case "HEAD":
+			p.Head = operation
+		case "PATCH":
+			p.Patch = operation
+		}
+		p.Extensions = ExtensionsFromExpr(route.Endpoint.Meta)
+	}
+	return nil
+}
+
+func scopesList(scopes []string) string { // NOTE(review): appears unused in this file — confirm before removing; also sorts scopes in place
+	sort.Strings(scopes)
+
+	var lines []string
+	for _, scope := range scopes {
+		lines = append(lines, fmt.Sprintf(" * `%s`", scope))
+	}
+	return strings.Join(lines, "\n")
+}
+
+func docsFromExpr(docs *expr.DocsExpr) *ExternalDocs { // maps a design docs expression to OpenAPI external docs; nil in, nil out
+	if docs == nil {
+		return nil
+	}
+	return &ExternalDocs{
+		Description: docs.Description,
+		URL:         docs.URL,
+	}
+}
+
+func initEnumValidation(def interface{}, values []interface{}) {
+ switch actual := def.(type) {
+ case *Parameter:
+ actual.Enum = values
+ case *Header:
+ actual.Enum = values
+ case *Items:
+ actual.Enum = values
+ }
+}
+
+func initFormatValidation(def interface{}, format string) {
+ switch actual := def.(type) {
+ case *Parameter:
+ actual.Format = format
+ case *Header:
+ actual.Format = format
+ case *Items:
+ actual.Format = format
+ }
+}
+
+func initPatternValidation(def interface{}, pattern string) {
+ switch actual := def.(type) {
+ case *Parameter:
+ actual.Pattern = pattern
+ case *Header:
+ actual.Pattern = pattern
+ case *Items:
+ actual.Pattern = pattern
+ }
+}
+
+func initMinimumValidation(def interface{}, min *float64) {
+ switch actual := def.(type) {
+ case *Parameter:
+ actual.Minimum = min
+ actual.ExclusiveMinimum = false
+ case *Header:
+ actual.Minimum = min
+ actual.ExclusiveMinimum = false
+ case *Items:
+ actual.Minimum = min
+ actual.ExclusiveMinimum = false
+ }
+}
+
+func initMaximumValidation(def interface{}, max *float64) {
+ switch actual := def.(type) {
+ case *Parameter:
+ actual.Maximum = max
+ actual.ExclusiveMaximum = false
+ case *Header:
+ actual.Maximum = max
+ actual.ExclusiveMaximum = false
+ case *Items:
+ actual.Maximum = max
+ actual.ExclusiveMaximum = false
+ }
+}
+
+func initMinLengthValidation(def interface{}, isArray bool, min *int) { // sets MinItems for array parameters, MinLength otherwise
+	switch actual := def.(type) {
+	case *Parameter:
+		if isArray {
+			actual.MinItems = min
+		} else {
+			actual.MinLength = min
+		}
+	case *Header:
+		actual.MinLength = min // NOTE(review): arrays are not special-cased for headers/items (unlike *Parameter) — confirm intended
+	case *Items:
+		actual.MinLength = min
+	}
+}
+
+func initMaxLengthValidation(def interface{}, isArray bool, max *int) { // sets MaxItems for array parameters, MaxLength otherwise
+	switch actual := def.(type) {
+	case *Parameter:
+		if isArray {
+			actual.MaxItems = max
+		} else {
+			actual.MaxLength = max
+		}
+	case *Header:
+		actual.MaxLength = max // NOTE(review): arrays are not special-cased for headers/items (unlike *Parameter) — confirm intended
+	case *Items:
+		actual.MaxLength = max
+	}
+}
+
+func initValidations(attr *expr.AttributeExpr, def interface{}) {
+ val := attr.Validation
+ if val == nil {
+ return
+ }
+ initEnumValidation(def, val.Values)
+ initFormatValidation(def, string(val.Format))
+ initPatternValidation(def, val.Pattern)
+ if val.Minimum != nil {
+ initMinimumValidation(def, val.Minimum)
+ }
+ if val.Maximum != nil {
+ initMaximumValidation(def, val.Maximum)
+ }
+ if val.MinLength != nil {
+ initMinLengthValidation(def, expr.IsArray(attr.Type), val.MinLength)
+ }
+ if val.MaxLength != nil {
+ initMaxLengthValidation(def, expr.IsArray(attr.Type), val.MaxLength)
+ }
+}
diff --git a/vendor/goa.design/goa/http/codegen/paths.go b/vendor/goa.design/goa/http/codegen/paths.go
new file mode 100644
index 000000000..0eb05893a
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/paths.go
@@ -0,0 +1,64 @@
+package codegen
+
+import (
+ "fmt"
+ "path/filepath"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// PathFiles returns the service path files: for each HTTP service one server
+// path file followed by one client path file.
+func PathFiles(root *expr.RootExpr) []*codegen.File {
+	fw := make([]*codegen.File, 0, 2*len(root.API.HTTP.Services))
+	for _, svc := range root.API.HTTP.Services {
+		fw = append(fw, serverPath(svc), clientPath(svc))
+	}
+	return fw
+}
+
+// serverPath returns the server file containing the request path constructors
+// for the given service.
+func serverPath(svc *expr.HTTPServiceExpr) *codegen.File {
+ path := filepath.Join(codegen.Gendir, "http", codegen.SnakeCase(svc.Name()), "server", "paths.go")
+ return &codegen.File{Path: path, SectionTemplates: pathSections(svc, "server")}
+}
+
+// clientPath returns the client file containing the request path constructors
+// for the given service.
+func clientPath(svc *expr.HTTPServiceExpr) *codegen.File {
+ path := filepath.Join(codegen.Gendir, "http", codegen.SnakeCase(svc.Name()), "client", "paths.go")
+ return &codegen.File{Path: path, SectionTemplates: pathSections(svc, "client")}
+}
+
+// pathSections returns the sections of the file of the pkg package that
+// contains the request path constructors for the given service.
+func pathSections(svc *expr.HTTPServiceExpr, pkg string) []*codegen.SectionTemplate {
+ title := fmt.Sprintf("HTTP request path constructors for the %s service.", svc.Name())
+ sections := []*codegen.SectionTemplate{
+ codegen.Header(title, pkg, []*codegen.ImportSpec{
+ {Path: "fmt"},
+ {Path: "net/url"},
+ {Path: "strconv"},
+ {Path: "strings"},
+ }),
+ }
+ sdata := HTTPServices.Get(svc.Name())
+ for _, e := range svc.HTTPEndpoints {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "path",
+ Source: pathT,
+ Data: sdata.Endpoint(e.Name()),
+ })
+ }
+
+ return sections
+}
+
+// input: EndpointData
+const pathT = `{{ range .Routes }}// {{ .PathInit.Description }}
+func {{ .PathInit.Name }}({{ range .PathInit.ServerArgs }}{{ .Name }} {{ .TypeRef }}, {{ end }}) {{ .PathInit.ReturnTypeRef }} {
+{{- .PathInit.ServerCode }}
+}
+{{ end }}`
diff --git a/vendor/goa.design/goa/http/codegen/server.go b/vendor/goa.design/goa/http/codegen/server.go
new file mode 100644
index 000000000..13e263256
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/server.go
@@ -0,0 +1,1257 @@
+package codegen
+
+import (
+ "fmt"
+ "path/filepath"
+ "reflect"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/codegen/service"
+ "goa.design/goa/expr"
+)
+
+// ServerFiles returns all the server HTTP transport files.
+func ServerFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+ fw := make([]*codegen.File, 2*len(root.API.HTTP.Services))
+ for i, svc := range root.API.HTTP.Services {
+ fw[i] = serverFile(genpkg, svc)
+ }
+ for i, r := range root.API.HTTP.Services {
+ fw[i+len(root.API.HTTP.Services)] = serverEncodeDecode(genpkg, r)
+ }
+ return fw
+}
+
+// server returns the file implementing the HTTP server.
+func serverFile(genpkg string, svc *expr.HTTPServiceExpr) *codegen.File {
+ path := filepath.Join(codegen.Gendir, "http", codegen.SnakeCase(svc.Name()), "server", "server.go")
+ data := HTTPServices.Get(svc.Name())
+ title := fmt.Sprintf("%s HTTP server", svc.Name())
+ funcs := map[string]interface{}{
+ "join": func(ss []string, s string) string { return strings.Join(ss, s) },
+ "streamingEndpointExists": streamingEndpointExists,
+ "upgradeParams": upgradeParams,
+ "viewedServerBody": viewedServerBody,
+ }
+ sections := []*codegen.SectionTemplate{
+ codegen.Header(title, "server", []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "fmt"},
+ {Path: "io"},
+ {Path: "mime/multipart"},
+ {Path: "net/http"},
+ {Path: "path"},
+ {Path: "strings"},
+ {Path: "sync"},
+ {Path: "time"},
+ {Path: "github.com/gorilla/websocket"},
+ {Path: "goa.design/goa", Name: "goa"},
+ {Path: "goa.design/goa/http", Name: "goahttp"},
+ {Path: genpkg + "/" + codegen.SnakeCase(svc.Name()), Name: data.Service.PkgName},
+ {Path: genpkg + "/" + codegen.SnakeCase(svc.Name()) + "/" + "views", Name: data.Service.ViewsPkg},
+ }),
+ }
+
+ sections = append(sections, &codegen.SectionTemplate{Name: "server-struct", Source: serverStructT, Data: data})
+ sections = append(sections, &codegen.SectionTemplate{Name: "server-mountpoint", Source: mountPointStructT, Data: data})
+
+ for _, e := range data.Endpoints {
+ if e.MultipartRequestDecoder != nil {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "multipart-request-decoder-type",
+ Source: multipartRequestDecoderTypeT,
+ Data: e.MultipartRequestDecoder,
+ })
+ }
+ if e.ServerStream != nil {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "server-stream-struct-type",
+ Source: streamStructTypeT,
+ Data: e.ServerStream,
+ })
+ }
+ }
+
+ sections = append(sections, &codegen.SectionTemplate{Name: "server-init", Source: serverInitT, Data: data, FuncMap: funcs})
+ sections = append(sections, &codegen.SectionTemplate{Name: "server-service", Source: serverServiceT, Data: data})
+ sections = append(sections, &codegen.SectionTemplate{Name: "server-use", Source: serverUseT, Data: data})
+ sections = append(sections, &codegen.SectionTemplate{Name: "server-mount", Source: serverMountT, Data: data})
+
+ for _, e := range data.Endpoints {
+ sections = append(sections, &codegen.SectionTemplate{Name: "server-handler", Source: serverHandlerT, Data: e})
+ sections = append(sections, &codegen.SectionTemplate{Name: "server-handler-init", Source: serverHandlerInitT, Data: e})
+ }
+ for _, s := range data.FileServers {
+ sections = append(sections, &codegen.SectionTemplate{Name: "server-files", Source: fileServerT, FuncMap: funcs, Data: s})
+ }
+ for _, e := range data.Endpoints {
+ if e.ServerStream != nil {
+ if e.ServerStream.SendTypeRef != "" {
+ sections = append(sections, &codegen.SectionTemplate{Name: "server-stream-send", Source: streamSendT, Data: e.ServerStream, FuncMap: funcs})
+ }
+ switch e.ServerStream.Kind {
+ case expr.ClientStreamKind, expr.BidirectionalStreamKind:
+ sections = append(sections, &codegen.SectionTemplate{Name: "server-stream-recv", Source: streamRecvT, Data: e.ServerStream, FuncMap: funcs})
+ }
+ if e.ServerStream.MustClose {
+ sections = append(sections, &codegen.SectionTemplate{Name: "server-stream-close", Source: streamCloseT, Data: e.ServerStream, FuncMap: funcs})
+ }
+ if e.Method.ViewedResult != nil && e.Method.ViewedResult.ViewName == "" {
+ sections = append(sections, &codegen.SectionTemplate{Name: "server-stream-set-view", Source: streamSetViewT, Data: e.ServerStream})
+ }
+ }
+ }
+
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+// serverEncodeDecode returns the file defining the HTTP server encoding and
+// decoding logic.
+func serverEncodeDecode(genpkg string, svc *expr.HTTPServiceExpr) *codegen.File {
+ path := filepath.Join(codegen.Gendir, "http", codegen.SnakeCase(svc.Name()), "server", "encode_decode.go")
+ data := HTTPServices.Get(svc.Name())
+ title := fmt.Sprintf("%s HTTP server encoders and decoders", svc.Name())
+ sections := []*codegen.SectionTemplate{
+ codegen.Header(title, "server", []*codegen.ImportSpec{
+ {Path: "context"},
+ {Path: "fmt"},
+ {Path: "io"},
+ {Path: "net/http"},
+ {Path: "strconv"},
+ {Path: "strings"},
+ {Path: "encoding/json"},
+ {Path: "mime/multipart"},
+ {Path: "unicode/utf8"},
+ {Path: "goa.design/goa", Name: "goa"},
+ {Path: "goa.design/goa/http", Name: "goahttp"},
+ {Path: genpkg + "/" + codegen.SnakeCase(svc.Name()), Name: data.Service.PkgName},
+ {Path: genpkg + "/" + codegen.SnakeCase(svc.Name()) + "/" + "views", Name: data.Service.ViewsPkg},
+ }),
+ }
+
+ for _, e := range data.Endpoints {
+ if e.ServerStream == nil {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "response-encoder",
+ FuncMap: transTmplFuncs(svc),
+ Source: responseEncoderT,
+ Data: e,
+ })
+ }
+ if e.Payload.Ref != "" {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "request-decoder",
+ Source: requestDecoderT,
+ FuncMap: transTmplFuncs(svc),
+ Data: e,
+ })
+ }
+ if e.MultipartRequestDecoder != nil {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "multipart-request-decoder",
+ Source: multipartRequestDecoderT,
+ FuncMap: transTmplFuncs(svc),
+ Data: e.MultipartRequestDecoder,
+ })
+ }
+
+ if len(e.Errors) > 0 {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "error-encoder",
+ Source: errorEncoderT,
+ FuncMap: transTmplFuncs(svc),
+ Data: e,
+ })
+ }
+ }
+ for _, h := range data.ServerTransformHelpers {
+ sections = append(sections, &codegen.SectionTemplate{
+ Name: "server-transform-helper",
+ Source: transformHelperT,
+ Data: h,
+ })
+ }
+
+ return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+// streamingEndpointExists returns true if at least one of the endpoints in
+// the service defines a streaming payload or result.
+func streamingEndpointExists(sd *ServiceData) bool {
+ for _, e := range sd.Endpoints {
+ if e.ServerStream != nil || e.ClientStream != nil {
+ return true
+ }
+ }
+ return false
+}
+
+func transTmplFuncs(s *expr.HTTPServiceExpr) map[string]interface{} {
+ return map[string]interface{}{
+ "goTypeRef": func(dt expr.DataType) string {
+ return service.Services.Get(s.Name()).Scope.GoTypeRef(&expr.AttributeExpr{Type: dt})
+ },
+ "conversionData": conversionData,
+ "headerConversionData": headerConversionData,
+ "printValue": printValue,
+ "viewedServerBody": viewedServerBody,
+ }
+}
+
+// conversionData creates a template context suitable for executing the
+// "type_conversion" template.
+func conversionData(varName, name string, dt expr.DataType) map[string]interface{} {
+ return map[string]interface{}{
+ "VarName": varName,
+ "Name": name,
+ "Type": dt,
+ }
+}
+
+// headerConversionData produces the template data suitable for executing the
+// "header_conversion" template.
+func headerConversionData(dt expr.DataType, varName string, required bool, target string) map[string]interface{} {
+ return map[string]interface{}{
+ "Type": dt,
+ "VarName": varName,
+ "Required": required,
+ "Target": target,
+ }
+}
+
+// printValue generates the Go code for a literal string containing the given
+// value. printValue panics if the data type is not a primitive or an array.
+func printValue(dt expr.DataType, v interface{}) string {
+ switch actual := dt.(type) {
+ case *expr.Array:
+ val := reflect.ValueOf(v)
+ elems := make([]string, val.Len())
+ for i := 0; i < val.Len(); i++ {
+ elems[i] = printValue(actual.ElemType.Type, val.Index(i).Interface())
+ }
+ return strings.Join(elems, ", ")
+ case expr.Primitive:
+ return fmt.Sprintf("%v", v)
+ default:
+ panic("unsupported type value " + dt.Name()) // bug
+ }
+}
+
+// viewedServerBody returns the type data that uses the given view for
+// rendering.
+func viewedServerBody(sbd []*TypeData, view string) *TypeData {
+ for _, v := range sbd {
+ if v.View == view {
+ return v
+ }
+ }
+ panic("view not found in server body types: " + view)
+}
+
+// input: ServiceData
+const serverStructT = `{{ printf "%s lists the %s service endpoint HTTP handlers." .ServerStruct .Service.Name | comment }}
+type {{ .ServerStruct }} struct {
+ Mounts []*{{ .MountPointStruct }}
+ {{- range .Endpoints }}
+ {{ .Method.VarName }} http.Handler
+ {{- end }}
+}
+
+// ErrorNamer is an interface implemented by generated error structs that
+// exposes the name of the error as defined in the design.
+type ErrorNamer interface {
+ ErrorName() string
+}
+`
+
+// input: ServiceData
+const mountPointStructT = `{{ printf "%s holds information about the mounted endpoints." .MountPointStruct | comment }}
+type {{ .MountPointStruct }} struct {
+ {{ printf "Method is the name of the service method served by the mounted HTTP handler." | comment }}
+ Method string
+ {{ printf "Verb is the HTTP method used to match requests to the mounted handler." | comment }}
+ Verb string
+ {{ printf "Pattern is the HTTP request path pattern used to match requests to the mounted handler." | comment }}
+ Pattern string
+}
+`
+
+// input: ServiceData
+const serverInitT = `{{ printf "%s instantiates HTTP handlers for all the %s service endpoints." .ServerInit .Service.Name | comment }}
+func {{ .ServerInit }}(
+ e *{{ .Service.PkgName }}.Endpoints,
+ mux goahttp.Muxer,
+ dec func(*http.Request) goahttp.Decoder,
+ enc func(context.Context, http.ResponseWriter) goahttp.Encoder,
+ eh func(context.Context, http.ResponseWriter, error),
+ {{- if streamingEndpointExists . }}
+ up goahttp.Upgrader,
+ connConfigFn goahttp.ConnConfigureFunc,
+ {{- end }}
+ {{- range .Endpoints }}
+ {{- if .MultipartRequestDecoder }}
+ {{ .MultipartRequestDecoder.VarName }} {{ .MultipartRequestDecoder.FuncName }},
+ {{- end }}
+ {{- end }}
+) *{{ .ServerStruct }} {
+ return &{{ .ServerStruct }}{
+ Mounts: []*{{ .MountPointStruct }}{
+ {{- range $e := .Endpoints }}
+ {{- range $e.Routes }}
+ {"{{ $e.Method.VarName }}", "{{ .Verb }}", "{{ .Path }}"},
+ {{- end }}
+ {{- end }}
+ {{- range .FileServers }}
+ {{- $filepath := .FilePath }}
+ {{- range .RequestPaths }}
+ {"{{ $filepath }}", "GET", "{{ . }}"},
+ {{- end }}
+ {{- end }}
+ },
+ {{- range .Endpoints }}
+ {{ .Method.VarName }}: {{ .HandlerInit }}(e.{{ .Method.VarName }}, mux, {{ if .MultipartRequestDecoder }}{{ .MultipartRequestDecoder.InitName }}(mux, {{ .MultipartRequestDecoder.VarName }}){{ else }}dec{{ end }}, enc, eh{{ if .ServerStream }}, up, connConfigFn{{ end }}),
+ {{- end }}
+ }
+}
+`
+
+// input: ServiceData
+const serverServiceT = `{{ printf "%s returns the name of the service served." .ServerService | comment }}
+func (s *{{ .ServerStruct }}) {{ .ServerService }}() string { return "{{ .Service.Name }}" }
+`
+
+// input: ServiceData
+const serverUseT = `{{ printf "Use wraps the server handlers with the given middleware." | comment }}
+func (s *{{ .ServerStruct }}) Use(m func(http.Handler) http.Handler) {
+{{- range .Endpoints }}
+ s.{{ .Method.VarName }} = m(s.{{ .Method.VarName }})
+{{- end }}
+}
+`
+
+// input: ServiceData
+const serverMountT = `{{ printf "%s configures the mux to serve the %s endpoints." .MountServer .Service.Name | comment }}
+func {{ .MountServer }}(mux goahttp.Muxer{{ if .Endpoints }}, h *{{ .ServerStruct }}{{ end }}) {
+ {{- range .Endpoints }}
+ {{ .MountHandler }}(mux, h.{{ .Method.VarName }})
+ {{- end }}
+ {{- range .FileServers }}
+ {{- if .IsDir }}
+ {{ .MountHandler }}(mux, http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ upath := path.Clean(r.URL.Path)
+ rpath := upath
+ {{- range .RequestPaths }}
+ if strings.HasPrefix(upath, "{{ . }}") {
+ rpath = upath[{{ len . }}:]
+ }
+ {{- end }}
+ http.ServeFile(w, r, path.Join({{ printf "%q" .FilePath }}, rpath))
+ }))
+ {{- else }}
+ {{ .MountHandler }}(mux, http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ http.ServeFile(w, r, {{ printf "%q" .FilePath }})
+ }))
+ {{- end }}
+ {{- end }}
+}
+`
+
+// input: EndpointData
+const serverHandlerT = `{{ printf "%s configures the mux to serve the %q service %q endpoint." .MountHandler .ServiceName .Method.Name | comment }}
+func {{ .MountHandler }}(mux goahttp.Muxer, h http.Handler) {
+ f, ok := h.(http.HandlerFunc)
+ if !ok {
+ f = func(w http.ResponseWriter, r *http.Request) {
+ h.ServeHTTP(w, r)
+ }
+ }
+ {{- range .Routes }}
+ mux.Handle("{{ .Verb }}", "{{ .Path }}", f)
+ {{- end }}
+}
+`
+
+// input: FileServerData
+const fileServerT = `{{ printf "%s configures the mux to serve GET request made to %q." .MountHandler (join .RequestPaths ", ") | comment }}
+func {{ .MountHandler }}(mux goahttp.Muxer, h http.Handler) {
+ {{- if .IsDir }}
+ {{- range .RequestPaths }}
+ mux.Handle("GET", "{{ . }}/", h.ServeHTTP)
+ mux.Handle("GET", "{{ . }}/*{{ $.PathParam }}", h.ServeHTTP)
+ {{- end }}
+ {{- else }}
+ {{- range .RequestPaths }}
+ mux.Handle("GET", "{{ . }}", h.ServeHTTP)
+ {{- end }}
+ {{- end }}
+}
+`
+
+// input: EndpointData
+const serverHandlerInitT = `{{ printf "%s creates a HTTP handler which loads the HTTP request and calls the %q service %q endpoint." .HandlerInit .ServiceName .Method.Name | comment }}
+func {{ .HandlerInit }}(
+ endpoint goa.Endpoint,
+ mux goahttp.Muxer,
+ dec func(*http.Request) goahttp.Decoder,
+ enc func(context.Context, http.ResponseWriter) goahttp.Encoder,
+ eh func(context.Context, http.ResponseWriter, error),
+ {{- if .ServerStream }}
+ up goahttp.Upgrader,
+ connConfigFn goahttp.ConnConfigureFunc,
+ {{- end }}
+) http.Handler {
+ var (
+ {{- if .ServerStream }}
+ {{- if .Payload.Ref }}
+ decodeRequest = {{ .RequestDecoder }}(mux, dec)
+ {{- end }}
+ {{- else }}
+ {{- if .Payload.Ref }}
+ decodeRequest = {{ .RequestDecoder }}(mux, dec)
+ {{- end }}
+ encodeResponse = {{ .ResponseEncoder }}(enc)
+ {{- end }}
+ encodeError = {{ if .Errors }}{{ .ErrorEncoder }}{{ else }}goahttp.ErrorEncoder{{ end }}(enc)
+ )
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := context.WithValue(r.Context(), goahttp.AcceptTypeKey, r.Header.Get("Accept"))
+ ctx = context.WithValue(ctx, goa.MethodKey, {{ printf "%q" .Method.Name }})
+ ctx = context.WithValue(ctx, goa.ServiceKey, {{ printf "%q" .ServiceName }})
+
+ {{- if .Payload.Ref }}
+ payload, err := decodeRequest(r)
+ if err != nil {
+ if err := encodeError(ctx, w, err); err != nil {
+ eh(ctx, w, err)
+ }
+ return
+ }
+ {{- end }}
+
+ {{ if .ServerStream }}
+ v := &{{ .ServicePkgName }}.{{ .Method.ServerStream.EndpointStruct }}{
+ Stream: &{{ .ServerStream.VarName }}{
+ upgrader: up,
+ connConfigFn: connConfigFn,
+ w: w,
+ r: r,
+ },
+ {{- if .Payload.Ref }}
+ Payload: payload.({{ .Payload.Ref }}),
+ {{- end }}
+ }
+ _, err = endpoint(ctx, v)
+ {{- else }}
+ res, err := endpoint(ctx, {{ if .Payload.Ref }}payload{{ else }}nil{{ end }})
+ {{- end }}
+
+ if err != nil {
+ {{- if .ServerStream }}
+ if _, ok := err.(websocket.HandshakeError); ok {
+ return
+ }
+ {{- end }}
+ if err := encodeError(ctx, w, err); err != nil {
+ eh(ctx, w, err)
+ }
+ return
+ }
+ {{- if not .ServerStream }}
+ if err := encodeResponse(ctx, w, res); err != nil {
+ eh(ctx, w, err)
+ }
+ {{- end }}
+ })
+}
+`
+
+// input: TransformFunctionData
+const transformHelperT = `{{ printf "%s builds a value of type %s from a value of type %s." .Name .ResultTypeRef .ParamTypeRef | comment }}
+func {{ .Name }}(v {{ .ParamTypeRef }}) {{ .ResultTypeRef }} {
+ {{ .Code }}
+ return res
+}
+`
+
+// input: EndpointData
+const requestDecoderT = `{{ printf "%s returns a decoder for requests sent to the %s %s endpoint." .RequestDecoder .ServiceName .Method.Name | comment }}
+func {{ .RequestDecoder }}(mux goahttp.Muxer, decoder func(*http.Request) goahttp.Decoder) func(*http.Request) (interface{}, error) {
+ return func(r *http.Request) (interface{}, error) {
+{{- if .MultipartRequestDecoder }}
+ var payload {{ .Payload.Ref }}
+ if err := decoder(r).Decode(&payload); err != nil {
+ return nil, goa.DecodePayloadError(err.Error())
+ }
+{{- else if .Payload.Request.ServerBody }}
+ var (
+ body {{ .Payload.Request.ServerBody.VarName }}
+ err error
+ )
+ err = decoder(r).Decode(&body)
+ if err != nil {
+ if err == io.EOF {
+ return nil, goa.MissingPayloadError()
+ }
+ return nil, goa.DecodePayloadError(err.Error())
+ }
+ {{- if .Payload.Request.ServerBody.ValidateRef }}
+ {{ .Payload.Request.ServerBody.ValidateRef }}
+ if err != nil {
+ return nil, err
+ }
+ {{- end }}
+{{- end }}
+{{- if not .MultipartRequestDecoder }}
+ {{- template "request_params_headers" .Payload.Request }}
+ {{- if .Payload.Request.MustValidate }}
+ if err != nil {
+ return nil, err
+ }
+ {{- end }}
+ {{- if .Payload.Request.PayloadInit }}
+ payload := {{ .Payload.Request.PayloadInit.Name }}({{ range .Payload.Request.PayloadInit.ServerArgs }}{{ .Ref }}, {{ end }})
+ {{- else if .Payload.DecoderReturnValue }}
+ payload := {{ .Payload.DecoderReturnValue }}
+ {{- else }}
+ payload := body
+ {{- end }}
+{{- end }}
+{{- if .BasicScheme }}{{ with .BasicScheme }}
+ user, pass, {{ if or .UsernameRequired .PasswordRequired }}ok{{ else }}_{{ end }} := r.BasicAuth()
+ {{- if or .UsernameRequired .PasswordRequired}}
+ if !ok {
+ return nil, goa.MissingFieldError("Authorization", "header")
+ }
+ {{- end }}
+ payload.{{ .UsernameField }} = {{ if .UsernamePointer }}&{{ end }}user
+ payload.{{ .PasswordField }} = {{ if .PasswordPointer }}&{{ end }}pass
+{{- end }}{{ end }}
+{{- range .HeaderSchemes }}
+ {{- if not .CredRequired }}
+ if payload.{{ .CredField }} != nil {
+ {{- end }}
+ if strings.Contains({{ if .CredPointer }}*{{ end }}payload.{{ .CredField }}, " ") {
+ // Remove authorization scheme prefix (e.g. "Bearer")
+ cred := strings.SplitN({{ if .CredPointer }}*{{ end }}payload.{{ .CredField }}, " ", 2)[1]
+ payload.{{ .CredField }} = {{ if .CredPointer }}&{{ end }}cred
+ }
+ {{- if not .CredRequired }}
+ }
+ {{- end }}
+{{- end }}
+
+ return payload, nil
+ }
+}
+` + requestParamsHeadersT
+
+// input: RequestData
+const requestParamsHeadersT = `{{- define "request_params_headers" }}
+{{- if or .PathParams .QueryParams .Headers }}
+{{- if .ServerBody }}{{/* we want a newline only if there was code before */}}
+{{ end }}
+ var (
+ {{- range .PathParams }}
+ {{ .VarName }} {{ .TypeRef }}
+ {{- end }}
+ {{- range .QueryParams }}
+ {{ .VarName }} {{ .TypeRef }}
+ {{- end }}
+ {{- range .Headers }}
+ {{ .VarName }} {{ .TypeRef }}
+ {{- end }}
+ {{- if and .MustValidate (or (not .ServerBody) .Multipart) }}
+ err error
+ {{- end }}
+ {{- if .PathParams }}
+
+ params = mux.Vars(r)
+ {{- end }}
+ )
+
+{{- range .PathParams }}
+ {{- if and (or (eq .Type.Name "string") (eq .Type.Name "any")) }}
+ {{ .VarName }} = params["{{ .Name }}"]
+
+ {{- else }}{{/* not string and not any */}}
+ {
+ {{ .VarName }}Raw := params["{{ .Name }}"]
+ {{- template "path_conversion" . }}
+ }
+
+ {{- end }}
+ {{- if .Validate }}
+ {{ .Validate }}
+ {{- end }}
+{{- end }}
+
+{{- range .QueryParams }}
+ {{- if and (or (eq .Type.Name "string") (eq .Type.Name "any")) .Required }}
+ {{ .VarName }} = r.URL.Query().Get("{{ .Name }}")
+ if {{ .VarName }} == "" {
+ err = goa.MergeErrors(err, goa.MissingFieldError("{{ .Name }}", "query string"))
+ }
+
+ {{- else if (or (eq .Type.Name "string") (eq .Type.Name "any")) }}
+ {{ .VarName }}Raw := r.URL.Query().Get("{{ .Name }}")
+ if {{ .VarName }}Raw != "" {
+ {{ .VarName }} = {{ if and (eq .Type.Name "string") .Pointer }}&{{ end }}{{ .VarName }}Raw
+ }
+ {{- if .DefaultValue }} else {
+ {{ .VarName }} = {{ if eq .Type.Name "string" }}{{ printf "%q" .DefaultValue }}{{ else }}{{ printf "%#v" .DefaultValue }}{{ end }}
+ }
+ {{- end }}
+
+ {{- else if .StringSlice }}
+ {{ .VarName }} = r.URL.Query()["{{ .Name }}"]
+ {{- if .Required }}
+ if {{ .VarName }} == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("{{ .Name }}", "query string"))
+ }
+ {{- else if .DefaultValue }}
+ if {{ .VarName }} == nil {
+ {{ .VarName }} = {{ printf "%#v" .DefaultValue }}
+ }
+ {{- end }}
+
+ {{- else if .Slice }}
+ {
+ {{ .VarName }}Raw := r.URL.Query()["{{ .Name }}"]
+ {{- if .Required }}
+ if {{ .VarName }}Raw == nil {
+ return goa.MergeErrors(err, goa.MissingFieldError("{{ .Name }}", "query string"))
+ }
+ {{- else if .DefaultValue }}
+ if {{ .VarName }}Raw == nil {
+ {{ .VarName }} = {{ printf "%#v" .DefaultValue }}
+ }
+ {{- end }}
+
+ {{- if .DefaultValue }}else {
+ {{- else if not .Required }}
+ if {{ .VarName }}Raw != nil {
+ {{- end }}
+ {{- template "slice_conversion" . }}
+ {{- if or .DefaultValue (not .Required) }}
+ }
+ {{- end }}
+ }
+
+ {{- else if .MapStringSlice }}
+ {{ .VarName }} = r.URL.Query()
+ {{- if .Required }}
+ if len({{ .VarName }}) == 0 {
+ err = goa.MergeErrors(err, goa.MissingFieldError("{{ .Name }}", "query string"))
+ }
+ {{- else if .DefaultValue }}
+ if len({{ .VarName }}) == 0 {
+ {{ .VarName }} = {{ printf "%#v" .DefaultValue }}
+ }
+ {{- end }}
+
+ {{- else if .Map }}
+ {
+ {{ .VarName }}Raw := r.URL.Query()
+ {{- if .Required }}
+ if len({{ .VarName }}Raw) == 0 {
+ err = goa.MergeErrors(err, goa.MissingFieldError("{{ .Name }}", "query string"))
+ }
+ {{- else if .DefaultValue }}
+ if len({{ .VarName }}Raw) == 0 {
+ {{ .VarName }} = {{ printf "%#v" .DefaultValue }}
+ }
+ {{- end }}
+
+ {{- if .DefaultValue }}else {
+ {{- else if not .Required }}
+ if len({{ .VarName }}Raw) != 0 {
+ {{- end }}
+ {{- if eq .Type.ElemType.Type.Name "array" }}
+ {{- if eq .Type.ElemType.Type.ElemType.Type.Name "string" }}
+ {{- template "map_key_conversion" . }}
+ {{- else }}
+ {{- template "map_slice_conversion" . }}
+ {{- end }}
+ {{- else }}
+ {{- template "map_conversion" . }}
+ {{- end }}
+ {{- if or .DefaultValue (not .Required) }}
+ }
+ {{- end }}
+ }
+
+ {{- else if .MapQueryParams }}
+ {
+ {{ .VarName }}Raw := r.URL.Query()
+ {{- if .Required }}
+ if len({{ .VarName }}Raw) == 0 {
+ err = goa.MergeErrors(err, goa.MissingFieldError("{{ .Name }}", "query string"))
+ }
+ {{- else if .DefaultValue }}
+ if len({{ .VarName }}Raw) == 0 {
+ {{ .VarName }} = {{ printf "%#v" .DefaultValue }}
+ }
+ {{- end }}
+
+ {{- if .DefaultValue }}else {
+ {{- else if not .Required }}
+ if len({{ .VarName }}Raw) != 0 {
+ {{- end }}
+ {{- if eq .Type.ElemType.Type.Name "array" }}
+ {{- if eq .Type.ElemType.Type.ElemType.Type.Name "string" }}
+ {{- template "map_key_conversion" . }}
+ {{- else }}
+ {{- template "map_slice_conversion" . }}
+ {{- end }}
+ {{- else }}
+ {{- template "map_conversion" . }}
+ {{- end }}
+ {{- if or .DefaultValue (not .Required) }}
+ }
+ {{- end }}
+ }
+
+ {{- else }}{{/* not string, not any, not slice and not map */}}
+ {
+ {{ .VarName }}Raw := r.URL.Query().Get("{{ .Name }}")
+ {{- if .Required }}
+ if {{ .VarName }}Raw == "" {
+ err = goa.MergeErrors(err, goa.MissingFieldError("{{ .Name }}", "query string"))
+ }
+ {{- else if .DefaultValue }}
+ if {{ .VarName }}Raw == "" {
+ {{ .VarName }} = {{ printf "%#v" .DefaultValue }}
+ }
+ {{- end }}
+
+ {{- if .DefaultValue }}else {
+ {{- else if not .Required }}
+ if {{ .VarName }}Raw != "" {
+ {{- end }}
+ {{- template "type_conversion" . }}
+ {{- if or .DefaultValue (not .Required) }}
+ }
+ {{- end }}
+ }
+
+ {{- end }}
+ {{- if .Validate }}
+ {{ .Validate }}
+ {{- end }}
+{{- end }}
+
+{{- range .Headers }}
+ {{- if and (or (eq .Type.Name "string") (eq .Type.Name "any")) .Required }}
+ {{ .VarName }} = r.Header.Get("{{ .Name }}")
+ if {{ .VarName }} == "" {
+ err = goa.MergeErrors(err, goa.MissingFieldError("{{ .Name }}", "header"))
+ }
+
+ {{- else if (or (eq .Type.Name "string") (eq .Type.Name "any")) }}
+ {{ .VarName }}Raw := r.Header.Get("{{ .Name }}")
+ if {{ .VarName }}Raw != "" {
+ {{ .VarName }} = {{ if and (eq .Type.Name "string") .Pointer }}&{{ end }}{{ .VarName }}Raw
+ }
+ {{- if .DefaultValue }} else {
+ {{ .VarName }} = {{ if eq .Type.Name "string" }}{{ printf "%q" .DefaultValue }}{{ else }}{{ printf "%#v" .DefaultValue }}{{ end }}
+ }
+ {{- end }}
+
+ {{- else if .StringSlice }}
+ {{ .VarName }} = r.Header["{{ .CanonicalName }}"]
+ {{- if .Required }}
+ if {{ .VarName }} == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("{{ .Name }}", "header"))
+ }
+ {{- else if .DefaultValue }}
+ if {{ .VarName }} == nil {
+ {{ .VarName }} = {{ printf "%#v" .DefaultValue }}
+ }
+ {{- end }}
+
+ {{- else if .Slice }}
+ {
+ {{ .VarName }}Raw := r.Header["{{ .CanonicalName }}"]
+ {{ if .Required }}if {{ .VarName }}Raw == nil {
+ err = goa.MergeErrors(err, goa.MissingFieldError("{{ .Name }}", "header"))
+ }
+ {{- else if .DefaultValue }}
+ if {{ .VarName }}Raw == nil {
+ {{ .VarName }} = {{ printf "%#v" .DefaultValue }}
+ }
+ {{- end }}
+
+ {{- if .DefaultValue }}else {
+ {{- else if not .Required }}
+ if {{ .VarName }}Raw != nil {
+ {{- end }}
+ {{- template "slice_conversion" . }}
+ {{- if or .DefaultValue (not .Required) }}
+ }
+ {{- end }}
+ }
+
+ {{- else }}{{/* not string, not any and not slice */}}
+ {
+ {{ .VarName }}Raw := r.Header.Get("{{ .Name }}")
+ {{- if .Required }}
+ if {{ .VarName }}Raw == "" {
+ err = goa.MergeErrors(err, goa.MissingFieldError("{{ .Name }}", "header"))
+ }
+ {{- else if .DefaultValue }}
+ if {{ .VarName }}Raw == "" {
+ {{ .VarName }} = {{ printf "%#v" .DefaultValue }}
+ }
+ {{- end }}
+
+ {{- if .DefaultValue }}else {
+ {{- else if not .Required }}
+ if {{ .VarName }}Raw != "" {
+ {{- end }}
+ {{- template "type_conversion" . }}
+ {{- if or .DefaultValue (not .Required) }}
+ }
+ {{- end }}
+ }
+ {{- end }}
+ {{- if .Validate }}
+ {{ .Validate }}
+ {{- end }}
+{{- end }}
+{{- end }}
+{{- end }}
+
+{{- define "path_conversion" }}
+ {{- if eq .Type.Name "array" }}
+ {{ .VarName }}RawSlice := strings.Split({{ .VarName }}Raw, ",")
+ {{ .VarName }} = make({{ goTypeRef .Type }}, len({{ .VarName }}RawSlice))
+ for i, rv := range {{ .VarName }}RawSlice {
+ {{- template "slice_item_conversion" . }}
+ }
+ {{- else }}
+ {{- template "type_conversion" . }}
+ {{- end }}
+{{- end }}
+
+{{- define "slice_conversion" }}
+ {{ .VarName }} = make({{ goTypeRef .Type }}, len({{ .VarName }}Raw))
+ for i, rv := range {{ .VarName }}Raw {
+ {{- template "slice_item_conversion" . }}
+ }
+{{- end }}
+
+{{- define "map_key_conversion" }}
+ {{ .VarName }} = make({{ goTypeRef .Type }}, len({{ .VarName }}Raw))
+ for keyRaw, val := range {{ .VarName }}Raw {
+ var key {{ goTypeRef .Type.KeyType.Type }}
+ {
+ {{- template "type_conversion" (conversionData "key" (printf "%q" "query") .Type.KeyType.Type) }}
+ }
+ {{ .VarName }}[key] = val
+ }
+{{- end }}
+
+{{- define "map_slice_conversion" }}
+ {{ .VarName }} = make({{ goTypeRef .Type }}, len({{ .VarName }}Raw))
+ for key{{ if not (eq .Type.KeyType.Type.Name "string") }}Raw{{ end }}, valRaw := range {{ .VarName }}Raw {
+
+ {{- if not (eq .Type.KeyType.Type.Name "string") }}
+ var key {{ goTypeRef .Type.KeyType.Type }}
+ {
+ {{- template "type_conversion" (conversionData "key" (printf "%q" "query") .Type.KeyType.Type) }}
+ }
+ {{- end }}
+ var val {{ goTypeRef .Type.ElemType.Type }}
+ {
+ {{- template "slice_conversion" (conversionData "val" (printf "%q" "query") .Type.ElemType.Type) }}
+ }
+ {{ .VarName }}[key] = val
+ }
+{{- end }}
+
+{{- define "map_conversion" }}
+ {{ .VarName }} = make({{ goTypeRef .Type }}, len({{ .VarName }}Raw))
+ for key{{ if not (eq .Type.KeyType.Type.Name "string") }}Raw{{ end }}, va := range {{ .VarName }}Raw {
+
+ {{- if not (eq .Type.KeyType.Type.Name "string") }}
+ var key {{ goTypeRef .Type.KeyType.Type }}
+ {
+ {{- if eq .Type.KeyType.Type.Name "string" }}
+ key = keyRaw
+ {{- else }}
+ {{- template "type_conversion" (conversionData "key" (printf "%q" "query") .Type.KeyType.Type) }}
+ {{- end }}
+ }
+ {{- end }}
+ var val {{ goTypeRef .Type.ElemType.Type }}
+ {
+ {{- if eq .Type.ElemType.Type.Name "string" }}
+ val = va[0]
+ {{- else }}
+ valRaw := va[0]
+ {{- template "type_conversion" (conversionData "val" (printf "%q" "query") .Type.ElemType.Type) }}
+ {{- end }}
+ }
+ {{ .VarName }}[key] = val
+ }
+{{- end }}
+
+{{- define "type_conversion" }}
+ {{- if eq .Type.Name "bytes" }}
+ {{ .VarName }} = []byte({{.VarName}}Raw)
+ {{- else if eq .Type.Name "int" }}
+ v, err2 := strconv.ParseInt({{ .VarName }}Raw, 10, strconv.IntSize)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "integer"))
+ }
+ {{- if .Pointer }}
+ pv := int(v)
+ {{ .VarName }} = &pv
+ {{- else }}
+ {{ .VarName }} = int(v)
+ {{- end }}
+ {{- else if eq .Type.Name "int32" }}
+ v, err2 := strconv.ParseInt({{ .VarName }}Raw, 10, 32)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "integer"))
+ }
+ {{- if .Pointer }}
+ pv := int32(v)
+ {{ .VarName }} = &pv
+ {{- else }}
+ {{ .VarName }} = int32(v)
+ {{- end }}
+ {{- else if eq .Type.Name "int64" }}
+ v, err2 := strconv.ParseInt({{ .VarName }}Raw, 10, 64)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "integer"))
+ }
+ {{ .VarName }} = {{ if .Pointer}}&{{ end }}v
+ {{- else if eq .Type.Name "uint" }}
+ v, err2 := strconv.ParseUint({{ .VarName }}Raw, 10, strconv.IntSize)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "unsigned integer"))
+ }
+ {{- if .Pointer }}
+ pv := uint(v)
+ {{ .VarName }} = &pv
+ {{- else }}
+ {{ .VarName }} = uint(v)
+ {{- end }}
+ {{- else if eq .Type.Name "uint32" }}
+ v, err2 := strconv.ParseUint({{ .VarName }}Raw, 10, 32)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "unsigned integer"))
+ }
+ {{- if .Pointer }}
+ pv := uint32(v)
+ {{ .VarName }} = &pv
+ {{- else }}
+ {{ .VarName }} = uint32(v)
+ {{- end }}
+ {{- else if eq .Type.Name "uint64" }}
+ v, err2 := strconv.ParseUint({{ .VarName }}Raw, 10, 64)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "unsigned integer"))
+ }
+ {{ .VarName }} = {{ if .Pointer }}&{{ end }}v
+ {{- else if eq .Type.Name "float32" }}
+ v, err2 := strconv.ParseFloat({{ .VarName }}Raw, 32)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "float"))
+ }
+ {{- if .Pointer }}
+ pv := float32(v)
+ {{ .VarName }} = &pv
+ {{- else }}
+ {{ .VarName }} = float32(v)
+ {{- end }}
+ {{- else if eq .Type.Name "float64" }}
+ v, err2 := strconv.ParseFloat({{ .VarName }}Raw, 64)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "float"))
+ }
+ {{ .VarName }} = {{ if .Pointer }}&{{ end }}v
+ {{- else if eq .Type.Name "boolean" }}
+ v, err2 := strconv.ParseBool({{ .VarName }}Raw)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "boolean"))
+ }
+ {{ .VarName }} = {{ if .Pointer }}&{{ end }}v
+ {{- else }}
+ // unsupported type {{ .Type.Name }} for var {{ .VarName }}
+ {{- end }}
+{{- end }}
+{{- define "slice_item_conversion" }}
+ {{- if eq .Type.ElemType.Type.Name "string" }}
+ {{ .VarName }}[i] = rv
+ {{- else if eq .Type.ElemType.Type.Name "bytes" }}
+ {{ .VarName }}[i] = []byte(rv)
+ {{- else if eq .Type.ElemType.Type.Name "int" }}
+ v, err2 := strconv.ParseInt(rv, 10, strconv.IntSize)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of integers"))
+ }
+ {{ .VarName }}[i] = int(v)
+ {{- else if eq .Type.ElemType.Type.Name "int32" }}
+ v, err2 := strconv.ParseInt(rv, 10, 32)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of integers"))
+ }
+ {{ .VarName }}[i] = int32(v)
+ {{- else if eq .Type.ElemType.Type.Name "int64" }}
+ v, err2 := strconv.ParseInt(rv, 10, 64)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of integers"))
+ }
+ {{ .VarName }}[i] = v
+ {{- else if eq .Type.ElemType.Type.Name "uint" }}
+ v, err2 := strconv.ParseUint(rv, 10, strconv.IntSize)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of unsigned integers"))
+ }
+ {{ .VarName }}[i] = uint(v)
+ {{- else if eq .Type.ElemType.Type.Name "uint32" }}
+ v, err2 := strconv.ParseUint(rv, 10, 32)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of unsigned integers"))
+ }
+ {{ .VarName }}[i] = int32(v)
+ {{- else if eq .Type.ElemType.Type.Name "uint64" }}
+ v, err2 := strconv.ParseUint(rv, 10, 64)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of unsigned integers"))
+ }
+ {{ .VarName }}[i] = v
+ {{- else if eq .Type.ElemType.Type.Name "float32" }}
+ v, err2 := strconv.ParseFloat(rv, 32)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of floats"))
+ }
+ {{ .VarName }}[i] = float32(v)
+ {{- else if eq .Type.ElemType.Type.Name "float64" }}
+ v, err2 := strconv.ParseFloat(rv, 64)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of floats"))
+ }
+ {{ .VarName }}[i] = v
+ {{- else if eq .Type.ElemType.Type.Name "boolean" }}
+ v, err2 := strconv.ParseBool(rv)
+ if err2 != nil {
+ err = goa.MergeErrors(err, goa.InvalidFieldTypeError({{ printf "%q" .VarName }}, {{ .VarName}}Raw, "array of booleans"))
+ }
+ {{ .VarName }}[i] = v
+ {{- else if eq .Type.ElemType.Type.Name "any" }}
+ {{ .VarName }}[i] = rv
+ {{- else }}
+ // unsupported slice type {{ .Type.ElemType.Type.Name }} for var {{ .VarName }}
+ {{- end }}
+{{- end }}
+`
+
+// input: EndpointData
+//
+// responseEncoderT renders the server-side response encoder for one endpoint.
+// The generated function type-asserts the (possibly viewed) result, sets the
+// "goa-view" header when the view is chosen at runtime, selects the response
+// by tag attribute when several responses are designed, sets the Content-Type
+// context value when one is designed, and delegates header/body/status
+// rendering to the shared "response" template (responseT, appended below).
+const responseEncoderT = `{{ printf "%s returns an encoder for responses returned by the %s %s endpoint." .ResponseEncoder .ServiceName .Method.Name | comment }}
+func {{ .ResponseEncoder }}(encoder func(context.Context, http.ResponseWriter) goahttp.Encoder) func(context.Context, http.ResponseWriter, interface{}) error {
+	return func(ctx context.Context, w http.ResponseWriter, v interface{}) error {
+		{{- if .Result.MustInit }}
+		{{- if .Method.ViewedResult }}
+		res := v.({{ .Method.ViewedResult.FullRef }})
+		{{- if not .Method.ViewedResult.ViewName }}
+		w.Header().Set("goa-view", res.View)
+		{{- end }}
+		{{- else }}
+		res := v.({{ .Result.Ref }})
+		{{- end }}
+		{{- range .Result.Responses }}
+		{{- if .ContentType }}
+		ctx = context.WithValue(ctx, goahttp.ContentTypeKey, "{{ .ContentType }}")
+		{{- end }}
+		{{- if .TagName }}
+		{{- if .TagPointer }}
+		if res.{{ if .ViewedResult }}Projected.{{ end }}{{ .TagName }} != nil && *res.{{ if .ViewedResult }}Projected.{{ end }}{{ .TagName }} == {{ printf "%q" .TagValue }} {
+		{{- else }}
+		if {{ if .ViewedResult }}*{{ end }}res.{{ if .ViewedResult }}Projected.{{ end }}{{ .TagName }} == {{ printf "%q" .TagValue }} {
+		{{- end }}
+		{{- end -}}
+		{{ template "response" . }}
+		{{- if .ServerBody }}
+		return enc.Encode(body)
+		{{- else }}
+		return nil
+		{{- end }}
+		{{- if .TagName }}
+		}
+		{{- end }}
+		{{- end }}
+		{{- else }}
+		{{- with (index .Result.Responses 0) }}
+		w.WriteHeader({{ .StatusCode }})
+		return nil
+		{{- end }}
+		{{- end }}
+	}
+}
+` + responseT
+
+// input: EndpointData
+//
+// errorEncoderT renders the server-side error encoder for one endpoint.
+// Errors implementing ErrorNamer are dispatched by ErrorName() to the
+// response designed for that error; any other error falls through to the
+// default goahttp.ErrorEncoder.
+const errorEncoderT = `{{ printf "%s returns an encoder for errors returned by the %s %s endpoint." .ErrorEncoder .Method.Name .ServiceName | comment }}
+func {{ .ErrorEncoder }}(encoder func(context.Context, http.ResponseWriter) goahttp.Encoder) func(context.Context, http.ResponseWriter, error) error {
+	encodeError := goahttp.ErrorEncoder(encoder)
+	return func(ctx context.Context, w http.ResponseWriter, v error) error {
+		en, ok := v.(ErrorNamer)
+		if !ok {
+			return encodeError(ctx, w, v)
+		}
+		switch en.ErrorName() {
+		{{- range $gerr := .Errors }}
+		{{- range $err := .Errors }}
+		case {{ printf "%q" .Name }}:
+			res := v.({{ $err.Ref }})
+			{{- with .Response}}
+				{{- template "response" . }}
+				{{- if .ServerBody }}
+				return enc.Encode(body)
+				{{- end }}
+			{{- end }}
+		{{- end }}
+		{{- end }}
+		default:
+			return encodeError(ctx, w, v)
+		}
+	}
+}
+` + responseT
+
+// input: ResponseData
+//
+// responseT holds the shared "response" template used by both the response
+// and error encoders above. It builds the response body (one case per view
+// when the result is a viewed result type), writes the designed headers
+// (including default-value fallbacks for optional fields), sets the
+// "goa-error" header for error responses, and finally writes the status
+// code. The nested "header_conversion" template converts a primitive,
+// bytes, any or array value into the string form expected by
+// http.Header.Set.
+const responseT = `{{ define "response" -}}
+	{{- $servBodyLen := len .ServerBody }}
+	{{- if gt $servBodyLen 0 }}
+	enc := encoder(ctx, w)
+	{{- end }}
+	{{- if gt $servBodyLen 0 }}
+	{{- if and (gt $servBodyLen 1) $.ViewedResult }}
+	var body interface{}
+	switch res.View {
+	{{- range $.ViewedResult.Views }}
+	case {{ printf "%q" .Name }}{{ if eq .Name "default" }}, ""{{ end }}:
+		{{- $vsb := (viewedServerBody $.ServerBody .Name) }}
+		body = {{ $vsb.Init.Name }}({{ range $vsb.Init.ServerArgs }}{{ .Ref }}, {{ end }})
+	{{- end }}
+	}
+	{{- else if (index .ServerBody 0).Init }}
+	body := {{ (index .ServerBody 0).Init.Name }}({{ range (index .ServerBody 0).Init.ServerArgs }}{{ .Ref }}, {{ end }})
+	{{- else }}
+	body := res{{ if $.ViewedResult }}.Projected{{ end }}{{ if .ResultAttr }}.{{ .ResultAttr }}{{ end }}
+	{{- end }}
+	{{- end }}
+	{{- range .Headers }}
+	{{- $initDef := and (or .Pointer .Slice) .DefaultValue (not $.TagName) }}
+	{{- $checkNil := and (or .Pointer .Slice (eq .Type.Name "bytes") (eq .Type.Name "any") $initDef) (not $.TagName) }}
+	{{- if $checkNil }}
+	if res.{{ if $.ViewedResult }}Projected.{{ end }}{{ .FieldName }} != nil {
+	{{- end }}
+
+	{{- if eq .Type.Name "string" }}
+	w.Header().Set("{{ .Name }}", {{ if or .Pointer $.ViewedResult }}*{{ end }}res{{ if $.ViewedResult }}.Projected{{ end }}{{ if .FieldName }}.{{ .FieldName }}{{ end }})
+	{{- else }}
+	val := res{{ if $.ViewedResult }}.Projected{{ end }}{{ if .FieldName }}.{{ .FieldName }}{{ end }}
+	{{ template "header_conversion" (headerConversionData .Type (printf "%ss" .VarName) (not .Pointer) "val") }}
+	w.Header().Set("{{ .Name }}", {{ .VarName }}s)
+	{{- end }}
+
+	{{- if $initDef }}
+	{{ if $checkNil }} } else { {{ else }}if res{{ if $.ViewedResult }}.Projected{{ end }}.{{ .FieldName }} == nil { {{ end }}
+	w.Header().Set("{{ .Name }}", "{{ printValue .Type .DefaultValue }}")
+	{{- end }}
+
+	{{- if or $checkNil $initDef }}
+	}
+	{{- end }}
+
+	{{- end }}
+
+	{{- if .ErrorHeader }}
+	w.Header().Set("goa-error", {{ printf "%q" .ErrorHeader }})
+	{{- end }}
+	w.WriteHeader({{ .StatusCode }})
+{{- end }}
+
+{{- define "header_conversion" }}
+	{{- if eq .Type.Name "boolean" -}}
+		{{ .VarName }} := strconv.FormatBool({{ if not .Required }}*{{ end }}{{ .Target }})
+	{{- else if eq .Type.Name "int" -}}
+		{{ .VarName }} := strconv.Itoa({{ if not .Required }}*{{ end }}{{ .Target }})
+	{{- else if eq .Type.Name "int32" -}}
+		{{ .VarName }} := strconv.FormatInt(int64({{ if not .Required }}*{{ end }}{{ .Target }}), 10)
+	{{- else if eq .Type.Name "int64" -}}
+		{{ .VarName }} := strconv.FormatInt({{ if not .Required }}*{{ end }}{{ .Target }}, 10)
+	{{- else if eq .Type.Name "uint" -}}
+		{{ .VarName }} := strconv.FormatUint(uint64({{ if not .Required }}*{{ end }}{{ .Target }}), 10)
+	{{- else if eq .Type.Name "uint32" -}}
+		{{ .VarName }} := strconv.FormatUint(uint64({{ if not .Required }}*{{ end }}{{ .Target }}), 10)
+	{{- else if eq .Type.Name "uint64" -}}
+		{{ .VarName }} := strconv.FormatUint({{ if not .Required }}*{{ end }}{{ .Target }}, 10)
+	{{- else if eq .Type.Name "float32" -}}
+		{{ .VarName }} := strconv.FormatFloat(float64({{ if not .Required }}*{{ end }}{{ .Target }}), 'f', -1, 32)
+	{{- else if eq .Type.Name "float64" -}}
+		{{ .VarName }} := strconv.FormatFloat({{ if not .Required }}*{{ end }}{{ .Target }}, 'f', -1, 64)
+	{{- else if eq .Type.Name "string" -}}
+		{{ .VarName }} := {{ .Target }}
+	{{- else if eq .Type.Name "bytes" -}}
+		{{ .VarName }} := string({{ .Target }})
+	{{- else if eq .Type.Name "any" -}}
+		{{ .VarName }} := fmt.Sprintf("%v", {{ .Target }})
+	{{- else if eq .Type.Name "array" -}}
+		{{- if eq .Type.ElemType.Type.Name "string" -}}
+		{{ .VarName }} := strings.Join({{ .Target }}, ", ")
+		{{- else -}}
+		{{ .VarName }}Slice := make([]string, len({{ .Target }}))
+		for i, e := range {{ .Target }} {
+			{{ template "header_conversion" (headerConversionData .Type.ElemType.Type "es" true "e") }}
+			{{ .VarName }}Slice[i] = es
+		}
+		{{ .VarName }} := strings.Join({{ .VarName }}Slice, ", ")
+		{{- end }}
+	{{- else }}
+		// unsupported type {{ .Type.Name }} for header field {{ .FieldName }}
+	{{- end }}
+{{- end -}}
+`
+
+// input: multipartData
+//
+// multipartRequestDecoderTypeT declares the named function type implemented
+// by user code to decode a multipart request into the endpoint payload.
+const multipartRequestDecoderTypeT = `{{ printf "%s is the type to decode multipart request for the %q service %q endpoint." .FuncName .ServiceName .MethodName | comment }}
+type {{ .FuncName }} func(*multipart.Reader, *{{ .Payload.Ref }}) error
+`
+
+// input: multipartData
+//
+// multipartRequestDecoderT renders the request decoder constructor for
+// multipart requests: the generated decoder hands the multipart reader to
+// the user-provided decode function, then decodes any params/headers via the
+// "request_params_headers" template (requestParamsHeadersT, appended below)
+// and copies the resulting constructor arguments into the payload.
+const multipartRequestDecoderT = `{{ printf "%s returns a decoder to decode the multipart request for the %q service %q endpoint." .InitName .ServiceName .MethodName | comment }}
+func {{ .InitName }}(mux goahttp.Muxer, {{ .VarName }} {{ .FuncName }}) func(r *http.Request) goahttp.Decoder {
+	return func(r *http.Request) goahttp.Decoder {
+		return goahttp.EncodingFunc(func(v interface{}) error {
+			mr, merr := r.MultipartReader()
+			if merr != nil {
+				return merr
+			}
+			p := v.(*{{ .Payload.Ref }})
+			if err := {{ .VarName }}(mr, p); err != nil {
+				return err
+			}
+			{{- template "request_params_headers" .Payload.Request }}
+			{{- if .Payload.Request.MustValidate }}
+			if err != nil {
+				return err
+			}
+			{{- end }}
+			{{- if .Payload.Request.PayloadInit }}
+			{{- range .Payload.Request.PayloadInit.ServerArgs }}
+			{{- if .FieldName }}
+			(*p).{{ .FieldName }} = {{ if .Pointer }}&{{ end }}{{ .Name }}
+			{{- end }}
+			{{- end }}
+			{{- end }}
+			return nil
+		})
+	}
+}
+` + requestParamsHeadersT
diff --git a/vendor/goa.design/goa/http/codegen/server_types.go b/vendor/goa.design/goa/http/codegen/server_types.go
new file mode 100644
index 000000000..2cc016a6c
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/server_types.go
@@ -0,0 +1,255 @@
+package codegen
+
+import (
+ "path/filepath"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// ServerTypeFiles returns the HTTP transport type files, one per HTTP
+// service declared in the design root.
+func ServerTypeFiles(genpkg string, root *expr.RootExpr) []*codegen.File {
+	seen := make(map[string]struct{})
+	files := make([]*codegen.File, len(root.API.HTTP.Services))
+	for i, svc := range root.API.HTTP.Services {
+		files[i] = serverType(genpkg, svc, seen)
+	}
+	return files
+}
+
+// serverType returns the file containing the type definitions used by the
+// HTTP transport for the given service server.
+//
+// NOTE(review): the seen parameter is not consulted by this implementation;
+// duplicate generation of response body types is suppressed through
+// sd.ServerTypeNames instead. The parameter is kept so the signature stays
+// compatible with ServerTypeFiles.
+//
+// Below are the rules governing whether values are pointers or not. Note that
+// the rules only apply to values that hold primitive types, values that hold
+// slices, maps or objects always use pointers either implicitly - slices and
+// maps - or explicitly - objects.
+//
+// * The payload struct fields (if a struct) hold pointers when not required
+// and have no default value.
+//
+// * Request body fields (if the body is a struct) always hold pointers to
+// allow for explicit validation.
+//
+// * Request header, path and query string parameter variables hold pointers
+// when not required. Request header, body fields and param variables that
+// have default values are never required (enforced by DSL engine).
+//
+// * The result struct fields (if a struct) hold pointers when not required
+// or have a default value (so generated code can set when null)
+//
+// * Response body fields (if the body is a struct) and header variables hold
+// pointers when not required and have no default value.
+//
+func serverType(genpkg string, svc *expr.HTTPServiceExpr, seen map[string]struct{}) *codegen.File {
+	// A single lookup is enough: the original code fetched the same
+	// *ServiceData twice (once as rdata, once as sd).
+	sd := HTTPServices.Get(svc.Name())
+	path := filepath.Join(codegen.Gendir, "http", codegen.SnakeCase(svc.Name()), "server", "types.go")
+	header := codegen.Header(svc.Name()+" HTTP server types", "server",
+		[]*codegen.ImportSpec{
+			{Path: "unicode/utf8"},
+			{Path: genpkg + "/" + codegen.SnakeCase(svc.Name()), Name: sd.Service.PkgName},
+			{Path: "goa.design/goa", Name: "goa"},
+			{Path: genpkg + "/" + codegen.SnakeCase(svc.Name()) + "/" + "views", Name: sd.Service.ViewsPkg},
+		},
+	)
+
+	var (
+		// initData collects the body constructors to render.
+		initData []*InitData
+		// validatedTypes collects the types that need a Validate function.
+		validatedTypes []*TypeData
+
+		sections = []*codegen.SectionTemplate{header}
+	)
+
+	// request body types
+	for _, a := range svc.HTTPEndpoints {
+		adata := sd.Endpoint(a.Name())
+		if data := adata.Payload.Request.ServerBody; data != nil {
+			if data.Def != "" {
+				sections = append(sections, &codegen.SectionTemplate{
+					Name: "request-body-type-decl",
+					Source: typeDeclT,
+					Data: data,
+				})
+			}
+			if data.ValidateDef != "" {
+				validatedTypes = append(validatedTypes, data)
+			}
+		}
+		if adata.ServerStream != nil {
+			if data := adata.ServerStream.Payload; data != nil {
+				if data.Def != "" {
+					sections = append(sections, &codegen.SectionTemplate{
+						Name: "request-body-type-decl",
+						Source: typeDeclT,
+						Data: data,
+					})
+				}
+				if data.ValidateDef != "" {
+					validatedTypes = append(validatedTypes, data)
+				}
+			}
+		}
+	}
+
+	// response body types
+	for _, a := range svc.HTTPEndpoints {
+		adata := sd.Endpoint(a.Name())
+		for _, resp := range adata.Result.Responses {
+			for _, data := range resp.ServerBody {
+				if generated, ok := sd.ServerTypeNames[data.Name]; ok && !generated {
+					if data.Def != "" && !sd.ServerTypeNames[data.VarName] {
+						sections = append(sections, &codegen.SectionTemplate{
+							Name: "response-server-body",
+							Source: typeDeclT,
+							Data: data,
+						})
+					}
+					if data.Init != nil {
+						initData = append(initData, data.Init)
+					}
+					if data.ValidateDef != "" {
+						validatedTypes = append(validatedTypes, data)
+					}
+					sd.ServerTypeNames[data.Name] = true
+				}
+			}
+		}
+	}
+
+	// error body types
+	for _, a := range svc.HTTPEndpoints {
+		adata := sd.Endpoint(a.Name())
+		for _, gerr := range adata.Errors {
+			for _, herr := range gerr.Errors {
+				for _, data := range herr.Response.ServerBody {
+					if data.Def != "" {
+						sections = append(sections, &codegen.SectionTemplate{
+							Name: "error-body-type-decl",
+							Source: typeDeclT,
+							Data: data,
+						})
+					}
+					if data.Init != nil {
+						initData = append(initData, data.Init)
+					}
+					if data.ValidateDef != "" {
+						validatedTypes = append(validatedTypes, data)
+					}
+				}
+			}
+		}
+	}
+
+	// body attribute types
+	for _, data := range sd.ServerBodyAttributeTypes {
+		if data.Def != "" {
+			sections = append(sections, &codegen.SectionTemplate{
+				Name: "server-body-attributes",
+				Source: typeDeclT,
+				Data: data,
+			})
+		}
+
+		if data.ValidateDef != "" {
+			validatedTypes = append(validatedTypes, data)
+		}
+	}
+
+	// body constructors
+	for _, init := range initData {
+		sections = append(sections, &codegen.SectionTemplate{
+			Name: "server-body-init",
+			Source: serverBodyInitT,
+			Data: init,
+		})
+	}
+
+	for _, adata := range sd.Endpoints {
+		// request to method payload
+		if init := adata.Payload.Request.PayloadInit; init != nil {
+			sections = append(sections, &codegen.SectionTemplate{
+				Name: "server-payload-init",
+				Source: serverTypeInitT,
+				Data: init,
+			})
+		}
+		if adata.ServerStream != nil && adata.ServerStream.Payload != nil {
+			if init := adata.ServerStream.Payload.Init; init != nil {
+				sections = append(sections, &codegen.SectionTemplate{
+					Name: "server-payload-init",
+					Source: serverTypeInitT,
+					Data: init,
+				})
+			}
+		}
+	}
+
+	// validate methods
+	for _, data := range validatedTypes {
+		sections = append(sections, &codegen.SectionTemplate{
+			Name: "server-validate",
+			Source: validateT,
+			Data: data,
+		})
+	}
+
+	return &codegen.File{Path: path, SectionTemplates: sections}
+}
+
+// input: TypeData
+//
+// typeDeclT renders a single named type declaration preceded by its comment.
+const typeDeclT = `{{ comment .Description }}
+type {{ .VarName }} {{ .Def }}
+`
+
+// input: InitData
+//
+// serverTypeInitT renders a server-side constructor for a payload, result or
+// error type. When transform code exists (ServerCode) it is emitted first and
+// its value is optionally wrapped in the attribute named by
+// ReturnTypeAttribute; otherwise the constructor arguments are assigned to
+// the return struct field by field.
+const serverTypeInitT = `{{ comment .Description }}
+func {{ .Name }}({{- range .ServerArgs }}{{ .Name }} {{ .TypeRef }}, {{ end }}) {{ .ReturnTypeRef }} {
+	{{- if .ServerCode }}
+	{{ .ServerCode }}
+	{{- if .ReturnTypeAttribute }}
+	res := &{{ .ReturnTypeName }}{
+		{{ .ReturnTypeAttribute }}: v,
+	}
+	{{- end }}
+	{{- if .ReturnIsStruct }}
+	{{- range .ServerArgs }}
+	{{- if .FieldName }}
+	{{ if $.ReturnTypeAttribute }}res{{ else }}v{{ end }}.{{ .FieldName }} = {{ if .Pointer }}&{{ end }}{{ .Name }}
+	{{- end }}
+	{{- end }}
+	{{- end }}
+	return {{ if .ReturnTypeAttribute }}res{{ else }}v{{ end }}
+	{{- else }}
+	{{- if .ReturnIsStruct }}
+	return &{{ .ReturnTypeName }}{
+		{{- range .ServerArgs }}
+		{{- if .FieldName }}
+		{{ .FieldName }}: {{ if .Pointer }}&{{ end }}{{ .Name }},
+		{{- end }}
+		{{- end }}
+	}
+	{{- end }}
+	{{ end -}}
+}
+`
+
+// input: InitData
+//
+// serverBodyInitT renders a constructor that builds a response or error body
+// value from the generated transform code held in ServerCode (which is
+// expected to produce a variable named "body").
+const serverBodyInitT = `{{ comment .Description }}
+func {{ .Name }}({{ range .ServerArgs }}{{ .Name }} {{.TypeRef }}, {{ end }}) {{ .ReturnTypeRef }} {
+	{{ .ServerCode }}
+	return body
+}
+`
+
+// input: TypeData
+//
+// validateT renders the ValidateXxx function that runs the validations
+// defined on a generated body type (ValidateDef).
+const validateT = `{{ printf "Validate%s runs the validations defined on %s" .VarName .Name | comment }}
+func Validate{{ .VarName }}(body {{ .Ref }}) (err error) {
+	{{ .ValidateDef }}
+	return
+}
+`
diff --git a/vendor/goa.design/goa/http/codegen/service_data.go b/vendor/goa.design/goa/http/codegen/service_data.go
new file mode 100644
index 000000000..a95bf59a3
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/service_data.go
@@ -0,0 +1,2851 @@
+package codegen
+
+import (
+ "bytes"
+ "fmt"
+ "net/http"
+ "sort"
+ "strconv"
+ "strings"
+ "text/template"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/codegen/service"
+ "goa.design/goa/expr"
+)
+
+// HTTPServices holds the data computed from the design needed to generate the
+// transport code of the services, keyed by service name.
+var HTTPServices = make(ServicesData)
+
+var (
+	// pathInitTmpl is the template used to render path constructors code.
+	pathInitTmpl = template.Must(template.New("path-init").Funcs(template.FuncMap{"goify": codegen.Goify}).Parse(pathInitT))
+	// requestInitTmpl is the template used to render request constructors.
+	requestInitTmpl = template.Must(template.New("request-init").Parse(requestInitT))
+)
+
+type (
+ // ServicesData encapsulates the data computed from the design.
+ ServicesData map[string]*ServiceData
+
+ // ServiceData contains the data used to render the code related to a
+ // single service.
+ ServiceData struct {
+ // Service contains the related service data.
+ Service *service.Data
+ // Endpoints describes the endpoint data for this service.
+ Endpoints []*EndpointData
+ // FileServers lists the file servers for this service.
+ FileServers []*FileServerData
+ // ServerStruct is the name of the HTTP server struct.
+ ServerStruct string
+ // MountPointStruct is the name of the mount point struct.
+ MountPointStruct string
+ // ServerInit is the name of the constructor of the server
+ // struct.
+ ServerInit string
+ // MountServer is the name of the mount function.
+ MountServer string
+		// ServerService is the name of the service function.
+		ServerService string
+ // ClientStruct is the name of the HTTP client struct.
+ ClientStruct string
+ // ServerBodyAttributeTypes is the list of user types used to
+ // define the request, response and error response type
+ // attributes in the server code.
+ ServerBodyAttributeTypes []*TypeData
+ // ClientBodyAttributeTypes is the list of user types used to
+ // define the request, response and error response type
+ // attributes in the client code.
+ ClientBodyAttributeTypes []*TypeData
+ // ServerTypeNames records the user type names used to define
+ // the endpoint request and response bodies for server code.
+ // The type name is used as the key and a bool as the value
+ // which if true indicates that the type has been generated
+ // in the server package.
+ ServerTypeNames map[string]bool
+ // ClientTypeNames records the user type names used to define
+ // the endpoint request and response bodies for client code.
+ // The type name is used as the key and a bool as the value
+ // which if true indicates that the type has been generated
+ // in the client package.
+ ClientTypeNames map[string]bool
+ // ServerTransformHelpers is the list of transform functions
+ // required by the various server side constructors.
+ ServerTransformHelpers []*codegen.TransformFunctionData
+ // ClientTransformHelpers is the list of transform functions
+ // required by the various client side constructors.
+ ClientTransformHelpers []*codegen.TransformFunctionData
+ // Scope initialized with all the server and client types.
+ Scope *codegen.NameScope
+ }
+
+ // EndpointData contains the data used to render the code related to a
+ // single service HTTP endpoint.
+ EndpointData struct {
+ // Method contains the related service method data.
+ Method *service.MethodData
+ // ServiceName is the name of the service exposing the endpoint.
+ ServiceName string
+ // ServiceVarName is the goified service name (first letter
+ // lowercase).
+ ServiceVarName string
+ // ServicePkgName is the name of the service package.
+ ServicePkgName string
+ // Payload describes the method HTTP payload.
+ Payload *PayloadData
+ // Result describes the method HTTP result.
+ Result *ResultData
+ // Errors describes the method HTTP errors.
+ Errors []*ErrorGroupData
+ // Routes describes the possible routes for this endpoint.
+ Routes []*RouteData
+ // BasicScheme is the basic auth security scheme if any.
+ BasicScheme *service.SchemeData
+ // HeaderSchemes lists all the security requirement schemes that
+ // apply to the method and are encoded in the request header.
+ HeaderSchemes []*service.SchemeData
+ // BodySchemes lists all the security requirement schemes that
+ // apply to the method and are encoded in the request body.
+ BodySchemes []*service.SchemeData
+ // QuerySchemes lists all the security requirement schemes that
+ // apply to the method and are encoded in the request query
+ // string.
+ QuerySchemes []*service.SchemeData
+
+ // server
+
+ // MountHandler is the name of the mount handler function.
+ MountHandler string
+ // HandlerInit is the name of the constructor function for the
+ // http handler function.
+ HandlerInit string
+ // RequestDecoder is the name of the request decoder function.
+ RequestDecoder string
+ // ResponseEncoder is the name of the response encoder function.
+ ResponseEncoder string
+ // ErrorEncoder is the name of the error encoder function.
+ ErrorEncoder string
+ // MultipartRequestDecoder indicates the request decoder for
+ // multipart content type.
+ MultipartRequestDecoder *MultipartData
+ // ServerStream holds the data to render the server struct which
+ // implements the server stream interface.
+ ServerStream *StreamData
+
+ // client
+
+ // ClientStruct is the name of the HTTP client struct.
+ ClientStruct string
+ // EndpointInit is the name of the constructor function for the
+ // client endpoint.
+ EndpointInit string
+ // RequestInit is the request builder function.
+ RequestInit *InitData
+ // RequestEncoder is the name of the request encoder function.
+ RequestEncoder string
+ // ResponseDecoder is the name of the response decoder function.
+ ResponseDecoder string
+ // MultipartRequestEncoder indicates the request encoder for
+ // multipart content type.
+ MultipartRequestEncoder *MultipartData
+ // ClientStream holds the data to render the client struct which
+ // implements the client stream interface.
+ ClientStream *StreamData
+ }
+
+ // FileServerData lists the data needed to generate file servers.
+ FileServerData struct {
+ // MountHandler is the name of the mount handler function.
+ MountHandler string
+ // RequestPaths is the set of HTTP paths to the server.
+ RequestPaths []string
+		// FilePath is the root server file path.
+		FilePath string
+		// IsDir is true if the file server serves files under a
+		// directory, false if it serves a single file.
+ IsDir bool
+ // PathParam is the name of the parameter used to capture the
+ // path for file servers that serve files under a directory.
+ PathParam string
+ }
+
+ // PayloadData contains the payload information required to generate the
+ // transport decode (server) and encode (client) code.
+ PayloadData struct {
+ // Name is the name of the payload type.
+ Name string
+ // Ref is the fully qualified reference to the payload type.
+ Ref string
+ // Request contains the data for the corresponding HTTP request.
+ Request *RequestData
+ // DecoderReturnValue is a reference to the decoder return value
+ // if there is no payload constructor (i.e. if Init is nil).
+ DecoderReturnValue string
+ }
+
+ // ResultData contains the result information required to generate the
+ // transport decode (client) and encode (server) code.
+ ResultData struct {
+ // Name is the name of the result type.
+ Name string
+ // Ref is the reference to the result type.
+ Ref string
+ // IsStruct is true if the result type is a user type defining
+ // an object.
+ IsStruct bool
+ // Inits contains the data required to render the result
+ // constructors if any.
+ Inits []*InitData
+ // Responses contains the data for the corresponding HTTP
+ // responses.
+ Responses []*ResponseData
+ // View is the view used to render the result.
+ View string
+ // MustInit indicates if a variable holding the result type must be
+ // initialized. It is used by server response encoder to initialize
+ // the result variable only if there are multiple responses, or the
+ // response has a body or a header.
+ MustInit bool
+ }
+
+ // ErrorGroupData contains the error information required to generate
+ // the transport decode (client) and encode (server) code for all errors
+ // with responses using a given HTTP status code.
+ ErrorGroupData struct {
+ // StatusCode is the response HTTP status code.
+ StatusCode string
+ // Errors contains the information for each error.
+ Errors []*ErrorData
+ }
+
+ // ErrorData contains the error information required to generate the
+ // transport decode (client) and encode (server) code.
+ ErrorData struct {
+ // Name is the error name.
+ Name string
+ // Ref is a reference to the error type.
+ Ref string
+ // Response is the error response data.
+ Response *ResponseData
+ }
+
+ // RequestData describes a request.
+ RequestData struct {
+ // PathParams describes the information about params that are
+ // present in the request path.
+ PathParams []*ParamData
+ // QueryParams describes the information about the params that
+ // are present in the request query string.
+ QueryParams []*ParamData
+ // Headers contains the HTTP request headers used to build the
+ // method payload.
+ Headers []*HeaderData
+ // ServerBody describes the request body type used by server
+ // code. The type is generated using pointers for all fields so
+ // that it can be validated.
+ ServerBody *TypeData
+ // ClientBody describes the request body type used by client
+ // code. The type does NOT use pointers for every fields since
+ // no validation is required.
+ ClientBody *TypeData
+ // PayloadInit contains the data required to render the
+ // payload constructor used by server code if any.
+ PayloadInit *InitData
+ // MustValidate is true if the request body or at least one
+ // parameter or header requires validation.
+ MustValidate bool
+ // Multipart if true indicates the request is a multipart
+ // request.
+ Multipart bool
+ }
+
+ // ResponseData describes a response.
+ ResponseData struct {
+ // StatusCode is the return code of the response.
+ StatusCode string
+ // Description is the response description.
+ Description string
+ // Headers provides information about the headers in the
+ // response.
+ Headers []*HeaderData
+ // ContentType contains the value of the response
+ // "Content-Type" header.
+ ContentType string
+ // ErrorHeader contains the value of the response "goa-error"
+ // header if any.
+ ErrorHeader string
+ // ServerBody is the type of the response body used by server
+ // code, nil if body should be empty. The type does NOT use
+ // pointers for all fields. If the method result is a result
+ // type and the response data describes a success response, then
+ // this field contains a type for every view in the result type.
+ // The type name is suffixed with the name of the view (except
+ // for "default" view where no suffix is added). A constructor
+ // is also generated server side for each view to transform the
+ // result type to the corresponding response body type. If
+ // method result is not a result type or if the response
+ // describes an error response, then this field contains at most
+ // one item.
+ ServerBody []*TypeData
+ // ClientBody is the type of the response body used by client
+ // code, nil if body should be empty. The type uses pointers for
+ // all fields so they can be validated.
+ ClientBody *TypeData
+		// ResultInit contains the data required to render the result or
+		// error constructor if any.
+		ResultInit *InitData
+ // TagName is the name of the attribute used to test whether the
+ // response is the one to use.
+ TagName string
+ // TagValue is the value the result attribute named by TagName
+ // must have for this response to be used.
+ TagValue string
+ // TagPointer is true if the tag attribute is a pointer.
+ TagPointer bool
+ // MustValidate is true if at least one header requires validation.
+ MustValidate bool
+ // ResultAttr sets the response body from the specified result
+ // type attribute. This field is set when the design uses
+ // Body("name") syntax to set the response body and the result
+ // type is an object.
+ ResultAttr string
+ // ViewedResult indicates whether the response body type is a
+ // result type.
+ ViewedResult *service.ViewedResultTypeData
+ }
+
+ // InitData contains the data required to render a constructor.
+ InitData struct {
+ // Name is the constructor function name.
+ Name string
+ // Description is the function description.
+ Description string
+ // ServerArgs is the list of constructor arguments for server
+ // side code.
+ ServerArgs []*InitArgData
+ // ClientArgs is the list of constructor arguments for client
+ // side code.
+ ClientArgs []*InitArgData
+ // CLIArgs is the list of arguments that should be initialized
+ // from CLI flags. This is used for implicit attributes which
+		// at the time of writing are only used for the basic auth
+		// username and password.
+ CLIArgs []*InitArgData
+ // ReturnTypeName is the qualified (including the package name)
+ // name of the payload, result or error type.
+ ReturnTypeName string
+ // ReturnTypeRef is the qualified (including the package name)
+ // reference to the payload, result or error type.
+ ReturnTypeRef string
+ // ReturnTypeAttribute is the name of the attribute initialized
+ // by this constructor when it only initializes one attribute
+ // (i.e. body was defined with Body("name") syntax).
+ ReturnTypeAttribute string
+ // ReturnIsStruct is true if the return type is a struct.
+ ReturnIsStruct bool
+ // ReturnIsPrimitivePointer indicates whether the return type is
+ // a primitive pointer.
+ ReturnIsPrimitivePointer bool
+ // ServerCode is the code that builds the payload from the
+ // request on the server when it contains user types.
+ ServerCode string
+ // ClientCode is the code that builds the payload or result type
+ // from the request or response state on the client when it
+ // contains user types.
+ ClientCode string
+ }
+
+ // InitArgData represents a single constructor argument.
+ InitArgData struct {
+ // Name is the argument name.
+ Name string
+ // Description is the argument description.
+ Description string
+ // Reference to the argument, e.g. "&body".
+ Ref string
+ // FieldName is the name of the data structure field that should
+ // be initialized with the argument if any.
+ FieldName string
+ // TypeName is the argument type name.
+ TypeName string
+ // TypeRef is the argument type reference.
+ TypeRef string
+ // Pointer is true if a pointer to the arg should be used.
+ Pointer bool
+ // Required is true if the arg is required to build the payload.
+ Required bool
+ // DefaultValue is the default value of the arg.
+ DefaultValue interface{}
+ // Validate contains the validation code for the argument
+ // value if any.
+ Validate string
+			// Example is an example value.
+ Example interface{}
+ }
+
+ // RouteData describes a route.
+ RouteData struct {
+ // Verb is the HTTP method.
+ Verb string
+ // Path is the fullpath including wildcards.
+ Path string
+ // PathInit contains the information needed to render and call
+ // the path constructor for the route.
+ PathInit *InitData
+ }
+
+	// ParamData describes an HTTP request parameter.
+ ParamData struct {
+ // Name is the name of the mapping to the actual variable name.
+ Name string
+ // AttributeName is the name of the corresponding attribute.
+ AttributeName string
+ // Description is the parameter description
+ Description string
+ // FieldName is the name of the struct field that holds the
+ // param value.
+ FieldName string
+ // VarName is the name of the Go variable used to read or
+ // convert the param value.
+ VarName string
+ // ServiceField is true if there is a corresponding attribute in
+ // the service types.
+ ServiceField bool
+ // Type is the datatype of the variable.
+ Type expr.DataType
+ // TypeName is the name of the type.
+ TypeName string
+ // TypeRef is the reference to the type.
+ TypeRef string
+ // Required is true if the param is required.
+ Required bool
+		// Pointer is true if and only if the param variable is a pointer.
+ Pointer bool
+		// StringSlice is true if the param type is an array of strings.
+ StringSlice bool
+ // Slice is true if the param type is an array.
+ Slice bool
+ // MapStringSlice is true if the param type is a map of string
+ // slice.
+ MapStringSlice bool
+ // Map is true if the param type is a map.
+ Map bool
+ // Validate contains the validation code if any.
+ Validate string
+ // DefaultValue contains the default value if any.
+ DefaultValue interface{}
+ // Example is an example value.
+ Example interface{}
+ // MapQueryParams indicates that the query params must be mapped
+ // to the entire payload (empty string) or a payload attribute
+ // (attribute name).
+ MapQueryParams *string
+ }
+
+	// HeaderData describes an HTTP request or response header.
+ HeaderData struct {
+ // Name is the name of the header key.
+ Name string
+ // AttributeName is the name of the corresponding attribute.
+ AttributeName string
+ // Description is the header description.
+ Description string
+ // CanonicalName is the canonical header key.
+ CanonicalName string
+ // FieldName is the name of the struct field that holds the
+ // header value if any, empty string otherwise.
+ FieldName string
+ // VarName is the name of the Go variable used to read or
+ // convert the header value.
+ VarName string
+ // TypeName is the name of the type.
+ TypeName string
+ // TypeRef is the reference to the type.
+ TypeRef string
+ // Required is true if the header is required.
+ Required bool
+		// Pointer is true if and only if the header variable is a pointer.
+		Pointer bool
+		// StringSlice is true if the header type is an array of strings.
+		StringSlice bool
+		// Slice is true if the header type is an array.
+ Slice bool
+ // Type describes the datatype of the variable value. Mainly
+ // used for conversion.
+ Type expr.DataType
+ // Validate contains the validation code if any.
+ Validate string
+ // DefaultValue contains the default value if any.
+ DefaultValue interface{}
+ // Example is an example value.
+ Example interface{}
+ }
+
+ // TypeData contains the data needed to render a type definition.
+ TypeData struct {
+ // Name is the type name.
+ Name string
+ // VarName is the Go type name.
+ VarName string
+ // Description is the type human description.
+ Description string
+ // Init contains the data needed to render and call the type
+ // constructor if any.
+ Init *InitData
+ // Def is the type definition Go code.
+ Def string
+ // Ref is the reference to the type.
+ Ref string
+ // ValidateDef contains the validation code.
+ ValidateDef string
+ // ValidateRef contains the call to the validation code.
+ ValidateRef string
+ // Example is an example value for the type.
+ Example interface{}
+ // View is the view using which the type is rendered.
+ View string
+ }
+
+ // MultipartData contains the data needed to render multipart
+ // encoder/decoder.
+ MultipartData struct {
+ // FuncName is the name used to generate function type.
+ FuncName string
+ // InitName is the name of the constructor.
+ InitName string
+ // VarName is the name of the variable referring to the function.
+ VarName string
+ // ServiceName is the name of the service.
+ ServiceName string
+ // MethodName is the name of the method.
+ MethodName string
+ // Payload is the payload data required to generate
+ // encoder/decoder.
+ Payload *PayloadData
+ }
+
+ // StreamData contains the data needed to render struct type that
+ // implements the server and client stream interfaces.
+ StreamData struct {
+ // VarName is the name of the struct.
+ VarName string
+ // Type is type of the stream (server or client).
+ Type string
+ // Interface is the fully qualified name of the interface that
+ // the struct implements.
+ Interface string
+ // Endpoint is endpoint data that defines streaming
+ // payload/result.
+ Endpoint *EndpointData
+ // Payload is the streaming payload type sent via the stream.
+ Payload *TypeData
+ // Response is the successful response data for the streaming
+ // endpoint.
+ Response *ResponseData
+ // SendName is the name of the send function.
+ SendName string
+ // SendDesc is the description for the send function.
+ SendDesc string
+ // SendTypeName is the fully qualified type name sent through
+ // the stream.
+ SendTypeName string
+ // SendTypeRef is the fully qualified type ref sent through the
+ // stream.
+ SendTypeRef string
+ // RecvName is the name of the receive function.
+ RecvName string
+ // RecvDesc is the description for the recv function.
+ RecvDesc string
+ // RecvTypeName is the fully qualified type name received from
+ // the stream.
+ RecvTypeName string
+ // RecvTypeRef is the fully qualified type ref received from the
+ // stream.
+ RecvTypeRef string
+ // MustClose indicates whether to generate the Close() function
+ // for the stream.
+ MustClose bool
+ // PkgName is the service package name.
+ PkgName string
+ // Kind is the kind of the stream (payload, result or
+ // bidirectional).
+ Kind expr.StreamKind
+ }
+)
+
+// Get retrieves the transport data for the service with the given name
+// computing it if needed. It returns nil if there is no service with the given
+// name.
+func (d ServicesData) Get(name string) *ServiceData {
+ if data, ok := d[name]; ok {
+ return data
+ }
+ service := expr.Root.API.HTTP.Service(name)
+ if service == nil {
+ return nil
+ }
+ d[name] = d.analyze(service)
+ return d[name]
+}
+
+// Endpoint returns the service method transport data for the endpoint with the
+// given name, nil if there isn't one.
+func (svc *ServiceData) Endpoint(name string) *EndpointData {
+ for _, e := range svc.Endpoints {
+ if e.Method.Name == name {
+ return e
+ }
+ }
+ return nil
+}
+
+// analyze creates the data necessary to render the code of the given service.
+// It records the user types needed by the service definition in userTypes.
+func (d ServicesData) analyze(hs *expr.HTTPServiceExpr) *ServiceData {
+ svc := service.Services.Get(hs.ServiceExpr.Name)
+
+ rd := &ServiceData{
+ Service: svc,
+ ServerStruct: "Server",
+ MountPointStruct: "MountPoint",
+ ServerInit: "New",
+ MountServer: "Mount",
+ ServerService: "Service",
+ ClientStruct: "Client",
+ ServerTypeNames: make(map[string]bool),
+ ClientTypeNames: make(map[string]bool),
+ Scope: codegen.NewNameScope(),
+ }
+
+ for _, s := range hs.FileServers {
+ paths := make([]string, len(s.RequestPaths))
+ for i, p := range s.RequestPaths {
+ idx := strings.LastIndex(p, "/{")
+ if idx > 0 {
+ paths[i] = p[:idx]
+ } else {
+ paths[i] = p
+ }
+ }
+ var pp string
+ if s.IsDir() {
+ pp = expr.ExtractHTTPWildcards(s.RequestPaths[0])[0]
+ }
+ data := &FileServerData{
+ MountHandler: fmt.Sprintf("Mount%s", codegen.Goify(s.FilePath, true)),
+ RequestPaths: paths,
+ FilePath: s.FilePath,
+ IsDir: s.IsDir(),
+ PathParam: pp,
+ }
+ rd.FileServers = append(rd.FileServers, data)
+ }
+
+ for _, a := range hs.HTTPEndpoints {
+ ep := svc.Method(a.MethodExpr.Name)
+
+ var routes []*RouteData
+ i := 0
+ for _, r := range a.Routes {
+ for _, rpath := range r.FullPaths() {
+ params := expr.ExtractRouteWildcards(rpath)
+ var (
+ init *InitData
+ )
+ {
+ initArgs := make([]*InitArgData, len(params))
+ pathParamsObj := expr.AsObject(a.PathParams().Type)
+ suffix := ""
+ if i > 0 {
+ suffix = strconv.Itoa(i + 1)
+ }
+ i++
+ name := fmt.Sprintf("%s%sPath%s", ep.VarName, svc.StructName, suffix)
+ for j, arg := range params {
+ att := pathParamsObj.Attribute(arg)
+ pointer := a.Params.IsPrimitivePointer(arg, false)
+ name := rd.Scope.Unique(codegen.Goify(arg, false))
+ var vcode string
+ if att.Validation != nil {
+ ca := httpContext(att, "", rd.Scope, true, true)
+ ca.Required = true // path params are always required
+ vcode = codegen.RecursiveValidationCode(ca, name)
+ }
+ initArgs[j] = &InitArgData{
+ Name: name,
+ Description: att.Description,
+ Ref: name,
+ FieldName: codegen.Goify(arg, true),
+ TypeName: rd.Scope.GoTypeName(att),
+ TypeRef: rd.Scope.GoTypeRef(att),
+ Pointer: pointer,
+ Required: true,
+ Example: att.Example(expr.Root.API.Random()),
+ Validate: vcode,
+ }
+ }
+
+ var buffer bytes.Buffer
+ pf := expr.HTTPWildcardRegex.ReplaceAllString(rpath, "/%v")
+ err := pathInitTmpl.Execute(&buffer, map[string]interface{}{
+ "Args": initArgs,
+ "PathParams": pathParamsObj,
+ "PathFormat": pf,
+ })
+ if err != nil {
+ panic(err)
+ }
+ init = &InitData{
+ Name: name,
+ Description: fmt.Sprintf("%s returns the URL path to the %s service %s HTTP endpoint. ", name, svc.Name, ep.Name),
+ ServerArgs: initArgs,
+ ClientArgs: initArgs,
+ ReturnTypeName: "string",
+ ReturnTypeRef: "string",
+ ServerCode: buffer.String(),
+ ClientCode: buffer.String(),
+ }
+ }
+
+ routes = append(routes, &RouteData{
+ Verb: strings.ToUpper(r.Method),
+ Path: rpath,
+ PathInit: init,
+ })
+ }
+ }
+
+ payload := buildPayloadData(a, rd)
+
+ var (
+ hsch []*service.SchemeData
+ bosch []*service.SchemeData
+ qsch []*service.SchemeData
+ basch *service.SchemeData
+ )
+ {
+ for _, req := range ep.Requirements {
+ for _, s := range req.Schemes {
+ switch s.Type {
+ case "Basic":
+ basch = s
+ default:
+ switch s.In {
+ case "query":
+ qsch = appendUnique(qsch, s)
+ case "header":
+ hsch = appendUnique(hsch, s)
+ default:
+ bosch = appendUnique(bosch, s)
+ }
+ }
+ }
+ }
+ }
+
+ var requestEncoder string
+ {
+ if payload.Request.ClientBody != nil || len(payload.Request.Headers) > 0 || len(payload.Request.QueryParams) > 0 || basch != nil {
+ requestEncoder = fmt.Sprintf("Encode%sRequest", ep.VarName)
+ }
+ }
+
+ var requestInit *InitData
+ {
+ var (
+ name string
+ args []*InitArgData
+ payloadRef string
+ )
+ {
+ name = fmt.Sprintf("Build%sRequest", ep.VarName)
+ for _, ca := range routes[0].PathInit.ClientArgs {
+ if ca.FieldName != "" {
+ args = append(args, ca)
+ }
+ }
+ if len(routes[0].PathInit.ClientArgs) > 0 && a.MethodExpr.Payload.Type != expr.Empty {
+ payloadRef = svc.Scope.GoFullTypeRef(a.MethodExpr.Payload, svc.PkgName)
+ }
+ }
+ data := map[string]interface{}{
+ "PayloadRef": payloadRef,
+ "HasFields": expr.IsObject(a.MethodExpr.Payload.Type),
+ "ServiceName": svc.Name,
+ "EndpointName": ep.Name,
+ "Args": args,
+ "PathInit": routes[0].PathInit,
+ "Verb": routes[0].Verb,
+ "IsStreaming": a.MethodExpr.IsStreaming(),
+ }
+ var buf bytes.Buffer
+ if err := requestInitTmpl.Execute(&buf, data); err != nil {
+ panic(err) // bug
+ }
+ requestInit = &InitData{
+ Name: name,
+ Description: fmt.Sprintf("%s instantiates a HTTP request object with method and path set to call the %q service %q endpoint", name, svc.Name, ep.Name),
+ ClientCode: buf.String(),
+ ClientArgs: []*InitArgData{{
+ Name: "v",
+ Ref: "v",
+ TypeRef: "interface{}",
+ }},
+ }
+ }
+
+ ad := &EndpointData{
+ Method: ep,
+ ServiceName: svc.Name,
+ ServiceVarName: svc.VarName,
+ ServicePkgName: svc.PkgName,
+ Payload: payload,
+ Result: buildResultData(a, rd),
+ Errors: buildErrorsData(a, rd),
+ HeaderSchemes: hsch,
+ BodySchemes: bosch,
+ QuerySchemes: qsch,
+ BasicScheme: basch,
+ Routes: routes,
+ MountHandler: fmt.Sprintf("Mount%sHandler", ep.VarName),
+ HandlerInit: fmt.Sprintf("New%sHandler", ep.VarName),
+ RequestDecoder: fmt.Sprintf("Decode%sRequest", ep.VarName),
+ ResponseEncoder: fmt.Sprintf("Encode%sResponse", ep.VarName),
+ ErrorEncoder: fmt.Sprintf("Encode%sError", ep.VarName),
+ ClientStruct: "Client",
+ EndpointInit: ep.VarName,
+ RequestInit: requestInit,
+ RequestEncoder: requestEncoder,
+ ResponseDecoder: fmt.Sprintf("Decode%sResponse", ep.VarName),
+ }
+ buildStreamData(ad, a, rd)
+
+ if a.MultipartRequest {
+ ad.MultipartRequestDecoder = &MultipartData{
+ FuncName: fmt.Sprintf("%s%sDecoderFunc", svc.StructName, ep.VarName),
+ InitName: fmt.Sprintf("New%s%sDecoder", svc.StructName, ep.VarName),
+ VarName: fmt.Sprintf("%s%sDecoderFn", svc.Name, ep.VarName),
+ ServiceName: svc.Name,
+ MethodName: ep.Name,
+ Payload: ad.Payload,
+ }
+ ad.MultipartRequestEncoder = &MultipartData{
+ FuncName: fmt.Sprintf("%s%sEncoderFunc", svc.StructName, ep.VarName),
+ InitName: fmt.Sprintf("New%s%sEncoder", svc.StructName, ep.VarName),
+ VarName: fmt.Sprintf("%s%sEncoderFn", svc.Name, ep.VarName),
+ ServiceName: svc.Name,
+ MethodName: ep.Name,
+ Payload: ad.Payload,
+ }
+ }
+
+ rd.Endpoints = append(rd.Endpoints, ad)
+ }
+
+ for _, a := range hs.HTTPEndpoints {
+ collectUserTypes(a.Body.Type, func(ut expr.UserType) {
+ if d := attributeTypeData(httpTypeContext(ut, "", rd.Scope, true, true), true, true, rd); d != nil {
+ rd.ServerBodyAttributeTypes = append(rd.ServerBodyAttributeTypes, d)
+ }
+ if d := attributeTypeData(httpTypeContext(ut, "", rd.Scope, true, false), true, false, rd); d != nil {
+ rd.ClientBodyAttributeTypes = append(rd.ClientBodyAttributeTypes, d)
+ }
+ })
+
+ if a.MethodExpr.StreamingPayload.Type != expr.Empty {
+ collectUserTypes(a.StreamingBody.Type, func(ut expr.UserType) {
+ if d := attributeTypeData(httpTypeContext(ut, "", rd.Scope, true, true), true, true, rd); d != nil {
+ rd.ServerBodyAttributeTypes = append(rd.ServerBodyAttributeTypes, d)
+ }
+ if d := attributeTypeData(httpTypeContext(ut, "", rd.Scope, true, false), true, false, rd); d != nil {
+ rd.ClientBodyAttributeTypes = append(rd.ClientBodyAttributeTypes, d)
+ }
+ })
+ }
+
+ if res := a.MethodExpr.Result; res != nil {
+ for _, v := range a.Responses {
+ collectUserTypes(v.Body.Type, func(ut expr.UserType) {
+ // NOTE: ServerBodyAttributeTypes for response body types are
+ // collected in buildResponseBodyType because we have to generate
+ // body types for each view in a result type.
+ if d := attributeTypeData(httpTypeContext(ut, "", rd.Scope, false, false), false, false, rd); d != nil {
+ rd.ClientBodyAttributeTypes = append(rd.ClientBodyAttributeTypes, d)
+ }
+ })
+ }
+ }
+
+ for _, v := range a.HTTPErrors {
+ collectUserTypes(v.Response.Body.Type, func(ut expr.UserType) {
+ // NOTE: ServerBodyAttributeTypes for error response body types are
+ // collected in buildResponseBodyType because we have to generate
+ // body types for each view in a result type.
+ if d := attributeTypeData(httpTypeContext(ut, "", rd.Scope, false, false), false, false, rd); d != nil {
+ rd.ClientBodyAttributeTypes = append(rd.ClientBodyAttributeTypes, d)
+ }
+ })
+ }
+ }
+
+ return rd
+}
+
+// buildPayloadData returns the data structure used to describe the endpoint
+// payload including the HTTP request details. It also returns the user types
+// used by the request body type recursively if any.
+func buildPayloadData(e *expr.HTTPEndpointExpr, sd *ServiceData) *PayloadData {
+ var (
+ payload = e.MethodExpr.Payload
+ svc = sd.Service
+ body = e.Body.Type
+ ep = svc.Method(e.MethodExpr.Name)
+ svrBody = httpContext(e.Body, "", sd.Scope, true, true)
+ cliBody = httpContext(e.Body, "", sd.Scope, true, false)
+ payloadCA = service.TypeContext(e.MethodExpr.Payload, svc.PkgName, svc.Scope)
+
+ request *RequestData
+ mapQueryParam *ParamData
+ )
+ {
+ var (
+ serverBodyData = buildRequestBodyType(svrBody, payloadCA, e, true, sd)
+ clientBodyData = buildRequestBodyType(cliBody, payloadCA, e, false, sd)
+ paramsData = extractPathParams(e.PathParams(), payloadCA, sd.Scope)
+ queryData = extractQueryParams(e.QueryParams(), payloadCA, sd.Scope)
+ headersData = extractHeaders(e.Headers, payloadCA, sd.Scope)
+
+ mustValidate bool
+ )
+ {
+ if e.MapQueryParams != nil {
+ var (
+ fieldName string
+ name = "query"
+ required = true
+ pAtt = payload
+ ca = payloadCA
+ )
+ if n := *e.MapQueryParams; n != "" {
+ pAtt = expr.AsObject(payload.Type).Attribute(n)
+ required = payload.IsRequired(n)
+ name = n
+ fieldName = codegen.Goify(name, true)
+ ca = ca.Dup(pAtt, required)
+ }
+ varn := codegen.Goify(name, false)
+ validate := codegen.RecursiveValidationCode(ca, varn)
+ mapQueryParam = &ParamData{
+ Name: name,
+ VarName: varn,
+ FieldName: fieldName,
+ Required: required,
+ Type: pAtt.Type,
+ TypeName: sd.Scope.GoTypeName(pAtt),
+ TypeRef: sd.Scope.GoTypeRef(pAtt),
+ Map: expr.AsMap(payload.Type) != nil,
+ Validate: validate,
+ DefaultValue: pAtt.DefaultValue,
+ Example: pAtt.Example(expr.Root.API.Random()),
+ MapQueryParams: e.MapQueryParams,
+ }
+ queryData = append(queryData, mapQueryParam)
+ }
+ if serverBodyData != nil {
+ sd.ServerTypeNames[serverBodyData.Name] = false
+ sd.ClientTypeNames[serverBodyData.Name] = false
+ }
+ for _, p := range paramsData {
+ if p.Validate != "" || needConversion(p.Type) {
+ mustValidate = true
+ break
+ }
+ }
+ if !mustValidate {
+ for _, q := range queryData {
+ if q.Validate != "" || q.Required || needConversion(q.Type) {
+ mustValidate = true
+ break
+ }
+ }
+ }
+ if !mustValidate {
+ for _, h := range headersData {
+ if h.Validate != "" || h.Required || needConversion(h.Type) {
+ mustValidate = true
+ break
+ }
+ }
+ }
+ }
+ request = &RequestData{
+ PathParams: paramsData,
+ QueryParams: queryData,
+ Headers: headersData,
+ ServerBody: serverBodyData,
+ ClientBody: clientBodyData,
+ MustValidate: mustValidate,
+ Multipart: e.MultipartRequest,
+ }
+ }
+
+ var init *InitData
+ if needInit(payload.Type) {
+ // generate constructor function to transform request body,
+ // params, and headers into the method payload type
+ var (
+ name string
+ desc string
+ isObject bool
+ clientArgs []*InitArgData
+ serverArgs []*InitArgData
+ )
+ n := codegen.Goify(ep.Name, true)
+ p := codegen.Goify(ep.Payload, true)
+ // Raw payload object has type name prefixed with endpoint name. No need to
+ // prefix the type name again.
+ if strings.HasPrefix(p, n) {
+ p = svc.Scope.HashedUnique(payload.Type, p)
+ name = fmt.Sprintf("New%s", p)
+ } else {
+ name = fmt.Sprintf("New%s%s", n, p)
+ }
+ desc = fmt.Sprintf("%s builds a %s service %s endpoint payload.",
+ name, svc.Name, e.Name())
+ isObject = expr.IsObject(payload.Type)
+ if body != expr.Empty {
+ var (
+ svcode string
+ cvcode string
+ )
+ if ut, ok := body.(expr.UserType); ok {
+ if val := ut.Attribute().Validation; val != nil {
+ svrBody := httpContext(ut.Attribute(), "", sd.Scope, true, true)
+ cliBody := httpContext(ut.Attribute(), "", sd.Scope, true, false)
+ svcode = codegen.RecursiveValidationCode(svrBody, "body")
+ cvcode = codegen.RecursiveValidationCode(cliBody, "body")
+ }
+ }
+ serverArgs = []*InitArgData{{
+ Name: "body",
+ Ref: sd.Scope.GoVar("body", body),
+ TypeName: sd.Scope.GoTypeName(&expr.AttributeExpr{Type: body}),
+ TypeRef: sd.Scope.GoTypeRef(&expr.AttributeExpr{Type: body}),
+ Required: true,
+ Example: e.Body.Example(expr.Root.API.Random()),
+ Validate: svcode,
+ }}
+ clientArgs = []*InitArgData{{
+ Name: "body",
+ Ref: sd.Scope.GoVar("body", body),
+ TypeName: sd.Scope.GoTypeName(&expr.AttributeExpr{Type: body}),
+ TypeRef: sd.Scope.GoTypeRef(&expr.AttributeExpr{Type: body}),
+ Required: true,
+ Example: e.Body.Example(expr.Root.API.Random()),
+ Validate: cvcode,
+ }}
+ }
+ var args []*InitArgData
+ for _, p := range request.PathParams {
+ args = append(args, &InitArgData{
+ Name: p.VarName,
+ Description: p.Description,
+ Ref: p.VarName,
+ FieldName: p.FieldName,
+ TypeName: p.TypeName,
+ TypeRef: p.TypeRef,
+ // special case for path params that are not
+ // pointers (because path params never are) but
+ // assigned to fields that are.
+ Pointer: !p.Required && !p.Pointer && payload.IsPrimitivePointer(p.Name, true),
+ Required: p.Required,
+ Validate: p.Validate,
+ Example: p.Example,
+ })
+ }
+ for _, p := range request.QueryParams {
+ args = append(args, &InitArgData{
+ Name: p.VarName,
+ Ref: p.VarName,
+ FieldName: p.FieldName,
+ TypeName: p.TypeName,
+ TypeRef: p.TypeRef,
+ Required: p.Required,
+ DefaultValue: p.DefaultValue,
+ Validate: p.Validate,
+ Example: p.Example,
+ })
+ }
+ for _, h := range request.Headers {
+ args = append(args, &InitArgData{
+ Name: h.VarName,
+ Ref: h.VarName,
+ FieldName: h.FieldName,
+ TypeName: h.TypeName,
+ TypeRef: h.TypeRef,
+ Required: h.Required,
+ DefaultValue: h.DefaultValue,
+ Validate: h.Validate,
+ Example: h.Example,
+ })
+ }
+ serverArgs = append(serverArgs, args...)
+ clientArgs = append(clientArgs, args...)
+
+ var (
+ cliArgs []*InitArgData
+ )
+ for _, r := range ep.Requirements {
+ done := false
+ for _, sc := range r.Schemes {
+ if sc.Type == "Basic" {
+ uatt := e.MethodExpr.Payload.Find(sc.UsernameAttr)
+ uarg := &InitArgData{
+ Name: sc.UsernameAttr,
+ FieldName: sc.UsernameField,
+ Description: uatt.Description,
+ Ref: sc.UsernameAttr,
+ Required: sc.UsernameRequired,
+ TypeName: svc.Scope.GoTypeName(uatt),
+ TypeRef: svc.Scope.GoTypeRef(uatt),
+ Pointer: sc.UsernamePointer,
+ Validate: codegen.RecursiveValidationCode(payloadCA.Dup(uatt, sc.UsernameRequired), sc.UsernameAttr),
+ Example: uatt.Example(expr.Root.API.Random()),
+ }
+ patt := e.MethodExpr.Payload.Find(sc.PasswordAttr)
+ parg := &InitArgData{
+ Name: sc.PasswordAttr,
+ FieldName: sc.PasswordField,
+ Description: patt.Description,
+ Ref: sc.PasswordAttr,
+ Required: sc.PasswordRequired,
+ TypeName: svc.Scope.GoTypeName(patt),
+ TypeRef: svc.Scope.GoTypeRef(patt),
+ Pointer: sc.PasswordPointer,
+ Validate: codegen.RecursiveValidationCode(payloadCA.Dup(patt, sc.PasswordRequired), sc.PasswordAttr),
+ Example: patt.Example(expr.Root.API.Random()),
+ }
+ cliArgs = []*InitArgData{uarg, parg}
+ done = true
+ break
+ }
+ }
+ if done {
+ break
+ }
+ }
+
+ var (
+ serverCode, clientCode string
+ err error
+ origin string
+
+ ca = payloadCA
+ )
+ if body != expr.Empty {
+ // If design uses Body("name") syntax then need to use payload
+ // attribute to transform.
+ if o, ok := e.Body.Meta["origin:attribute"]; ok {
+ origin = o[0]
+ pAtt := expr.AsObject(payload.Type).Attribute(origin)
+ ca = ca.Dup(pAtt, payload.IsRequired(origin))
+ }
+
+ var (
+ helpers []*codegen.TransformFunctionData
+ )
+ serverCode, helpers, err = unmarshal(svrBody, ca, "body", "v")
+ if err == nil {
+ sd.ServerTransformHelpers = codegen.AppendHelpers(sd.ServerTransformHelpers, helpers)
+ }
+ // The client code for building the method payload from
+ // a request body is used by the CLI tool to build the
+ // payload given to the client endpoint. It differs
+ // because the body type there does not use pointers for
+ // all fields (no need to validate).
+ clientCode, helpers, err = marshal(cliBody, ca, "body", "v")
+ if err == nil {
+ sd.ClientTransformHelpers = codegen.AppendHelpers(sd.ClientTransformHelpers, helpers)
+ }
+ } else if expr.IsArray(payload.Type) || expr.IsMap(payload.Type) {
+ if params := expr.AsObject(e.Params.Type); len(*params) > 0 {
+ var helpers []*codegen.TransformFunctionData
+ serverCode, helpers, err = unmarshal(
+ svrBody.Dup((*params)[0].Attribute, true),
+ payloadCA,
+ codegen.Goify((*params)[0].Name, false), "v")
+ if err == nil {
+ sd.ServerTransformHelpers = codegen.AppendHelpers(sd.ServerTransformHelpers, helpers)
+ }
+ clientCode, helpers, err = marshal(
+ cliBody.Dup((*params)[0].Attribute, true),
+ payloadCA,
+ codegen.Goify((*params)[0].Name, false), "v")
+ if err == nil {
+ sd.ClientTransformHelpers = codegen.AppendHelpers(sd.ClientTransformHelpers, helpers)
+ }
+ }
+ }
+ if err != nil {
+ fmt.Println(err.Error()) // TBD validate DSL so errors are not possible
+ }
+ init = &InitData{
+ Name: name,
+ Description: desc,
+ ServerArgs: serverArgs,
+ ClientArgs: clientArgs,
+ CLIArgs: cliArgs,
+ ReturnTypeName: svc.Scope.GoFullTypeName(payload, svc.PkgName),
+ ReturnTypeRef: svc.Scope.GoFullTypeRef(payload, svc.PkgName),
+ ReturnIsStruct: isObject,
+ ReturnTypeAttribute: codegen.Goify(origin, true),
+ ServerCode: serverCode,
+ ClientCode: clientCode,
+ }
+ }
+ request.PayloadInit = init
+
+ var (
+ returnValue string
+ name string
+ ref string
+ )
+ {
+ if payload.Type != expr.Empty {
+ name = svc.Scope.GoFullTypeName(payload, svc.PkgName)
+ ref = svc.Scope.GoFullTypeRef(payload, svc.PkgName)
+ }
+ if init == nil {
+ if o := expr.AsObject(e.Params.Type); o != nil && len(*o) > 0 {
+ returnValue = codegen.Goify((*o)[0].Name, false)
+ } else if o := expr.AsObject(e.Headers.Type); o != nil && len(*o) > 0 {
+ returnValue = codegen.Goify((*o)[0].Name, false)
+ } else if e.MapQueryParams != nil && *e.MapQueryParams == "" {
+ returnValue = mapQueryParam.Name
+ }
+ }
+ }
+
+ return &PayloadData{
+ Name: name,
+ Ref: ref,
+ Request: request,
+ DecoderReturnValue: returnValue,
+ }
+}
+
+// buildResultData builds the result data for the given service endpoint.
+func buildResultData(e *expr.HTTPEndpointExpr, sd *ServiceData) *ResultData {
+ var (
+ svc = sd.Service
+ ep = svc.Method(e.MethodExpr.Name)
+ result = e.MethodExpr.Result
+ resultCA = service.TypeContext(result, svc.PkgName, svc.Scope)
+
+ name string
+ ref string
+ view string
+ )
+ {
+ view = "default"
+ if result.Meta != nil {
+ if v, ok := result.Meta["view"]; ok {
+ view = v[0]
+ }
+ }
+ if result.Type != expr.Empty {
+ name = svc.Scope.GoFullTypeName(result, svc.PkgName)
+ ref = svc.Scope.GoFullTypeRef(result, svc.PkgName)
+ }
+ }
+
+ var (
+ mustInit bool
+ responses []*ResponseData
+ )
+ {
+ viewed := false
+ if ep.ViewedResult != nil {
+ result = expr.AsObject(ep.ViewedResult.Type).Attribute("projected")
+ resultCA = service.ProjectedTypeContext(result, svc.ViewsPkg, svc.ViewScope)
+ viewed = true
+ }
+ responses = buildResponses(e, resultCA, viewed, sd)
+ for _, r := range responses {
+ // response has a body or headers or tag
+ if len(r.ServerBody) > 0 || len(r.Headers) > 0 || r.TagName != "" {
+ mustInit = true
+ }
+ }
+ }
+ return &ResultData{
+ IsStruct: expr.IsObject(result.Type),
+ Name: name,
+ Ref: ref,
+ Responses: responses,
+ View: view,
+ MustInit: mustInit,
+ }
+}
+
+// buildResponses builds the response data for all the responses in the
+// endpoint expression. The response headers and body for each response
+// are inferred from the method's result expression if not specified
+// explicitly.
+//
+// resultCA is the result type contextual attribute. It can be a service
+// result type or a projected type (if result uses views).
+//
+// viewed parameter indicates if the method result uses views.
+func buildResponses(e *expr.HTTPEndpointExpr, resultCA *codegen.ContextualAttribute, viewed bool, sd *ServiceData) []*ResponseData {
+ var (
+ responses []*ResponseData
+ scope *codegen.NameScope
+
+ svc = sd.Service
+ md = svc.Method(e.Name())
+ result = resultCA.Attribute.Expr()
+ )
+ {
+ scope = svc.Scope
+ if viewed {
+ scope = svc.ViewScope
+ }
+ notag := -1
+ for i, resp := range e.Responses {
+ if resp.Tag[0] == "" {
+ if notag > -1 {
+ continue // we don't want more than one response with no tag
+ }
+ notag = i
+ }
+ var (
+ headersData []*HeaderData
+ serverBodyData []*TypeData
+ clientBodyData *TypeData
+ init *InitData
+ origin string
+ mustValidate bool
+
+ resCA = resultCA
+ resAttr = result
+ svrBody = httpContext(resp.Body, "", sd.Scope, false, true)
+ cliBody = httpContext(resp.Body, "", sd.Scope, false, false)
+ )
+ {
+ headersData = extractHeaders(resp.Headers, resultCA, scope)
+ if resp.Body.Type != expr.Empty {
+ // If design uses Body("name") syntax we need to use the
+ // corresponding attribute in the result type for body
+ // transformation.
+ if o, ok := resp.Body.Meta["origin:attribute"]; ok {
+ origin = o[0]
+ rAttr := expr.AsObject(resAttr.Type).Attribute(origin)
+ resCA = resCA.Dup(rAttr, resAttr.IsRequired(origin))
+ }
+ }
+ if viewed {
+ vname := ""
+ if origin != "" {
+ // Response body is explicitly set to an attribute in the method
+ // result type. No need to do any view-based projections server side.
+ if sbd := buildResponseBodyType(svrBody, resultCA, e, true, &vname, sd); sbd != nil {
+ serverBodyData = append(serverBodyData, sbd)
+ }
+ } else if v, ok := e.MethodExpr.Result.Meta["view"]; ok && len(v) > 0 {
+ // Design explicitly sets the view to render the result.
+ // We generate only one server body type which will be rendered
+ // using the specified view.
+ if sbd := buildResponseBodyType(svrBody, resultCA, e, true, &v[0], sd); sbd != nil {
+ serverBodyData = append(serverBodyData, sbd)
+ }
+ } else {
+ // If a method result uses views (i.e., a result type), we generate
+ // one response body type per view defined in the result type. The
+ // generated body type names are suffixed with the name of the view
+ // (except for "default" view). Constructors are also generated to
+ // create a view-specific body type from the method result. This
+ // makes it possible for the server side to return only the
+ // attributes defined in the view in the response (NOTE: a required
+ // attribute in the result type may not be present in all its views)
+ for _, view := range md.ViewedResult.Views {
+ if sbd := buildResponseBodyType(svrBody, resultCA, e, true, &view.Name, sd); sbd != nil {
+ serverBodyData = append(serverBodyData, sbd)
+ }
+ }
+ }
+ clientBodyData = buildResponseBodyType(cliBody, resultCA, e, false, &vname, sd)
+ } else {
+ if sbd := buildResponseBodyType(svrBody, resultCA, e, true, nil, sd); sbd != nil {
+ serverBodyData = append(serverBodyData, sbd)
+ }
+ clientBodyData = buildResponseBodyType(cliBody, resultCA, e, false, nil, sd)
+ }
+ if clientBodyData != nil {
+ sd.ClientTypeNames[clientBodyData.Name] = false
+ }
+ for _, h := range headersData {
+ if h.Validate != "" || h.Required || needConversion(h.Type) {
+ mustValidate = true
+ break
+ }
+ }
+ if needInit(result.Type) {
+ // generate constructor function to transform response body
+ // and headers into the method result type
+ var (
+ name string
+ desc string
+ code string
+ err error
+ pointer bool
+ clientArgs []*InitArgData
+ helpers []*codegen.TransformFunctionData
+ )
+ {
+ status := codegen.Goify(http.StatusText(resp.StatusCode), true)
+ n := codegen.Goify(md.Name, true)
+ r := codegen.Goify(md.Result, true)
+ // Raw result object has type name prefixed with endpoint name. No need to
+ // prefix the type name again.
+ if strings.HasPrefix(r, n) {
+ r = scope.HashedUnique(result.Type, r)
+ name = fmt.Sprintf("New%s%s", r, status)
+ } else {
+ name = fmt.Sprintf("New%s%s%s", n, r, status)
+ }
+ desc = fmt.Sprintf("%s builds a %q service %q endpoint result from a HTTP %q response.", name, svc.Name, e.Name(), status)
+ if resp.Body.Type != expr.Empty {
+ if origin != "" {
+ pointer = result.IsPrimitivePointer(origin, true)
+ }
+ ref := "body"
+ if expr.IsObject(resp.Body.Type) {
+ ref = "&body"
+ pointer = false
+ }
+ var vcode string
+ if ut, ok := resp.Body.Type.(expr.UserType); ok {
+ if val := ut.Attribute().Validation; val != nil {
+ vcode = codegen.RecursiveValidationCode(cliBody.Dup(ut.Attribute(), true), "body")
+ }
+ }
+ clientArgs = []*InitArgData{{
+ Name: "body",
+ Ref: ref,
+ TypeRef: sd.Scope.GoTypeRef(resp.Body),
+ Validate: vcode,
+ }}
+ // If the method result is a
+ // * result type - we unmarshal the client response body to the
+ // corresponding type in the views package so that view-specific
+ // validation logic can be applied.
+ // * user type - we unmarshal the client response body to the
+ // corresponding type in the service package after validating the
+ // response body. Here, the transformation code must assume that the
+ // required attributes are set in the response body (otherwise
+ // validation would fail).
+ code, helpers, err = unmarshal(cliBody, resCA, "body", "v")
+ if err == nil {
+ sd.ClientTransformHelpers = codegen.AppendHelpers(sd.ClientTransformHelpers, helpers)
+ }
+ } else if expr.IsArray(result.Type) || expr.IsMap(result.Type) {
+ if params := expr.AsObject(e.QueryParams().Type); len(*params) > 0 {
+ code, helpers, err = unmarshal(
+ cliBody.Dup((*params)[0].Attribute, true),
+ resultCA,
+ codegen.Goify((*params)[0].Name, false), "v")
+ if err == nil {
+ sd.ClientTransformHelpers = codegen.AppendHelpers(sd.ClientTransformHelpers, helpers)
+ }
+ }
+ }
+ if err != nil {
+ fmt.Println(err.Error()) // TBD validate DSL so errors are not possible
+ }
+ for _, h := range headersData {
+ clientArgs = append(clientArgs, &InitArgData{
+ Name: h.VarName,
+ Ref: h.VarName,
+ FieldName: h.FieldName,
+ TypeRef: h.TypeRef,
+ Validate: h.Validate,
+ Example: h.Example,
+ })
+ }
+ }
+ init = &InitData{
+ Name: name,
+ Description: desc,
+ ClientArgs: clientArgs,
+ ReturnTypeName: resultCA.Attribute.Name(),
+ ReturnTypeRef: resultCA.Attribute.Ref(),
+ ReturnIsStruct: expr.IsObject(result.Type),
+ ReturnTypeAttribute: codegen.Goify(origin, true),
+ ReturnIsPrimitivePointer: pointer,
+ ClientCode: code,
+ }
+ }
+
+ var (
+ tagName string
+ tagVal string
+ tagPtr bool
+ )
+ {
+ if resp.Tag[0] != "" {
+ tagName = codegen.Goify(resp.Tag[0], true)
+ tagVal = resp.Tag[1]
+ tagPtr = viewed || result.IsPrimitivePointer(resp.Tag[0], true)
+ }
+ }
+ responses = append(responses, &ResponseData{
+ StatusCode: statusCodeToHTTPConst(resp.StatusCode),
+ Description: resp.Description,
+ Headers: headersData,
+ ContentType: resp.ContentType,
+ ServerBody: serverBodyData,
+ ClientBody: clientBodyData,
+ ResultInit: init,
+ TagName: tagName,
+ TagValue: tagVal,
+ TagPointer: tagPtr,
+ MustValidate: mustValidate,
+ ResultAttr: codegen.Goify(origin, true),
+ ViewedResult: md.ViewedResult,
+ })
+ }
+ }
+ count := len(responses)
+ if notag >= 0 && notag < count-1 {
+ // Make sure tagless response is last
+ responses[notag], responses[count-1] = responses[count-1], responses[notag]
+ }
+ }
+ return responses
+}
+
+// buildErrorsData builds the error data for all the error responses in the
+// endpoint expression. The response headers and body for each response
+// are inferred from the method's error expression if not specified
+// explicitly.
+func buildErrorsData(e *expr.HTTPEndpointExpr, sd *ServiceData) []*ErrorGroupData {
+ var (
+ svc = sd.Service
+ )
+
+ data := make(map[string][]*ErrorData)
+ for _, v := range e.HTTPErrors {
+ var (
+ init *InitData
+ body = v.Response.Body.Type
+ errCA = service.TypeContext(v.ErrorExpr.AttributeExpr, svc.PkgName, svc.Scope)
+ svrBody = httpContext(v.Response.Body, "", sd.Scope, false, true)
+ cliBody = httpContext(v.Response.Body, "", sd.Scope, false, false)
+ )
+ if needInit(v.ErrorExpr.Type) {
+ var (
+ name string
+ desc string
+ isObject bool
+ args []*InitArgData
+ )
+ {
+ ep := svc.Method(e.MethodExpr.Name)
+ name = fmt.Sprintf("New%s%s", codegen.Goify(ep.Name, true), codegen.Goify(v.ErrorExpr.Name, true))
+ desc = fmt.Sprintf("%s builds a %s service %s endpoint %s error.",
+ name, svc.Name, e.Name(), v.ErrorExpr.Name)
+ if body != expr.Empty {
+ isObject = expr.IsObject(body)
+ ref := "body"
+ if isObject {
+ ref = "&body"
+ }
+ args = []*InitArgData{{
+ Name: "body",
+ Ref: ref,
+ TypeRef: sd.Scope.GoTypeRef(v.Response.Body),
+ }}
+ }
+ for _, h := range extractHeaders(v.Response.Headers, errCA, sd.Scope) {
+ args = append(args, &InitArgData{
+ Name: h.VarName,
+ Ref: h.VarName,
+ FieldName: h.FieldName,
+ TypeRef: h.TypeRef,
+ Validate: h.Validate,
+ Example: h.Example,
+ })
+ }
+ }
+
+ var (
+ code string
+ origin string
+ err error
+
+ herr = v.ErrorExpr
+ )
+ {
+ if body != expr.Empty {
+ // If the design uses Body("name") syntax then we need to use the payload
+ // attribute to transform.
+ if o, ok := v.Response.Body.Meta["origin:attribute"]; ok {
+ origin = o[0]
+ eAtt := expr.AsObject(v.ErrorExpr.Type).Attribute(origin)
+ errCA = errCA.Dup(eAtt, v.ErrorExpr.IsRequired(origin))
+ }
+
+ var helpers []*codegen.TransformFunctionData
+ code, helpers, err = unmarshal(cliBody, errCA, "body", "v")
+ if err == nil {
+ sd.ClientTransformHelpers = codegen.AppendHelpers(sd.ClientTransformHelpers, helpers)
+ }
+ } else if expr.IsArray(herr.Type) || expr.IsMap(herr.Type) {
+ if params := expr.AsObject(e.QueryParams().Type); len(*params) > 0 {
+ var helpers []*codegen.TransformFunctionData
+ code, helpers, err = unmarshal(
+ cliBody.Dup((*params)[0].Attribute, true), errCA,
+ codegen.Goify((*params)[0].Name, false), "v")
+ if err == nil {
+ sd.ClientTransformHelpers = codegen.AppendHelpers(sd.ClientTransformHelpers, helpers)
+ }
+ }
+ }
+ if err != nil {
+ fmt.Println(err.Error()) // TBD validate DSL so errors are not possible
+ }
+ }
+
+ init = &InitData{
+ Name: name,
+ Description: desc,
+ ClientArgs: args,
+ ReturnTypeName: svc.Scope.GoFullTypeName(v.ErrorExpr.AttributeExpr, svc.PkgName),
+ ReturnTypeRef: svc.Scope.GoFullTypeRef(v.ErrorExpr.AttributeExpr, svc.PkgName),
+ ReturnIsStruct: isObject,
+ ReturnTypeAttribute: codegen.Goify(origin, true),
+ ClientCode: code,
+ }
+ }
+
+ var (
+ responseData *ResponseData
+ )
+ {
+ var (
+ serverBodyData []*TypeData
+ clientBodyData *TypeData
+ )
+ {
+ if sbd := buildResponseBodyType(svrBody, errCA, e, true, nil, sd); sbd != nil {
+ serverBodyData = append(serverBodyData, sbd)
+ }
+ clientBodyData = buildResponseBodyType(cliBody, errCA, e, false, nil, sd)
+ if clientBodyData != nil {
+ sd.ClientTypeNames[clientBodyData.Name] = false
+ clientBodyData.Description = fmt.Sprintf("%s is the type of the %q service %q endpoint HTTP response body for the %q error.",
+ clientBodyData.VarName, svc.Name, e.Name(), v.Name)
+ serverBodyData[0].Description = fmt.Sprintf("%s is the type of the %q service %q endpoint HTTP response body for the %q error.",
+ serverBodyData[0].VarName, svc.Name, e.Name(), v.Name)
+ }
+ }
+
+ headers := extractHeaders(v.Response.Headers, errCA, sd.Scope)
+ responseData = &ResponseData{
+ StatusCode: statusCodeToHTTPConst(v.Response.StatusCode),
+ Headers: headers,
+ ErrorHeader: v.Name,
+ ServerBody: serverBodyData,
+ ClientBody: clientBodyData,
+ ResultInit: init,
+ }
+ }
+
+ ref := svc.Scope.GoFullTypeRef(v.ErrorExpr.AttributeExpr, svc.PkgName)
+ data[ref] = append(data[ref], &ErrorData{
+ Name: v.Name,
+ Response: responseData,
+ Ref: ref,
+ })
+ }
+ keys := make([]string, len(data))
+ i := 0
+ for k := range data {
+ keys[i] = k
+ i++
+ }
+ sort.Strings(keys)
+ var vals []*ErrorGroupData
+ for _, k := range keys {
+ es := data[k]
+ for _, e := range es {
+ found := false
+ for _, eg := range vals {
+ if eg.StatusCode == e.Response.StatusCode {
+ eg.Errors = append(eg.Errors, e)
+ found = true
+ break
+ }
+ }
+ if !found {
+ vals = append(vals,
+ &ErrorGroupData{
+ StatusCode: e.Response.StatusCode,
+ Errors: []*ErrorData{e},
+ })
+ }
+ }
+ }
+ return vals
+}
+
+func buildStreamData(ed *EndpointData, e *expr.HTTPEndpointExpr, sd *ServiceData) {
+ if !e.MethodExpr.IsStreaming() {
+ return
+ }
+ var (
+ svrSendTypeName string
+ svrSendTypeRef string
+ svrRecvTypeName string
+ svrRecvTypeRef string
+ svrSendDesc string
+ svrRecvDesc string
+ svrPayload *TypeData
+ cliSendDesc string
+ cliRecvDesc string
+ cliPayload *TypeData
+
+ m = e.MethodExpr
+ md = ed.Method
+ svc = sd.Service
+ spayload = m.StreamingPayload
+ spayloadCA = service.TypeContext(spayload, svc.PkgName, svc.Scope)
+ )
+ {
+ svrSendTypeName = ed.Result.Name
+ svrSendTypeRef = ed.Result.Ref
+ svrSendDesc = fmt.Sprintf("%s streams instances of %q to the %q endpoint websocket connection.", md.ServerStream.SendName, svrSendTypeName, md.Name)
+ cliRecvDesc = fmt.Sprintf("%s reads instances of %q from the %q endpoint websocket connection.", md.ClientStream.RecvName, svrSendTypeName, md.Name)
+ if e.MethodExpr.Stream == expr.ClientStreamKind || e.MethodExpr.Stream == expr.BidirectionalStreamKind {
+ svrRecvTypeName = sd.Scope.GoFullTypeName(e.MethodExpr.StreamingPayload, svc.PkgName)
+ svrRecvTypeRef = sd.Scope.GoFullTypeRef(e.MethodExpr.StreamingPayload, svc.PkgName)
+ svrBody := httpContext(e.StreamingBody, "", sd.Scope, true, true)
+ cliBody := httpContext(e.StreamingBody, "", sd.Scope, true, false)
+ svrPayload = buildRequestBodyType(svrBody, spayloadCA, e, true, sd)
+ if needInit(spayload.Type) {
+ body := e.StreamingBody.Type
+ // generate constructor function to transform request body
+ // into the method streaming payload type
+ var (
+ name string
+ desc string
+ isObject bool
+ serverArgs []*InitArgData
+ serverCode string
+ err error
+ )
+ {
+ n := codegen.Goify(m.Name, true)
+ p := codegen.Goify(svrPayload.Name, true)
+ // Raw payload object has type name prefixed with endpoint name. No need to
+ // prefix the type name again.
+ if strings.HasPrefix(p, n) {
+ name = fmt.Sprintf("New%s", p)
+ } else {
+ name = fmt.Sprintf("New%s%s", n, p)
+ }
+ desc = fmt.Sprintf("%s builds a %s service %s endpoint payload.", name, svc.Name, m.Name)
+ isObject = expr.IsObject(spayload.Type)
+ if body != expr.Empty {
+ var (
+ ref string
+ svcode string
+ )
+ {
+ ref = "body"
+ if expr.IsObject(body) {
+ ref = "&body"
+ }
+ if ut, ok := body.(expr.UserType); ok {
+ if val := ut.Attribute().Validation; val != nil {
+ svcode = codegen.RecursiveValidationCode(svrBody.Dup(ut.Attribute(), true), "body")
+ }
+ }
+ }
+ serverArgs = []*InitArgData{{
+ Name: "body",
+ Ref: ref,
+ TypeName: sd.Scope.GoTypeName(e.StreamingBody),
+ TypeRef: sd.Scope.GoTypeRef(e.StreamingBody),
+ Required: true,
+ Example: e.Body.Example(expr.Root.API.Random()),
+ Validate: svcode,
+ }}
+ }
+ if body != expr.Empty {
+ var helpers []*codegen.TransformFunctionData
+ serverCode, helpers, err = marshal(cliBody, spayloadCA, "body", "v")
+ if err == nil {
+ sd.ServerTransformHelpers = codegen.AppendHelpers(sd.ServerTransformHelpers, helpers)
+ }
+ }
+ if err != nil {
+ fmt.Println(err.Error()) // TBD validate DSL so errors are not possible
+ }
+ }
+ svrPayload.Init = &InitData{
+ Name: name,
+ Description: desc,
+ ServerArgs: serverArgs,
+ ReturnTypeName: svc.Scope.GoFullTypeName(spayload, svc.PkgName),
+ ReturnTypeRef: svc.Scope.GoFullTypeRef(spayload, svc.PkgName),
+ ReturnIsStruct: isObject,
+ ServerCode: serverCode,
+ }
+ }
+ cliPayload = buildRequestBodyType(cliBody, spayloadCA, e, false, sd)
+ if cliPayload != nil {
+ sd.ClientTypeNames[cliPayload.Name] = false
+ sd.ServerTypeNames[cliPayload.Name] = false
+ }
+ if e.MethodExpr.Stream == expr.ClientStreamKind {
+ svrSendDesc = fmt.Sprintf("%s streams instances of %q to the %q endpoint websocket connection and closes the connection.", md.ServerStream.SendName, svrSendTypeName, md.Name)
+ cliRecvDesc = fmt.Sprintf("%s stops sending messages to the %q endpoint websocket connection and reads instances of %q from the connection.", md.ClientStream.RecvName, md.Name, svrSendTypeName)
+ }
+ svrRecvDesc = fmt.Sprintf("%s reads instances of %q from the %q endpoint websocket connection.", md.ServerStream.RecvName, svrRecvTypeName, md.Name)
+ cliSendDesc = fmt.Sprintf("%s streams instances of %q to the %q endpoint websocket connection.", md.ClientStream.SendName, svrRecvTypeName, md.Name)
+ }
+ }
+ ed.ServerStream = &StreamData{
+ VarName: md.ServerStream.VarName,
+ Interface: fmt.Sprintf("%s.%s", svc.PkgName, md.ServerStream.Interface),
+ Endpoint: ed,
+ Payload: svrPayload,
+ Response: ed.Result.Responses[0],
+ PkgName: svc.PkgName,
+ Type: "server",
+ Kind: md.ServerStream.Kind,
+ SendName: md.ServerStream.SendName,
+ SendDesc: svrSendDesc,
+ SendTypeName: svrSendTypeName,
+ SendTypeRef: svrSendTypeRef,
+ RecvName: md.ServerStream.RecvName,
+ RecvDesc: svrRecvDesc,
+ RecvTypeName: svrRecvTypeName,
+ RecvTypeRef: svrRecvTypeRef,
+ MustClose: md.ServerStream.MustClose,
+ }
+ ed.ClientStream = &StreamData{
+ VarName: md.ClientStream.VarName,
+ Interface: fmt.Sprintf("%s.%s", svc.PkgName, md.ClientStream.Interface),
+ Endpoint: ed,
+ Payload: cliPayload,
+ Response: ed.Result.Responses[0],
+ PkgName: svc.PkgName,
+ Type: "client",
+ Kind: md.ClientStream.Kind,
+ SendName: md.ClientStream.SendName,
+ SendDesc: cliSendDesc,
+ SendTypeName: svrRecvTypeName,
+ SendTypeRef: svrRecvTypeRef,
+ RecvName: md.ClientStream.RecvName,
+ RecvDesc: cliRecvDesc,
+ RecvTypeName: svrSendTypeName,
+ RecvTypeRef: svrSendTypeRef,
+ MustClose: md.ClientStream.MustClose,
+ }
+}
+
+// buildRequestBodyType builds the TypeData for a request body. The data makes
+// it possible to generate a function on the client side that creates the body
+// from the service method payload.
+//
+// bodyCA is the HTTP request body context
+//
+// attCA is the payload attribute context
+//
+// e is the HTTP endpoint expression
+//
+// svr is true if the function is generated for server side code.
+//
+// sd is the service data
+//
+func buildRequestBodyType(bodyCA, attCA *codegen.ContextualAttribute, e *expr.HTTPEndpointExpr, svr bool, sd *ServiceData) *TypeData {
+ body := bodyCA.Attribute.Expr()
+ att := attCA.Attribute.Expr()
+ if body.Type == expr.Empty {
+ return nil
+ }
+ var (
+ name string
+ varname string
+ desc string
+ def string
+ ref string
+ validateDef string
+ validateRef string
+
+ svc = sd.Service
+ )
+ {
+ name = body.Type.Name()
+ ref = sd.Scope.GoTypeRef(body)
+ if ut, ok := body.Type.(expr.UserType); ok {
+ varname = codegen.Goify(ut.Name(), true)
+ def = bodyCA.Dup(ut.Attribute(), true).Def()
+ desc = fmt.Sprintf("%s is the type of the %q service %q endpoint HTTP request body.",
+ varname, svc.Name, e.Name())
+ if svr {
+ // generate validation code for unmarshaled type (server-side).
+ validateDef = codegen.RecursiveValidationCode(bodyCA.Dup(ut.Attribute(), true), "body")
+ if validateDef != "" {
+ validateRef = fmt.Sprintf("err = Validate%s(&body)", varname)
+ }
+ }
+ } else {
+ varname = sd.Scope.GoTypeRef(body)
+ validateRef = codegen.RecursiveValidationCode(bodyCA, "body")
+ desc = body.Description
+ }
+ }
+ var init *InitData
+ {
+ if !svr && att.Type != expr.Empty && needInit(body.Type) {
+ var (
+ name string
+ desc string
+ code string
+ origin string
+ err error
+ helpers []*codegen.TransformFunctionData
+
+ sourceVar = "p"
+ svc = sd.Service
+ ca = attCA
+ )
+ {
+ name = fmt.Sprintf("New%s", codegen.Goify(sd.Scope.GoTypeName(body), true))
+ desc = fmt.Sprintf("%s builds the HTTP request body from the payload of the %q endpoint of the %q service.",
+ name, e.Name(), svc.Name)
+ src := sourceVar
+ // If the design uses Body("name") syntax then we need to use the payload attribute
+ // to transform.
+ if o, ok := body.Meta["origin:attribute"]; ok {
+ srcObj := expr.AsObject(att.Type)
+ origin = o[0]
+ srcAtt := srcObj.Attribute(origin)
+ ca = ca.Dup(srcAtt, att.IsRequired(origin))
+ src += "." + codegen.Goify(origin, true)
+ }
+ code, helpers, err = marshal(ca, bodyCA, src, "body")
+ if err != nil {
+ fmt.Println(err.Error()) // TBD validate DSL so errors are not possible
+ }
+ sd.ClientTransformHelpers = codegen.AppendHelpers(sd.ClientTransformHelpers, helpers)
+ }
+ arg := InitArgData{
+ Name: sourceVar,
+ Ref: sourceVar,
+ TypeRef: attCA.Attribute.Ref(),
+ Validate: validateDef,
+ Example: att.Example(expr.Root.API.Random()),
+ }
+ init = &InitData{
+ Name: name,
+ Description: desc,
+ ReturnTypeRef: sd.Scope.GoTypeRef(body),
+ ReturnTypeAttribute: codegen.Goify(origin, true),
+ ClientCode: code,
+ ClientArgs: []*InitArgData{&arg},
+ }
+ }
+ }
+ return &TypeData{
+ Name: name,
+ VarName: varname,
+ Description: desc,
+ Def: def,
+ Ref: ref,
+ Init: init,
+ ValidateDef: validateDef,
+ ValidateRef: validateRef,
+ Example: body.Example(expr.Root.API.Random()),
+ }
+}
+
+// buildResponseBodyType builds the TypeData for a response body. The data
+// makes it possible to generate a function that creates the server response
+// body from the service method result/projected result or error.
+//
+// bodyCA is the response (success or error) HTTP body context.
+//
+// attCA is the result/projected type context.
+//
+// svr is true if the function is generated for server side code
+//
+// view is the view name to add as a suffix to the type name.
+//
+func buildResponseBodyType(bodyCA, attCA *codegen.ContextualAttribute, e *expr.HTTPEndpointExpr, svr bool, view *string, sd *ServiceData) *TypeData {
+ body := bodyCA.Attribute.Expr()
+ att := attCA.Attribute.Expr()
+ if body.Type == expr.Empty {
+ return nil
+ }
+ var (
+ name string
+ varname string
+ desc string
+ def string
+ ref string
+ validateDef string
+ validateRef string
+ viewName string
+ mustInit bool
+
+ svc = sd.Service
+ )
+ {
+ // For server code, we project the response body type if the type is a result
+ // type and generate a type for each view in the result type. This makes it
+ // possible to return only the attributes in the view in the server response.
+ if svr && view != nil && *view != "" {
+ viewName = *view
+ body = expr.DupAtt(body)
+ if rt, ok := body.Type.(*expr.ResultTypeExpr); ok {
+ var err error
+ rt, err = expr.Project(rt, *view)
+ if err != nil {
+ panic(err)
+ }
+ body.Type = rt
+ sd.ServerTypeNames[rt.Name()] = false
+ }
+ bodyCA = bodyCA.Dup(body, bodyCA.Required)
+ }
+
+ name = body.Type.Name()
+ ref = sd.Scope.GoTypeRef(body)
+ mustInit = att.Type != expr.Empty && needInit(body.Type)
+
+ if ut, ok := body.Type.(expr.UserType); ok {
+ // response body is a user type.
+ varname = codegen.Goify(ut.Name(), true)
+ def = bodyCA.Dup(ut.Attribute(), true).Def()
+ desc = fmt.Sprintf("%s is the type of the %q service %q endpoint HTTP response body.",
+ varname, svc.Name, e.Name())
+ if !svr && view == nil {
+ // generate validation code for unmarshaled type (client-side).
+ validateDef = codegen.RecursiveValidationCode(bodyCA, "body")
+ if validateDef != "" {
+ validateRef = fmt.Sprintf("err = Validate%s(&body)", varname)
+ }
+ }
+ } else if !expr.IsPrimitive(body.Type) && mustInit {
+ // response body is an array or map type.
+ name = codegen.Goify(e.Name(), true) + "ResponseBody"
+ varname = name
+ desc = fmt.Sprintf("%s is the type of the %q service %q endpoint HTTP response body.",
+ varname, svc.Name, e.Name())
+ def = bodyCA.Def()
+ validateRef = codegen.RecursiveValidationCode(bodyCA, "body")
+ } else {
+ // response body is a primitive type.
+ varname = sd.Scope.GoTypeRef(body)
+ validateRef = codegen.RecursiveValidationCode(bodyCA, "body")
+ desc = body.Description
+ }
+ }
+ if svr {
+ sd.ServerTypeNames[name] = false
+ // We collect the server body types needed to generate a response body type
+ // here because the response body type would be different from the actual
+ // type in the HTTPResponseExpr since we projected the body type above.
+ // For client side, we don't have to generate a separate body type per
+ // view. Hence the client types are collected in "analyze" function.
+ collectUserTypes(body.Type, func(ut expr.UserType) {
+ if d := attributeTypeData(httpTypeContext(ut, "", sd.Scope, false, true), false, true, sd); d != nil {
+ sd.ServerBodyAttributeTypes = append(sd.ServerBodyAttributeTypes, d)
+ }
+ })
+
+ }
+
+ var init *InitData
+ {
+ if svr && mustInit {
+ var (
+ name string
+ desc string
+ code string
+ origin string
+ err error
+ helpers []*codegen.TransformFunctionData
+
+ sourceVar = "res"
+ svc = sd.Service
+ ca = attCA
+ )
+ {
+ name = fmt.Sprintf("New%s", codegen.Goify(sd.Scope.GoTypeName(body), true))
+ desc = fmt.Sprintf("%s builds the HTTP response body from the result of the %q endpoint of the %q service.",
+ name, e.Name(), svc.Name)
+ src := sourceVar
+ // If the design uses Body("name") syntax then we need to use the result attribute
+ // to transform.
+ if o, ok := body.Meta["origin:attribute"]; ok {
+ srcObj := expr.AsObject(att.Type)
+ origin = o[0]
+ srcAtt := srcObj.Attribute(origin)
+ ca = ca.Dup(srcAtt, att.IsRequired(origin))
+ src += "." + codegen.Goify(origin, true)
+ }
+ code, helpers, err = marshal(ca, bodyCA, src, "body")
+ if err != nil {
+ fmt.Println(err.Error()) // TBD validate DSL so errors are not possible
+ }
+ sd.ServerTransformHelpers = codegen.AppendHelpers(sd.ServerTransformHelpers, helpers)
+ }
+ ref := sourceVar
+ if view != nil {
+ ref += ".Projected"
+ }
+ arg := InitArgData{
+ Name: sourceVar,
+ Ref: ref,
+ TypeRef: attCA.Attribute.Ref(),
+ Validate: validateDef,
+ Example: att.Example(expr.Root.API.Random()),
+ }
+ init = &InitData{
+ Name: name,
+ Description: desc,
+ ReturnTypeRef: sd.Scope.GoTypeRef(body),
+ ReturnTypeAttribute: codegen.Goify(origin, true),
+ ServerCode: code,
+ ServerArgs: []*InitArgData{&arg},
+ }
+ }
+ }
+ return &TypeData{
+ Name: name,
+ VarName: varname,
+ Description: desc,
+ Def: def,
+ Ref: ref,
+ Init: init,
+ ValidateDef: validateDef,
+ ValidateRef: validateRef,
+ Example: body.Example(expr.Root.API.Random()),
+ View: viewName,
+ }
+}
+
+func extractPathParams(a *expr.MappedAttributeExpr, serviceCA *codegen.ContextualAttribute, scope *codegen.NameScope) []*ParamData {
+ var params []*ParamData
+ codegen.WalkMappedAttr(a, func(name, elem string, required bool, c *expr.AttributeExpr) error {
+ var (
+ varn = scope.Unique(codegen.Goify(name, false))
+ arr = expr.AsArray(c.Type)
+ ca = serviceCA.Dup(c, true)
+ )
+ fieldName := codegen.Goify(name, true)
+ if !expr.IsObject(serviceCA.Attribute.Expr().Type) {
+ fieldName = ""
+ }
+ params = append(params, &ParamData{
+ Name: elem,
+ AttributeName: name,
+ Description: c.Description,
+ FieldName: fieldName,
+ VarName: varn,
+ Required: required,
+ Type: c.Type,
+ TypeName: scope.GoTypeName(c),
+ TypeRef: scope.GoTypeRef(c),
+ Pointer: false,
+ Slice: arr != nil,
+ StringSlice: arr != nil && arr.ElemType.Type.Kind() == expr.StringKind,
+ Map: false,
+ MapStringSlice: false,
+ Validate: codegen.RecursiveValidationCode(ca, varn),
+ DefaultValue: c.DefaultValue,
+ Example: c.Example(expr.Root.API.Random()),
+ })
+ return nil
+ })
+
+ return params
+}
+
+func extractQueryParams(a *expr.MappedAttributeExpr, serviceCA *codegen.ContextualAttribute, scope *codegen.NameScope) []*ParamData {
+ var params []*ParamData
+ codegen.WalkMappedAttr(a, func(name, elem string, required bool, c *expr.AttributeExpr) error {
+ var (
+ varn = scope.Unique(codegen.Goify(name, false))
+ arr = expr.AsArray(c.Type)
+ mp = expr.AsMap(c.Type)
+ typeRef = scope.GoTypeRef(c)
+ ca = serviceCA.Dup(c, required)
+
+ pointer bool
+ )
+ if pointer = a.IsPrimitivePointer(name, true); pointer {
+ typeRef = "*" + typeRef
+ }
+ fieldName := codegen.Goify(name, true)
+ if !expr.IsObject(serviceCA.Attribute.Expr().Type) {
+ fieldName = ""
+ }
+ params = append(params, &ParamData{
+ Name: elem,
+ AttributeName: name,
+ Description: c.Description,
+ FieldName: fieldName,
+ VarName: varn,
+ Required: required,
+ Type: c.Type,
+ TypeName: scope.GoTypeName(c),
+ TypeRef: typeRef,
+ Pointer: pointer,
+ Slice: arr != nil,
+ StringSlice: arr != nil && arr.ElemType.Type.Kind() == expr.StringKind,
+ Map: mp != nil,
+ MapStringSlice: mp != nil &&
+ mp.KeyType.Type.Kind() == expr.StringKind &&
+ mp.ElemType.Type.Kind() == expr.ArrayKind &&
+ expr.AsArray(mp.ElemType.Type).ElemType.Type.Kind() == expr.StringKind,
+ Validate: codegen.RecursiveValidationCode(ca, varn),
+ DefaultValue: c.DefaultValue,
+ Example: c.Example(expr.Root.API.Random()),
+ })
+ return nil
+ })
+
+ return params
+}
+
+func extractHeaders(a *expr.MappedAttributeExpr, serviceCA *codegen.ContextualAttribute, scope *codegen.NameScope) []*HeaderData {
+ var headers []*HeaderData
+ codegen.WalkMappedAttr(a, func(name, elem string, _ bool, _ *expr.AttributeExpr) error {
+ var (
+ hattr *expr.AttributeExpr
+ hattrCA *codegen.ContextualAttribute
+ required bool
+
+ svcAtt = serviceCA.Attribute.Expr()
+ )
+ {
+ required = svcAtt.IsRequired(name)
+ if hattr = svcAtt.Find(name); hattr == nil {
+ required = true
+ hattr = svcAtt
+ }
+ hattrCA = serviceCA.Dup(hattr, required)
+ }
+ var (
+ varn = scope.Unique(codegen.Goify(name, false))
+ arr = expr.AsArray(hattr.Type)
+ typeRef = scope.GoTypeRef(hattr)
+
+ fieldName string
+ pointer bool
+ )
+ {
+ if pointer = hattrCA.IsPointer() && expr.IsPrimitive(hattr.Type); pointer {
+ typeRef = "*" + typeRef
+ }
+ fieldName = codegen.Goify(name, true)
+ if !expr.IsObject(svcAtt.Type) {
+ fieldName = ""
+ }
+ }
+ headers = append(headers, &HeaderData{
+ Name: elem,
+ AttributeName: name,
+ Description: hattr.Description,
+ CanonicalName: http.CanonicalHeaderKey(elem),
+ FieldName: fieldName,
+ VarName: varn,
+ TypeName: scope.GoTypeName(hattr),
+ TypeRef: typeRef,
+ Required: required,
+ Pointer: pointer,
+ Slice: arr != nil,
+ StringSlice: arr != nil && arr.ElemType.Type.Kind() == expr.StringKind,
+ Type: hattr.Type,
+ Validate: codegen.RecursiveValidationCode(hattrCA, varn),
+ DefaultValue: hattrCA.DefaultValue(),
+ Example: hattr.Example(expr.Root.API.Random()),
+ })
+ return nil
+ })
+ return headers
+}
+
+// collectUserTypes traverses the given data type recursively and calls back the
+// given function for each attribute using a user type.
+func collectUserTypes(dt expr.DataType, cb func(expr.UserType), seen ...map[string]struct{}) {
+ if dt == expr.Empty {
+ return
+ }
+ var s map[string]struct{}
+ if len(seen) > 0 {
+ s = seen[0]
+ } else {
+ s = make(map[string]struct{})
+ }
+ switch actual := dt.(type) {
+ case *expr.Object:
+ for _, nat := range *actual {
+ collectUserTypes(nat.Attribute.Type, cb, seen...)
+ }
+ case *expr.Array:
+ collectUserTypes(actual.ElemType.Type, cb, seen...)
+ case *expr.Map:
+ collectUserTypes(actual.KeyType.Type, cb, seen...)
+ collectUserTypes(actual.ElemType.Type, cb, seen...)
+ case expr.UserType:
+ if _, ok := s[actual.ID()]; ok {
+ return
+ }
+ s[actual.ID()] = struct{}{}
+ cb(actual)
+ collectUserTypes(actual.Attribute().Type, cb, s)
+ }
+}
+
+func attributeTypeData(attCA *codegen.ContextualAttribute, req, server bool, rd *ServiceData) *TypeData {
+ att := attCA.Attribute.Expr()
+ ut := att.Type.(expr.UserType)
+ if ut == expr.Empty {
+ return nil
+ }
+ seen := rd.ServerTypeNames
+ if !server {
+ seen = rd.ClientTypeNames
+ }
+ if _, ok := seen[ut.Name()]; ok {
+ return nil
+ }
+ seen[ut.Name()] = false
+
+ var (
+ name string
+ desc string
+ validate string
+ validateRef string
+
+ ca = attCA.Dup(ut.Attribute(), true)
+ )
+ {
+ name = attCA.Attribute.Name()
+ ctx := "request"
+ if !req {
+ ctx = "response"
+ }
+ desc = name + " is used to define fields on " + ctx + " body types."
+
+ validate = codegen.RecursiveValidationCode(ca, "body")
+ if validate != "" {
+ validateRef = fmt.Sprintf("err = Validate%s(v)", name)
+ }
+ }
+ return &TypeData{
+ Name: ut.Name(),
+ VarName: name,
+ Description: desc,
+ Def: ca.Def(),
+ Ref: attCA.Attribute.Ref(),
+ ValidateDef: validate,
+ ValidateRef: validateRef,
+ Example: att.Example(expr.Root.API.Random()),
+ }
+}
+
+// httpAttribute implements the Attributor interface that produces Go code.
+// It overrides the Definer interface to produce type definition with
+// encoding tags.
+type httpAttribute struct {
+ *codegen.GoAttribute
+}
+
+// Dup creates a copy of GoAttribute by setting the underlying attribute
+// expression.
+func (h *httpAttribute) Dup(att *expr.AttributeExpr) codegen.Attributor {
+ return &httpAttribute{
+ GoAttribute: h.GoAttribute.Dup(att).(*codegen.GoAttribute),
+ }
+}
+
+// Def returns a valid Go definition for the attribute.
+func (h *httpAttribute) Def(pointer, useDefault bool) string {
+ return goTypeDef(h.NameScope, h.Attribute, pointer, useDefault)
+}
+
+// httpTypeContext returns a contextual attribute for HTTP types (body,
+// params, headers).
+//
+// typ is the type for which the context is applied
+//
+// pkg is the package name where the body type exists
+//
+// scope is the named scope
+//
+// request if true indicates that the type is a request type, else response
+// type
+//
+// svr if true indicates that the type is a server type, else client type
+//
+func httpTypeContext(typ expr.DataType, pkg string, scope *codegen.NameScope, request, svr bool) *codegen.ContextualAttribute {
+ return httpContext(&expr.AttributeExpr{Type: typ}, pkg, scope, request, svr)
+}
+
+// httpContext returns a Go contextual attribute for HTTP attributes.
+//
+// att is the attribute for which the context is applied
+//
+// pkg is the package name where the body type exists
+//
+// scope is the named scope
+//
+// request if true indicates that the type is a request type, else response
+// type
+//
+// svr if true indicates that the type is a server type, else client type
+//
+func httpContext(att *expr.AttributeExpr, pkg string, scope *codegen.NameScope, request, svr bool) *codegen.ContextualAttribute {
+ marshal := !request && svr || request && !svr
+ return &codegen.ContextualAttribute{
+ Attribute: &httpAttribute{
+ GoAttribute: codegen.NewGoAttribute(att, pkg, scope).(*codegen.GoAttribute),
+ },
+ Pointer: !marshal,
+ UseDefault: marshal,
+ }
+}
+
+// unmarshal initializes a data structure defined by target type from a data
+// structure defined by source type. The attributes in the source data
+// structure are pointers and the attributes in the target data structure that
+// have default values are non-pointers. Fields in target type are initialized
+// with their default values (if any).
+//
+// source, target are the source and target contextual attributes used
+// in the transformation
+//
+// sourceVar, targetVar are the variable names for source and target used in
+// the transformation code
+//
+func unmarshal(source, target *codegen.ContextualAttribute, sourceVar, targetVar string) (string, []*codegen.TransformFunctionData, error) {
+ return codegen.GoTransform(source, target, sourceVar, targetVar, "unmarshal")
+}
+
+// marshal initializes a data structure defined by target type from a data
+// structure defined by source type. The fields in the source and target
+// data structure use non-pointers for attributes with default values.
+//
+// source, target are the source and target contextual attributes used
+// in the transformation
+//
+// sourceVar, targetVar are the variable names for source and target used in
+// the transformation code
+//
+func marshal(source, target *codegen.ContextualAttribute, sourceVar, targetVar string) (string, []*codegen.TransformFunctionData, error) {
+ return codegen.GoTransform(source, target, sourceVar, targetVar, "marshal")
+}
+
+func appendUnique(s []*service.SchemeData, d *service.SchemeData) []*service.SchemeData {
+ found := false
+ for _, se := range s {
+ if se.Name == d.Name {
+ found = true
+ break
+ }
+ }
+ if found {
+ return s
+ }
+ return append(s, d)
+}
+
+// needConversion returns true if the type needs to be converted from a string.
+func needConversion(dt expr.DataType) bool {
+ if dt == expr.Empty {
+ return false
+ }
+ switch actual := dt.(type) {
+ case expr.Primitive:
+ if actual.Kind() == expr.StringKind ||
+ actual.Kind() == expr.AnyKind ||
+ actual.Kind() == expr.BytesKind {
+ return false
+ }
+ return true
+ case *expr.Array:
+ return needConversion(actual.ElemType.Type)
+ case *expr.Map:
+ return needConversion(actual.KeyType.Type) ||
+ needConversion(actual.ElemType.Type)
+ default:
+ return true
+ }
+}
+
+// needInit returns true if and only if the given type is or makes use of user
+// types.
+func needInit(dt expr.DataType) bool {
+ if dt == expr.Empty {
+ return false
+ }
+ switch actual := dt.(type) {
+ case expr.Primitive:
+ return false
+ case *expr.Array:
+ return needInit(actual.ElemType.Type)
+ case *expr.Map:
+ return needInit(actual.KeyType.Type) ||
+ needInit(actual.ElemType.Type)
+ case *expr.Object:
+ for _, nat := range *actual {
+ if needInit(nat.Attribute.Type) {
+ return true
+ }
+ }
+ return false
+ case expr.UserType:
+ return true
+ default:
+ panic(fmt.Sprintf("unknown data type %T", actual)) // bug
+ }
+}
+
+// upgradeParams returns the data required to render the websocket_upgrade
+// template.
+func upgradeParams(e *EndpointData, fn string) map[string]interface{} {
+ return map[string]interface{}{
+ "ViewedResult": e.Method.ViewedResult,
+ "Function": fn,
+ }
+}
+
+const (
+ // pathInitT is the template used to render the code of path constructors.
+ pathInitT = `
+{{- if .Args }}
+ {{- range $i, $arg := .Args }}
+ {{- $typ := (index $.PathParams $i).Attribute.Type }}
+ {{- if eq $typ.Name "array" }}
+ {{ .Name }}Slice := make([]string, len({{ .Name }}))
+ for i, v := range {{ .Name }} {
+ {{ .Name }}Slice[i] = {{ template "slice_conversion" $typ.ElemType.Type.Name }}
+ }
+ {{- end }}
+ {{- end }}
+ return fmt.Sprintf("{{ .PathFormat }}", {{ range $i, $arg := .Args }}
+ {{- if eq (index $.PathParams $i).Attribute.Type.Name "array" }}strings.Join({{ .Name }}Slice, ", ")
+ {{- else }}{{ .Name }}
+ {{- end }}, {{ end }})
+{{- else }}
+ return "{{ .PathFormat }}"
+{{- end }}
+
+{{- define "slice_conversion" }}
+ {{- if eq . "string" }} url.QueryEscape(v)
+ {{- else if eq . "int" "int32" }} strconv.FormatInt(int64(v), 10)
+ {{- else if eq . "int64" }} strconv.FormatInt(v, 10)
+ {{- else if eq . "uint" "uint32" }} strconv.FormatUint(uint64(v), 10)
+ {{- else if eq . "uint64" }} strconv.FormatUint(v, 10)
+ {{- else if eq . "float32" }} strconv.FormatFloat(float64(v), 'f', -1, 32)
+ {{- else if eq . "float64" }} strconv.FormatFloat(v, 'f', -1, 64)
+ {{- else if eq . "boolean" }} strconv.FormatBool(v)
+ {{- else if eq . "bytes" }} url.QueryEscape(string(v))
+ {{- else }} url.QueryEscape(fmt.Sprintf("%v", v))
+ {{- end }}
+{{- end }}`
+
+ // requestInitT is the template used to render the code of HTTP
+ // request constructors.
+ requestInitT = `
+{{- if .PathInit.ClientArgs }}
+ var (
+ {{- range .PathInit.ClientArgs }}
+ {{ .Name }} {{ .TypeRef }}
+ {{- end }}
+ )
+{{- end }}
+{{- if and .PayloadRef .Args }}
+ {
+ p, ok := v.({{ .PayloadRef }})
+ if !ok {
+ return nil, goahttp.ErrInvalidType("{{ .ServiceName }}", "{{ .EndpointName }}", "{{ .PayloadRef }}", v)
+ }
+ {{- range .Args }}
+ {{- if .Pointer }}
+ if p{{ if $.HasFields }}.{{ .FieldName }}{{ end }} != nil {
+ {{- end }}
+ {{ .Name }} = {{ if .Pointer }}*{{ end }}p{{ if $.HasFields }}.{{ .FieldName }}{{ end }}
+ {{- if .Pointer }}
+ }
+ {{- end }}
+ {{- end }}
+ }
+{{- end }}
+ {{- if .IsStreaming }}
+ scheme := c.scheme
+ switch c.scheme {
+ case "http":
+ scheme = "ws"
+ case "https":
+ scheme = "wss"
+ }
+ {{- end }}
+ u := &url.URL{Scheme: {{ if .IsStreaming }}scheme{{ else }}c.scheme{{ end }}, Host: c.host, Path: {{ .PathInit.Name }}({{ range .PathInit.ClientArgs }}{{ .Ref }}, {{ end }})}
+ req, err := http.NewRequest("{{ .Verb }}", u.String(), nil)
+ if err != nil {
+ return nil, goahttp.ErrInvalidURL("{{ .ServiceName }}", "{{ .EndpointName }}", u.String(), err)
+ }
+ if ctx != nil {
+ req = req.WithContext(ctx)
+ }
+
+ return req, nil`
+
+ // streamStructTypeT renders the server and client struct types that
+ // implements the client and server stream interfaces. The data to render
+ // input: StreamData
+ streamStructTypeT = `{{ printf "%s implements the %s interface." .VarName .Interface | comment }}
+type {{ .VarName }} struct {
+{{- if eq .Type "server" }}
+ once sync.Once
+ {{ comment "upgrader is the websocket connection upgrader." }}
+ upgrader goahttp.Upgrader
+ {{ comment "connConfigFn is the websocket connection configurer." }}
+ connConfigFn goahttp.ConnConfigureFunc
+ {{ comment "w is the HTTP response writer used in upgrading the connection." }}
+ w http.ResponseWriter
+ {{ comment "r is the HTTP request." }}
+ r *http.Request
+{{- end }}
+ {{ comment "conn is the underlying websocket connection." }}
+ conn *websocket.Conn
+ {{- if .Endpoint.Method.ViewedResult }}
+ {{- if not .Endpoint.Method.ViewedResult.ViewName }}
+ {{ printf "view is the view to render %s result type before sending to the websocket connection." .SendTypeName | comment }}
+ view string
+ {{- end }}
+ {{- end }}
+}
+`
+
+ // streamSendT renders the function implementing the Send method in
+ // stream interface.
+ // input: StreamData
+ streamSendT = `{{ comment .SendDesc }}
+func (s *{{ .VarName }}) {{ .SendName }}(v {{ .SendTypeRef }}) error {
+{{- if eq .Type "server" }}
+ {{- if eq .SendName "Send" }}
+ var err error
+ {{- template "websocket_upgrade" (upgradeParams .Endpoint .SendName) }}
+ {{- else }} {{/* SendAndClose */}}
+ defer s.conn.Close()
+ {{- end }}
+ {{- if .Endpoint.Method.ViewedResult }}
+ {{- if .Endpoint.Method.ViewedResult.ViewName }}
+ res := {{ .PkgName }}.{{ .Endpoint.Method.ViewedResult.Init.Name }}(v, {{ printf "%q" .Endpoint.Method.ViewedResult.ViewName }})
+ {{- else }}
+ res := {{ .PkgName }}.{{ .Endpoint.Method.ViewedResult.Init.Name }}(v, s.view)
+ {{- end }}
+ {{- else }}
+ res := v
+ {{- end }}
+ {{- $servBodyLen := len .Response.ServerBody }}
+ {{- if gt $servBodyLen 0 }}
+ {{- if (index .Response.ServerBody 0).Init }}
+ {{- if .Endpoint.Method.ViewedResult }}
+ {{- if .Endpoint.Method.ViewedResult.ViewName }}
+ {{- $vsb := (viewedServerBody $.Response.ServerBody .Endpoint.Method.ViewedResult.ViewName) }}
+ body := {{ $vsb.Init.Name }}({{ range $vsb.Init.ServerArgs }}{{ .Ref }}, {{ end }})
+ {{- else }}
+ var body interface{}
+ switch s.view {
+ {{- range .Endpoint.Method.ViewedResult.Views }}
+ case {{ printf "%q" .Name }}{{ if eq .Name "default" }}, ""{{ end }}:
+ {{- $vsb := (viewedServerBody $.Response.ServerBody .Name) }}
+ body = {{ $vsb.Init.Name }}({{ range $vsb.Init.ServerArgs }}{{ .Ref }}, {{ end }})
+ {{- end }}
+ }
+ {{- end }}
+ {{- else }}
+ body := {{ (index .Response.ServerBody 0).Init.Name }}({{ range (index .Response.ServerBody 0).Init.ServerArgs }}{{ .Ref }}, {{ end }})
+ {{- end }}
+ return s.conn.WriteJSON(body)
+ {{- else }}
+ return s.conn.WriteJSON(res)
+ {{- end }}
+ {{- else }}
+ return s.conn.WriteJSON(res)
+ {{- end }}
+{{- else }}
+ {{- if .Payload.Init }}
+ body := {{ .Payload.Init.Name }}(v)
+ return s.conn.WriteJSON(body)
+ {{- else }}
+ return s.conn.WriteJSON(v)
+ {{- end }}
+{{- end }}
+}
+` + upgradeT
+
+ // streamRecvT renders the function implementing the Recv method in
+ // stream interface.
+ // input: StreamData
+ streamRecvT = `{{ comment .RecvDesc }}
+func (s *{{ .VarName }}) {{ .RecvName }}() ({{ .RecvTypeRef }}, error) {
+ var (
+ rv {{ .RecvTypeRef }}
+ {{- if eq .Type "server" }}
+ msg *{{ .Payload.Ref }}
+ {{- else }}
+ body {{ .Response.ClientBody.VarName }}
+ {{- end }}
+ err error
+ )
+{{- if eq .Type "server" }}
+ {{- template "websocket_upgrade" (upgradeParams .Endpoint .RecvName) }}
+ if err = s.conn.ReadJSON(&msg); err != nil {
+ return rv, err
+ }
+ if msg == nil {
+ return rv, io.EOF
+ }
+ body := *msg
+ {{- if .Payload.ValidateRef }}
+ {{ .Payload.ValidateRef }}
+ if err != nil {
+ return rv, err
+ }
+ {{- end }}
+ {{- if .Payload.Init }}
+ return {{ .Payload.Init.Name }}(body), nil
+ {{- else }}
+ return body, nil
+ {{- end }}
+{{- else }} {{/* client side code */}}
+ {{- if eq .RecvName "CloseAndRecv" }}
+ defer s.conn.Close()
+ {{ comment "Send a nil payload to the server implying end of message" }}
+ if err = s.conn.WriteJSON(nil); err != nil {
+ return rv, err
+ }
+ {{- end }}
+ err = s.conn.ReadJSON(&body)
+ if websocket.IsCloseError(err, websocket.CloseNormalClosure) {
+ {{- if not .MustClose }}
+ s.conn.Close()
+ {{- end }}
+ return rv, io.EOF
+ }
+ if err != nil {
+ return rv, err
+ }
+ {{- if and .Response.ClientBody.ValidateRef (not .Endpoint.Method.ViewedResult) }}
+ {{ .Response.ClientBody.ValidateRef }}
+ if err != nil {
+ return rv, err
+ }
+ {{- end }}
+ {{- if .Response.ResultInit }}
+ res := {{ .Response.ResultInit.Name }}({{ range .Response.ResultInit.ClientArgs }}{{ .Ref }},{{ end }})
+ {{- if .Endpoint.Method.ViewedResult }}{{ with .Endpoint.Method.ViewedResult }}
+ vres := {{ if not .IsCollection }}&{{ end }}{{ .ViewsPkg }}.{{ .VarName }}{res, {{ if .ViewName }}{{ printf "%q" .ViewName }}{{ else }}s.view{{ end }} }
+ if err := {{ .ViewsPkg }}.Validate{{ $.Endpoint.Method.Result }}(vres); err != nil {
+ return rv, goahttp.ErrValidationError("{{ $.Endpoint.ServiceName }}", "{{ $.Endpoint.Method.Name }}", err)
+ }
+ return {{ $.PkgName }}.{{ .ResultInit.Name }}(vres){{ end }}, nil
+ {{- else }}
+ return res, nil
+ {{- end }}
+ {{- else }}
+ return body, nil
+ {{- end }}
+{{- end }}
+}
+` + upgradeT
+
+ // upgradeT renders the code to upgrade the HTTP connection to a gorilla
+ // websocket connection.
+ upgradeT = `{{- define "websocket_upgrade" }}
+ {{ printf "Upgrade the HTTP connection to a websocket connection only once. Connection upgrade is done here so that authorization logic in the endpoint is executed before calling the actual service method which may call %s()." .Function | comment }}
+ s.once.Do(func() {
+ {{- if and .ViewedResult (eq .Function "Send") }}
+ {{- if not .ViewedResult.ViewName }}
+ respHdr := make(http.Header)
+ respHdr.Add("goa-view", s.view)
+ {{- end }}
+ {{- end }}
+ var conn *websocket.Conn
+ {{- if eq .Function "Send" }}
+ {{- if .ViewedResult }}
+ {{- if not .ViewedResult.ViewName }}
+ conn, err = s.upgrader.Upgrade(s.w, s.r, respHdr)
+ {{- else }}
+ conn, err = s.upgrader.Upgrade(s.w, s.r, nil)
+ {{- end }}
+ {{- else }}
+ conn, err = s.upgrader.Upgrade(s.w, s.r, nil)
+ {{- end }}
+ {{- else }}
+ conn, err = s.upgrader.Upgrade(s.w, s.r, nil)
+ {{- end }}
+ if err != nil {
+ return
+ }
+ if s.connConfigFn != nil {
+ conn = s.connConfigFn(conn)
+ }
+ s.conn = conn
+ })
+ if err != nil {
+ return {{ if eq .Function "Recv" }}rv, {{ end }}err
+ }
+{{- end }}
+`
+
+ // streamCloseT renders the function implementing the Close method in
+ // stream interface.
+ // input: StreamData
+ streamCloseT = `{{ printf "Close closes the %q endpoint websocket connection." .Endpoint.Method.Name | comment }}
+func (s *{{ .VarName }}) Close() error {
+ defer s.conn.Close()
+ var err error
+{{- if eq .Type "server" }}
+ {{- template "websocket_upgrade" (upgradeParams .Endpoint "Close") }}
+ if err = s.conn.WriteControl(
+ websocket.CloseMessage,
+ websocket.FormatCloseMessage(websocket.CloseNormalClosure, "server closing connection"),
+ time.Now().Add(time.Second),
+ ); err != nil {
+ return err
+ }
+{{- else }} {{/* client side code */}}
+ {{ comment "Send a nil payload to the server implying client closing connection." }}
+ if err = s.conn.WriteJSON(nil); err != nil {
+ return err
+ }
+{{- end }}
+ return nil
+}
+` + upgradeT
+
+ // streamSetViewT renders the function implementing the SetView method in
+ // server stream interface.
+ // input: StreamData
+ streamSetViewT = `{{ printf "SetView sets the view to render the %s type before sending to the %q endpoint websocket connection." .SendTypeName .Endpoint.Method.Name | comment }}
+func (s *{{ .VarName }}) SetView(view string) {
+ s.view = view
+}
+`
+)
diff --git a/vendor/goa.design/goa/http/codegen/testing.go b/vendor/goa.design/goa/http/codegen/testing.go
new file mode 100644
index 000000000..d788caf09
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/testing.go
@@ -0,0 +1,31 @@
+package codegen
+
+import (
+ "os"
+ "testing"
+
+ "goa.design/goa/codegen/service"
+ "goa.design/goa/expr"
+)
+
+// RunHTTPDSL returns the HTTP DSL root resulting from running the given DSL.
+func RunHTTPDSL(t *testing.T, dsl func()) *expr.RootExpr {
+ // reset all roots and codegen data structures
+ service.Services = make(service.ServicesData)
+ HTTPServices = make(ServicesData)
+ return expr.RunHTTPDSL(t, dsl)
+}
+
+// makeGolden returns a file object used to write test expectations. If
+// makeGolden returns nil then the test should not generate test
+// expectations.
+func makeGolden(t *testing.T, p string) *os.File {
+ if os.Getenv("GOLDEN") == "" {
+ return nil
+ }
+ f, err := os.OpenFile(p, os.O_CREATE|os.O_WRONLY, 0600)
+ if err != nil {
+ t.Fatal(err)
+ }
+ return f
+}
diff --git a/vendor/goa.design/goa/http/codegen/typedef.go b/vendor/goa.design/goa/http/codegen/typedef.go
new file mode 100644
index 000000000..a4c310863
--- /dev/null
+++ b/vendor/goa.design/goa/http/codegen/typedef.go
@@ -0,0 +1,95 @@
+package codegen
+
+import (
+ "fmt"
+ "strings"
+
+ "goa.design/goa/codegen"
+ "goa.design/goa/expr"
+)
+
+// goTypeDef returns the Go code that defines the struct corresponding to ma.
+// It differs from the function defined in the codegen package in the following
+// ways:
+//
+// - It defines marshaler tags on each fields using the HTTP element names.
+//
+// - It produced fields with pointers even if the corresponding attribute is
+// required when ptr is true so that the generated code may validate
+// explicitly.
+//
+// useDefault directs whether fields holding primitive types with default values
+// should hold pointers when ptr is false. If it is true then the fields are
+// values even when not required (to account for the fact that they have a
+// default value so cannot be nil) otherwise the fields are values only when
+// required.
+func goTypeDef(scope *codegen.NameScope, att *expr.AttributeExpr, ptr, useDefault bool) string {
+ switch actual := att.Type.(type) {
+ case expr.Primitive:
+ return codegen.GoNativeTypeName(actual)
+ case *expr.Array:
+ d := goTypeDef(scope, actual.ElemType, ptr, useDefault)
+ if expr.IsObject(actual.ElemType.Type) {
+ d = "*" + d
+ }
+ return "[]" + d
+ case *expr.Map:
+ keyDef := goTypeDef(scope, actual.KeyType, ptr, useDefault)
+ if expr.IsObject(actual.KeyType.Type) {
+ keyDef = "*" + keyDef
+ }
+ elemDef := goTypeDef(scope, actual.ElemType, ptr, useDefault)
+ if expr.IsObject(actual.ElemType.Type) {
+ elemDef = "*" + elemDef
+ }
+ return fmt.Sprintf("map[%s]%s", keyDef, elemDef)
+ case *expr.Object:
+ var ss []string
+ ss = append(ss, "struct {")
+ ma := expr.NewMappedAttributeExpr(att)
+ mat := ma.Attribute()
+ codegen.WalkMappedAttr(ma, func(name, elem string, required bool, at *expr.AttributeExpr) error {
+ var (
+ fn string
+ tdef string
+ desc string
+ tags string
+ )
+ {
+ fn = codegen.GoifyAtt(at, name, true)
+ tdef = goTypeDef(scope, at, ptr, useDefault)
+ if expr.IsPrimitive(at.Type) {
+ if ptr || mat.IsPrimitivePointer(name, useDefault) {
+ tdef = "*" + tdef
+ }
+ } else if expr.IsObject(at.Type) {
+ tdef = "*" + tdef
+ }
+ if at.Description != "" {
+ desc = codegen.Comment(at.Description) + "\n\t"
+ }
+ tags = attributeTags(mat, at, elem, ptr || !ma.IsRequired(name))
+ }
+ ss = append(ss, fmt.Sprintf("\t%s%s %s%s", desc, fn, tdef, tags))
+ return nil
+ })
+ ss = append(ss, "}")
+ return strings.Join(ss, "\n")
+ case expr.UserType:
+ return scope.GoTypeName(att)
+ default:
+ panic(fmt.Sprintf("unknown data type %T", actual)) // bug
+ }
+}
+
+// attributeTags computes the struct field tags.
+func attributeTags(parent, att *expr.AttributeExpr, t string, optional bool) string {
+ if tags := codegen.AttributeTags(parent, att); tags != "" {
+ return tags
+ }
+ var o string
+ if optional {
+ o = ",omitempty"
+ }
+ return fmt.Sprintf(" `form:\"%s%s\" json:\"%s%s\" xml:\"%s%s\"`", t, o, t, o, t, o)
+}
diff --git a/vendor/goa.design/goa/http/encoding.go b/vendor/goa.design/goa/http/encoding.go
new file mode 100644
index 000000000..b7d08971a
--- /dev/null
+++ b/vendor/goa.design/goa/http/encoding.go
@@ -0,0 +1,238 @@
+package http
+
+import (
+ "bytes"
+ "context"
+ "encoding/gob"
+ "encoding/json"
+ "encoding/xml"
+ "io/ioutil"
+ "mime"
+ "net/http"
+ "strings"
+)
+
+const (
+ // AcceptTypeKey is the context key used to store the value of the HTTP
+ // request Accept-Type header. The value may be used by encoders and
+ // decoders to implement a content type negotiation algorithm.
+ AcceptTypeKey contextKey = iota + 1
+ // ContentTypeKey is the context key used to store the value of the HTTP
+ // response Content-Type header when explicitly set in the DSL. The value
+ // may be used by encoders to set the header appropriately.
+ ContentTypeKey
+)
+
+type (
+ // Decoder provides the actual decoding algorithm used to load HTTP
+ // request and response bodies.
+ Decoder interface {
+ // Decode decodes into v.
+ Decode(v interface{}) error
+ }
+
+ // Encoder provides the actual encoding algorithm used to write HTTP
+ // request and response bodies.
+ Encoder interface {
+ // Encode encodes v.
+ Encode(v interface{}) error
+ }
+
+ // EncodingFunc allows a function with appropriate signature to act as a
+ // Decoder/Encoder.
+ EncodingFunc func(v interface{}) error
+
+ // private type used to define context keys.
+ contextKey int
+)
+
+// RequestDecoder returns a HTTP request body decoder suitable for the given
+// request. The decoder handles the following mime types:
+//
+// * application/json using package encoding/json
+// * application/xml using package encoding/xml
+// * application/gob using package encoding/gob
+//
+// RequestDecoder defaults to the JSON decoder if the request "Content-Type"
+// header does not match any of the supported mime type or is missing
+// altogether.
+func RequestDecoder(r *http.Request) Decoder {
+ contentType := r.Header.Get("Content-Type")
+ if contentType == "" {
+ // default to JSON
+ contentType = "application/json"
+ } else {
+ // sanitize
+ if mediaType, _, err := mime.ParseMediaType(contentType); err == nil {
+ contentType = mediaType
+ }
+ }
+ switch contentType {
+ case "application/json":
+ return json.NewDecoder(r.Body)
+ case "application/gob":
+ return gob.NewDecoder(r.Body)
+ case "application/xml":
+ return xml.NewDecoder(r.Body)
+ default:
+ return json.NewDecoder(r.Body)
+ }
+}
+
+// ResponseEncoder returns a HTTP response encoder leveraging the mime type
+// set in the context under the AcceptTypeKey or the ContentTypeKey if any.
+// The encoder supports the following mime types:
+//
+// * application/json using package encoding/json
+// * application/xml using package encoding/xml
+// * application/gob using package encoding/gob
+//
+// ResponseEncoder defaults to the JSON encoder if the context AcceptTypeKey or
+// ContentTypeKey value does not match any of the supported mime types or is
+// missing altogether.
+func ResponseEncoder(ctx context.Context, w http.ResponseWriter) Encoder {
+ negotiate := func(a string) (Encoder, string) {
+ switch a {
+ case "", "application/json":
+ // default to JSON
+ return json.NewEncoder(w), "application/json"
+ case "application/xml":
+ return xml.NewEncoder(w), "application/xml"
+ case "application/gob":
+ return gob.NewEncoder(w), "application/gob"
+ }
+ return nil, ""
+ }
+ var accept string
+ {
+ if a := ctx.Value(AcceptTypeKey); a != nil {
+ accept = a.(string)
+ }
+ }
+ var ct string
+ {
+ if a := ctx.Value(ContentTypeKey); a != nil {
+ ct = a.(string)
+ }
+ }
+ var (
+ enc Encoder
+ mt string
+ err error
+ )
+ {
+ if ct != "" {
+ // If content type explicitly set in the DSL, infer the response encoder
+ // from the content type context key.
+ if mt, _, err = mime.ParseMediaType(ct); err == nil {
+ switch {
+ case ct == "application/json" || strings.HasSuffix(ct, "+json"):
+ enc = json.NewEncoder(w)
+ case ct == "application/xml" || strings.HasSuffix(ct, "+xml"):
+ enc = xml.NewEncoder(w)
+ case ct == "application/gob" || strings.HasSuffix(ct, "+gob"):
+ enc = gob.NewEncoder(w)
+ default:
+ enc = json.NewEncoder(w)
+ }
+ }
+ SetContentType(w, mt)
+ return enc
+ }
+ // If Accept header exists in the request, infer the response encoder
+ // from the header value.
+ if enc, mt = negotiate(accept); enc == nil {
+ // attempt to normalize
+ if mt, _, err = mime.ParseMediaType(accept); err == nil {
+ enc, mt = negotiate(mt)
+ }
+ }
+ if enc == nil {
+ enc, mt = negotiate("")
+ }
+ }
+ SetContentType(w, mt)
+ return enc
+}
+
+// RequestEncoder returns a HTTP request encoder.
+// The encoder uses package encoding/json.
+func RequestEncoder(r *http.Request) Encoder {
+ var buf bytes.Buffer
+ r.Body = ioutil.NopCloser(&buf)
+ return json.NewEncoder(&buf)
+}
+
+// ResponseDecoder returns a HTTP response decoder.
+// The decoder handles the following content types:
+//
+// * application/json using package encoding/json (default)
+// * application/xml using package encoding/xml
+// * application/gob using package encoding/gob
+//
+func ResponseDecoder(resp *http.Response) Decoder {
+ ct := resp.Header.Get("Content-Type")
+ if ct == "" {
+ return json.NewDecoder(resp.Body)
+ }
+ if mediaType, _, err := mime.ParseMediaType(ct); err == nil {
+ ct = mediaType
+ }
+ switch {
+ case ct == "application/json" || strings.HasSuffix(ct, "+json"):
+ return json.NewDecoder(resp.Body)
+ case ct == "application/xml" || strings.HasSuffix(ct, "+xml"):
+ return xml.NewDecoder(resp.Body)
+ case ct == "application/gob" || strings.HasSuffix(ct, "+gob"):
+ return gob.NewDecoder(resp.Body)
+ default:
+ return json.NewDecoder(resp.Body)
+ }
+}
+
+// ErrorEncoder returns an encoder that encodes errors returned by service
+// methods. The encoder checks whether the error is a goa ServiceError struct
+// and if so uses the error temporary and timeout fields to infer a proper HTTP
+// status code and marshals the error struct to the body using the provided
+// encoder. If the error is not a goa ServiceError struct then it is encoded
+// as a permanent internal server error.
+func ErrorEncoder(encoder func(context.Context, http.ResponseWriter) Encoder) func(context.Context, http.ResponseWriter, error) error {
+ return func(ctx context.Context, w http.ResponseWriter, err error) error {
+ enc := encoder(ctx, w)
+ resp := NewErrorResponse(err)
+ w.WriteHeader(resp.StatusCode())
+ return enc.Encode(resp)
+ }
+}
+
+// Decode implements the Decoder interface. It simply calls f(v).
+func (f EncodingFunc) Decode(v interface{}) error { return f(v) }
+
+// Encode implements the Encoder interface. It simply calls f(v).
+func (f EncodingFunc) Encode(v interface{}) error { return f(v) }
+
+// SetContentType initializes the response Content-Type header given a MIME
+// type. If the Content-Type header is already set and the MIME type is
+// "application/json" or "application/xml" then SetContentType appends a suffix
+// to the header ("+json" or "+xml" respectively).
+func SetContentType(w http.ResponseWriter, ct string) {
+ h := w.Header().Get("Content-Type")
+ if h == "" {
+ w.Header().Set("Content-Type", ct)
+ return
+ }
+ // RFC6839 only defines suffixes for a few mime types, we only concern
+ // ourselves with JSON and XML.
+ if ct != "application/json" && ct != "application/xml" {
+ w.Header().Set("Content-Type", ct)
+ return
+ }
+ if strings.Contains(h, "+") {
+ return
+ }
+ suffix := "+json"
+ if ct == "application/xml" {
+ suffix = "+xml"
+ }
+ w.Header().Set("Content-Type", h+suffix)
+}
diff --git a/vendor/goa.design/goa/http/error.go b/vendor/goa.design/goa/http/error.go
new file mode 100644
index 000000000..b2083a616
--- /dev/null
+++ b/vendor/goa.design/goa/http/error.go
@@ -0,0 +1,62 @@
+package http
+
+import (
+ "net/http"
+
+ "goa.design/goa"
+)
+
+type (
+ // ErrorResponse is the data structure encoded in HTTP responses that
+ // correspond to errors created by the generated code. This struct is
+ // mainly intended for clients to decode error responses.
+ ErrorResponse struct {
+ // Name is a name for that class of errors.
+ Name string `json:"name" xml:"name" form:"name"`
+ // ID is the unique error instance identifier.
+ ID string `json:"id" xml:"id" form:"id"`
+ // Message describes the specific error occurrence.
+ Message string `json:"message" xml:"message" form:"message"`
+ // Temporary indicates whether the error is temporary.
+ Temporary bool `json:"temporary" xml:"temporary" form:"temporary"`
+ // Timeout indicates whether the error is a timeout.
+ Timeout bool `json:"timeout" xml:"timeout" form:"timeout"`
+ // Fault indicates whether the error is a server-side fault.
+ Fault bool `json:"fault" xml:"fault" form:"fault"`
+ }
+)
+
+// NewErrorResponse creates a HTTP response from the given error.
+func NewErrorResponse(err error) *ErrorResponse {
+ if gerr, ok := err.(*goa.ServiceError); ok {
+ return &ErrorResponse{
+ Name: gerr.Name,
+ ID: gerr.ID,
+ Message: gerr.Message,
+ Timeout: gerr.Timeout,
+ Temporary: gerr.Temporary,
+ Fault: gerr.Fault,
+ }
+ }
+ return NewErrorResponse(goa.Fault(err.Error()))
+}
+
+// StatusCode implements a heuristic that computes a HTTP response status code
+// appropriate for the timeout, temporary and fault characteristics of the
+// error. This method is used by the generated server code when the error is not
+// described explicitly in the design.
+func (resp *ErrorResponse) StatusCode() int {
+ if resp.Fault {
+ return http.StatusInternalServerError
+ }
+ if resp.Timeout {
+ if resp.Temporary {
+ return http.StatusGatewayTimeout
+ }
+ return http.StatusRequestTimeout
+ }
+ if resp.Temporary {
+ return http.StatusServiceUnavailable
+ }
+ return http.StatusBadRequest
+}
diff --git a/vendor/goa.design/goa/http/middleware/capture.go b/vendor/goa.design/goa/http/middleware/capture.go
new file mode 100644
index 000000000..7bd395f7e
--- /dev/null
+++ b/vendor/goa.design/goa/http/middleware/capture.go
@@ -0,0 +1,42 @@
+package middleware
+
+import (
+ "bufio"
+ "fmt"
+ "net"
+ "net/http"
+)
+
+// ResponseCapture is a http.ResponseWriter which captures the response status
+// code and content length.
+type ResponseCapture struct {
+ http.ResponseWriter
+ StatusCode int
+ ContentLength int
+}
+
+// CaptureResponse creates a ResponseCapture that wraps the given ResponseWriter.
+func CaptureResponse(w http.ResponseWriter) *ResponseCapture {
+ return &ResponseCapture{ResponseWriter: w}
+}
+
+// WriteHeader records the value of the status code before writing it.
+func (w *ResponseCapture) WriteHeader(code int) {
+ w.StatusCode = code
+ w.ResponseWriter.WriteHeader(code)
+}
+
+// Write computes the written len and stores it in ContentLength.
+func (w *ResponseCapture) Write(b []byte) (int, error) {
+ n, err := w.ResponseWriter.Write(b)
+ w.ContentLength += n
+ return n, err
+}
+
+// Hijack supports the http.Hijacker interface.
+func (w *ResponseCapture) Hijack() (net.Conn, *bufio.ReadWriter, error) {
+ if h, ok := w.ResponseWriter.(http.Hijacker); ok {
+ return h.Hijack()
+ }
+ return nil, nil, fmt.Errorf("response writer does not support hijacking: %T", w.ResponseWriter)
+}
diff --git a/vendor/goa.design/goa/http/middleware/context.go b/vendor/goa.design/goa/http/middleware/context.go
new file mode 100644
index 000000000..ff4930c13
--- /dev/null
+++ b/vendor/goa.design/goa/http/middleware/context.go
@@ -0,0 +1,31 @@
+package middleware
+
+import (
+ "context"
+ "net/http"
+)
+
+// RequestContext returns a middleware which initializes the request context.
+func RequestContext(ctx context.Context) func(http.Handler) http.Handler {
+ return func(h http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ h.ServeHTTP(w, r.WithContext(ctx))
+ })
+ }
+}
+
+// RequestContextKeyVals returns a middleware which adds the given key/value pairs to the
+// request context.
+func RequestContextKeyVals(keyvals ...interface{}) func(http.Handler) http.Handler {
+ if len(keyvals)%2 != 0 {
+ panic("initctx: invalid number of key/value elements, must be an even number")
+ }
+ return func(h http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ for i := 0; i < len(keyvals); i += 2 {
+ r = r.WithContext(context.WithValue(r.Context(), keyvals[i], keyvals[i+1]))
+ }
+ h.ServeHTTP(w, r)
+ })
+ }
+}
diff --git a/vendor/goa.design/goa/http/middleware/ctxkeys.go b/vendor/goa.design/goa/http/middleware/ctxkeys.go
new file mode 100644
index 000000000..dfdef8fb3
--- /dev/null
+++ b/vendor/goa.design/goa/http/middleware/ctxkeys.go
@@ -0,0 +1,84 @@
+package middleware
+
+type (
+ // private type used to define context keys
+ ctxKey int
+)
+
+const (
+ // RequestIDKey is the request context key used to store the request ID
+ // created by the RequestID middleware.
+ RequestIDKey ctxKey = iota + 1
+
+ // TraceIDKey is the request context key used to store the current Trace
+ // ID if any.
+ TraceIDKey
+
+ // TraceSpanIDKey is the request context key used to store the current
+ // trace span ID if any.
+ TraceSpanIDKey
+
+ // TraceParentSpanIDKey is the request context key used to store the current
+ // trace parent span ID if any.
+ TraceParentSpanIDKey
+
+ // RequestMethodKey is the request context key used to store r.Method created by
+ // the PopulateRequestContext middleware.
+ RequestMethodKey
+
+ // RequestURIKey is the request context key used to store r.RequestURI created by
+ // the PopulateRequestContext middleware.
+ RequestURIKey
+
+ // RequestPathKey is the request context key used to store r.URL.Path created by
+ // the PopulateRequestContext middleware.
+ RequestPathKey
+
+ // RequestProtoKey is the request context key used to store r.Proto created by
+ // the PopulateRequestContext middleware.
+ RequestProtoKey
+
+ // RequestHostKey is the request context key used to store r.Host created by
+ // the PopulateRequestContext middleware.
+ RequestHostKey
+
+ // RequestRemoteAddrKey is the request context key used to store r.RemoteAddr
+ // created by the PopulateRequestContext middleware.
+ RequestRemoteAddrKey
+
+ // RequestXForwardedForKey is the request context key used to store the
+ // X-Forwarded-For header created by the PopulateRequestContext middleware.
+ RequestXForwardedForKey
+
+ // RequestXForwardedProtoKey is the request context key used to store the
+ // X-Forwarded-Proto header created by the PopulateRequestContext middleware.
+ RequestXForwardedProtoKey
+
+ // RequestXRealIPKey is the request context key used to store the
+ // X-Real-IP header created by the PopulateRequestContext middleware.
+ RequestXRealIPKey
+
+ // RequestAuthorizationKey is the request context key used to store the
+ // Authorization header created by the PopulateRequestContext middleware.
+ RequestAuthorizationKey
+
+ // RequestRefererKey is the request context key used to store Referer header
+ // created by the PopulateRequestContext middleware.
+ RequestRefererKey
+
+ // RequestUserAgentKey is the request context key used to store the User-Agent
+ // header created by the PopulateRequestContext middleware.
+ RequestUserAgentKey
+
+ // RequestXRequestIDKey is the request context key used to store the X-Request-Id
+ // header created by the PopulateRequestContext middleware.
+ RequestXRequestIDKey
+
+ // RequestAcceptKey is the request context key used to store the Accept header
+ // created by the PopulateRequestContext middleware.
+ RequestAcceptKey
+
+ // RequestXCSRFTokenKey is the request context key used to store X-Csrf-Token header
+ // created by the PopulateRequestContext middleware.
+ RequestXCSRFTokenKey
+)
diff --git a/vendor/goa.design/goa/http/middleware/debug.go b/vendor/goa.design/goa/http/middleware/debug.go
new file mode 100644
index 000000000..d7f42a35f
--- /dev/null
+++ b/vendor/goa.design/goa/http/middleware/debug.go
@@ -0,0 +1,124 @@
+package middleware
+
+import (
+ "bytes"
+ "crypto/rand"
+ "encoding/base64"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "sort"
+ "strings"
+
+ goahttp "goa.design/goa/http"
+)
+
+// responseDupper tees the response to a buffer and a response writer.
+type responseDupper struct {
+ http.ResponseWriter
+ Buffer *bytes.Buffer
+ Status int
+}
+
+// Debug returns a debug middleware which prints detailed information about
+// incoming requests and outgoing responses including all headers, parameters
+// and bodies.
+func Debug(mux goahttp.Muxer, w io.Writer) func(http.Handler) http.Handler {
+ return func(h http.Handler) http.Handler {
+ return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
+ buf := &bytes.Buffer{}
+ // Request ID
+ reqID := r.Context().Value(RequestIDKey)
+ if reqID == nil {
+ reqID = shortID()
+ }
+
+ // Request URL
+ buf.WriteString(fmt.Sprintf("> [%s] %s %s", reqID, r.Method, r.URL.String()))
+
+ // Request Headers
+ keys := make([]string, len(r.Header))
+ i := 0
+ for k := range r.Header {
+ keys[i] = k
+ i++
+ }
+ sort.Strings(keys)
+ for _, k := range keys {
+ buf.WriteString(fmt.Sprintf("\n> [%s] %s: %s", reqID, k, strings.Join(r.Header[k], ", ")))
+ }
+
+ // Request parameters
+ params := mux.Vars(r)
+ keys = make([]string, len(params))
+ i = 0
+ for k := range params {
+ keys[i] = k
+ i++
+ }
+ sort.Strings(keys)
+ for _, k := range keys {
+ buf.WriteString(fmt.Sprintf("\n> [%s] %s: %s", reqID, k, params[k]))
+ }
+
+ // Request body
+ b, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ b = []byte("failed to read body: " + err.Error())
+ }
+ if len(b) > 0 {
+ buf.WriteByte('\n')
+ lines := strings.Split(string(b), "\n")
+ for _, line := range lines {
+ buf.WriteString(fmt.Sprintf("[%s] %s\n", reqID, line))
+ }
+ }
+ r.Body = ioutil.NopCloser(bytes.NewBuffer(b))
+
+ dupper := &responseDupper{ResponseWriter: rw, Buffer: &bytes.Buffer{}}
+ h.ServeHTTP(dupper, r)
+
+ buf.WriteString(fmt.Sprintf("\n< [%s] %s", reqID, http.StatusText(dupper.Status)))
+ keys = make([]string, len(dupper.Header()))
+ i = 0
+ for k := range dupper.Header() {
+ keys[i] = k
+ i++
+ }
+ sort.Strings(keys)
+ for _, k := range keys {
+ buf.WriteString(fmt.Sprintf("\n< [%s] %s: %s", reqID, k, strings.Join(dupper.Header()[k], ", ")))
+ }
+ if dupper.Buffer.Len() > 0 {
+ buf.WriteByte('\n')
+ lines := strings.Split(dupper.Buffer.String(), "\n")
+ for _, line := range lines {
+ buf.WriteString(fmt.Sprintf("[%s] %s\n", reqID, line))
+ }
+ }
+ buf.WriteByte('\n')
+ w.Write(buf.Bytes())
+ })
+ }
+}
+
+// Write writes the data to the buffer and connection as part of an HTTP reply.
+func (r *responseDupper) Write(b []byte) (int, error) {
+ r.Buffer.Write(b)
+ return r.ResponseWriter.Write(b)
+}
+
+// WriteHeader records the status and sends an HTTP response header with status code.
+func (r *responseDupper) WriteHeader(s int) {
+ r.Status = s
+ r.ResponseWriter.WriteHeader(s)
+}
+
+// shortID produces a "unique" 6 bytes long string.
+// Do not use as a reliable way to get unique IDs, instead use for things like logging.
+func shortID() string {
+ b := make([]byte, 6)
+ io.ReadFull(rand.Reader, b)
+ return base64.RawURLEncoding.EncodeToString(b)
+}
diff --git a/vendor/goa.design/goa/http/middleware/log.go b/vendor/goa.design/goa/http/middleware/log.go
new file mode 100644
index 000000000..48cfdda45
--- /dev/null
+++ b/vendor/goa.design/goa/http/middleware/log.go
@@ -0,0 +1,95 @@
+package middleware
+
+import (
+ "bytes"
+ "fmt"
+ "log"
+ "net"
+ "net/http"
+ "time"
+)
+
+type (
+ // Logger is the logging interface used by the middleware to produce
+ // log entries.
+ Logger interface {
+ // Log creates a log entry using a sequence of alternating keys
+ // and values.
+ Log(keyvals ...interface{}) error
+ }
+
+ // adapter is a thin wrapper around the stdlib logger that adapts it to
+ // the Logger interface.
+ adapter struct {
+ *log.Logger
+ }
+)
+
+// Log returns a middleware that logs incoming HTTP requests and outgoing
+// responses. The middleware uses the request ID set by the RequestID middleware
+// or creates a short unique request ID if missing for each incoming request and
+// logs it with the request and corresponding response details.
+//
+// The middleware logs the incoming requests HTTP method and path as well as the
+// originator of the request. The originator is computed by looking at the
+// X-Forwarded-For HTTP header or - absent of that - the originating IP. The
+// middleware also logs the response HTTP status code, body length (in bytes) and
+// timing information.
+func Log(l Logger) func(h http.Handler) http.Handler {
+ return func(h http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ reqID := r.Context().Value(RequestIDKey)
+ if reqID == nil {
+ reqID = shortID()
+ }
+ started := time.Now()
+
+ l.Log("id", reqID,
+ "req", r.Method+" "+r.URL.String(),
+ "from", from(r))
+
+ rw := CaptureResponse(w)
+ h.ServeHTTP(rw, r)
+
+ l.Log("id", reqID,
+ "status", rw.StatusCode,
+ "bytes", rw.ContentLength,
+ "time", time.Since(started).String())
+ })
+ }
+}
+
+// NewLogger creates a Logger backed by a stdlib logger.
+func NewLogger(l *log.Logger) Logger {
+ return &adapter{l}
+}
+
+func (a *adapter) Log(keyvals ...interface{}) error {
+ n := (len(keyvals) + 1) / 2
+ if len(keyvals)%2 != 0 {
+ keyvals = append(keyvals, "MISSING")
+ }
+ var fm bytes.Buffer
+ vals := make([]interface{}, n)
+ for i := 0; i < len(keyvals); i += 2 {
+ k := keyvals[i]
+ v := keyvals[i+1]
+ vals[i/2] = v
+ fm.WriteString(fmt.Sprintf(" %s=%%+v", k))
+ }
+ a.Logger.Printf(fm.String(), vals...)
+ return nil
+}
+
+// from makes a best effort to compute the request client IP.
+func from(req *http.Request) string {
+ if f := req.Header.Get("X-Forwarded-For"); f != "" {
+ return f
+ }
+ f := req.RemoteAddr
+ ip, _, err := net.SplitHostPort(f)
+ if err != nil {
+ return f
+ }
+ return ip
+}
diff --git a/vendor/goa.design/goa/http/middleware/request.go b/vendor/goa.design/goa/http/middleware/request.go
new file mode 100644
index 000000000..4c9c0d7fb
--- /dev/null
+++ b/vendor/goa.design/goa/http/middleware/request.go
@@ -0,0 +1,37 @@
+package middleware
+
+import (
+ "context"
+ "net/http"
+)
+
+// PopulateRequestContext returns a middleware which populates a number of standard HTTP request
+// values into the request context. Those values may be extracted using the
+// corresponding ContextKey type in this package.
+func PopulateRequestContext() func(http.Handler) http.Handler {
+ return func(h http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := r.Context()
+ for k, v := range map[ctxKey]string{
+ RequestMethodKey: r.Method,
+ RequestURIKey: r.RequestURI,
+ RequestPathKey: r.URL.Path,
+ RequestProtoKey: r.Proto,
+ RequestHostKey: r.Host,
+ RequestRemoteAddrKey: r.RemoteAddr,
+ RequestXForwardedForKey: r.Header.Get("X-Forwarded-For"),
+ RequestXRealIPKey: r.Header.Get("X-Real-Ip"),
+ RequestXForwardedProtoKey: r.Header.Get("X-Forwarded-Proto"),
+ RequestAuthorizationKey: r.Header.Get("Authorization"),
+ RequestRefererKey: r.Header.Get("Referer"),
+ RequestUserAgentKey: r.Header.Get("User-Agent"),
+ RequestXRequestIDKey: r.Header.Get("X-Request-Id"),
+ RequestXCSRFTokenKey: r.Header.Get("X-Csrf-Token"),
+ RequestAcceptKey: r.Header.Get("Accept"),
+ } {
+ ctx = context.WithValue(ctx, k, v)
+ }
+ h.ServeHTTP(w, r.WithContext(ctx))
+ })
+ }
+}
diff --git a/vendor/goa.design/goa/http/middleware/requestid.go b/vendor/goa.design/goa/http/middleware/requestid.go
new file mode 100644
index 000000000..2f6e982a5
--- /dev/null
+++ b/vendor/goa.design/goa/http/middleware/requestid.go
@@ -0,0 +1,74 @@
+package middleware
+
+import (
+ "context"
+ "net/http"
+)
+
+type (
+ // RequestIDOption uses a constructor pattern to customize middleware
+ RequestIDOption func(*requestIDOption) *requestIDOption
+
+ // requestIDOption is the struct storing all the options.
+ requestIDOption struct {
+ // useXRequestIDHeader is true to use incoming "X-Request-Id" headers,
+ // instead of always generating unique IDs, when present in request.
+ // defaults to always-generate.
+ useXRequestIDHeader bool
+ // xRequestHeaderLimit is positive to truncate incoming "X-Request-Id"
+ // headers at the specified length. defaults to no limit.
+ xRequestHeaderLimit int
+ }
+)
+
+// RequestID returns a middleware, which initializes the context with a unique
+// value under the RequestIDKey key. Optionally uses the incoming "X-Request-Id"
+// header, if present, with or without a length limit to use as request ID. the
+// default behavior is to always generate a new ID.
+//
+// examples of use:
+// service.Use(middleware.RequestID())
+//
+// // enable options for using "X-Request-Id" header with length limit.
+// service.Use(middleware.RequestID(
+// middleware.UseXRequestIDHeaderOption(true),
+// middleware.XRequestHeaderLimitOption(128)))
+func RequestID(options ...RequestIDOption) func(http.Handler) http.Handler {
+ o := new(requestIDOption)
+ for _, option := range options {
+ o = option(o)
+ }
+ return func(h http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ var id string
+ if o.useXRequestIDHeader {
+ id = r.Header.Get("X-Request-Id")
+ if o.xRequestHeaderLimit > 0 && len(id) > o.xRequestHeaderLimit {
+ id = id[:o.xRequestHeaderLimit]
+ } else if id == "" {
+ id = shortID()
+ }
+ } else {
+ id = shortID()
+ }
+ ctx := context.WithValue(r.Context(), RequestIDKey, id)
+ h.ServeHTTP(w, r.WithContext(ctx))
+ })
+ }
+}
+
+// UseXRequestIDHeaderOption enables/disables using "X-Request-Id" header.
+func UseXRequestIDHeaderOption(f bool) RequestIDOption {
+ return func(o *requestIDOption) *requestIDOption {
+ o.useXRequestIDHeader = f
+ return o
+ }
+}
+
+// XRequestHeaderLimitOption sets the option for using "X-Request-Id" header.
+func XRequestHeaderLimitOption(limit int) RequestIDOption {
+ return func(o *requestIDOption) *requestIDOption {
+ o.xRequestHeaderLimit = limit
+ return o
+ }
+}
diff --git a/vendor/goa.design/goa/http/middleware/sampler.go b/vendor/goa.design/goa/http/middleware/sampler.go
new file mode 100644
index 000000000..4af61b1b6
--- /dev/null
+++ b/vendor/goa.design/goa/http/middleware/sampler.go
@@ -0,0 +1,98 @@
+package middleware
+
+import (
+ "math/rand"
+ "sync"
+ "sync/atomic"
+ "time"
+)
+
+type (
+ // Sampler is an interface for computing when a sample falls within a range.
+ Sampler interface {
+ // Sample returns true if the caller should sample now.
+ Sample() bool
+ }
+
+ adaptiveSampler struct {
+ sync.Mutex
+ lastRate int64
+ maxSamplingRate int
+ sampleSize uint32
+ start time.Time
+ counter uint32
+ }
+
+ fixedSampler int
+)
+
+const (
+ // adaptive upper bound has granularity in case caller becomes extremely busy.
+ adaptiveUpperBoundInt = 10000
+ adaptiveUpperBoundFloat = float64(adaptiveUpperBoundInt)
+)
+
+// NewAdaptiveSampler computes the interval for sampling for tracing middleware.
+// it can also be used by non-web go routines to trace internal API calls.
+//
+// maxSamplingRate is the desired maximum sampling rate in requests per second.
+//
+// sampleSize sets the number of requests between two adjustments of the
+// sampling rate when MaxSamplingRate is set. the sample rate cannot be adjusted
+// until the sample size is reached at least once.
+func NewAdaptiveSampler(maxSamplingRate, sampleSize int) Sampler {
+ if maxSamplingRate <= 0 {
+ panic("maxSamplingRate must be greater than 0")
+ }
+ if sampleSize <= 0 {
+ panic("sample size must be greater than 0")
+ }
+ return &adaptiveSampler{
+ lastRate: adaptiveUpperBoundInt, // samples all until initial count reaches sample size
+ maxSamplingRate: maxSamplingRate,
+ sampleSize: uint32(sampleSize),
+ start: time.Now(),
+ }
+}
+
+// NewFixedSampler sets the tracing sampling rate as a percentage value.
+func NewFixedSampler(samplingPercent int) Sampler {
+ if samplingPercent < 0 || samplingPercent > 100 {
+ panic("samplingPercent must be between 0 and 100")
+ }
+ return fixedSampler(samplingPercent)
+}
+
+// Sample implementation for adaptive rate
+func (s *adaptiveSampler) Sample() bool {
+ // adjust sampling rate whenever sample size is reached.
+ var currentRate int
+ if atomic.AddUint32(&s.counter, 1) == s.sampleSize { // exact match prevents
+ atomic.StoreUint32(&s.counter, 0) // race is ok
+ s.Lock()
+ {
+ d := time.Since(s.start).Seconds()
+ r := float64(s.sampleSize) / d
+ currentRate = int((float64(s.maxSamplingRate) * adaptiveUpperBoundFloat) / r)
+ if currentRate > adaptiveUpperBoundInt {
+ currentRate = adaptiveUpperBoundInt
+ } else if currentRate < 1 {
+ currentRate = 1
+ }
+ s.start = time.Now()
+ }
+ s.Unlock()
+ atomic.StoreInt64(&s.lastRate, int64(currentRate))
+ } else {
+ currentRate = int(atomic.LoadInt64(&s.lastRate))
+ }
+
+ // currentRate is never zero.
+ return currentRate == adaptiveUpperBoundInt || rand.Intn(adaptiveUpperBoundInt) < currentRate
+}
+
+// Sample implementation for fixed percentage
+func (s fixedSampler) Sample() bool {
+ samplingPercent := int(s)
+ return samplingPercent > 0 && (samplingPercent == 100 || rand.Intn(100) < samplingPercent)
+}
diff --git a/vendor/goa.design/goa/http/middleware/trace.go b/vendor/goa.design/goa/http/middleware/trace.go
new file mode 100644
index 000000000..98abf9d9c
--- /dev/null
+++ b/vendor/goa.design/goa/http/middleware/trace.go
@@ -0,0 +1,213 @@
+package middleware
+
+import (
+ "context"
+ "net/http"
+)
+
+type (
+ // IDFunc is a function that produces span and trace IDs for consumption
+ // by tracing systems such as Zipkin or AWS X-Ray.
+ IDFunc func() string
+
+ // Doer is the http client Do interface.
+ Doer interface {
+ Do(*http.Request) (*http.Response, error)
+ }
+
+ // Option is a constructor option that makes it possible to customize
+ // the middleware.
+ Option func(*options) *options
+
+ // tracedDoer is a client Doer that inserts the tracing headers for each
+ // request it makes.
+ tracedDoer struct {
+ Doer
+ }
+
+ // tracedLogger is a logger which logs the trace ID with every log entry
+ // when one is present.
+ tracedLogger struct {
+ logger Logger
+ traceID string
+ }
+
+ // options is the struct storing all the options.
+ options struct {
+ traceIDFunc IDFunc
+ spanIDFunc IDFunc
+ samplingPercent int
+ maxSamplingRate int
+ sampleSize int
+ }
+)
+
+const (
+ // TraceIDHeader is the default name of the HTTP request header
+ // containing the current TraceID if any.
+ TraceIDHeader = "TraceID"
+
+ // ParentSpanIDHeader is the default name of the HTTP request header
+ // containing the parent span ID if any.
+ ParentSpanIDHeader = "ParentSpanID"
+)
+
+// Trace returns a trace middleware that initializes the trace information in the
+// request context.
+//
+// samplingRate must be a value between 0 and 100. It represents the percentage of
+// requests that should be traced. If the incoming request has a Trace ID header
+// then the sampling rate is disregarded and the tracing is enabled.
+//
+// spanIDFunc and traceIDFunc are the functions used to create Span and Trace
+// IDs respectively. This is configurable so that the created IDs are compatible
+// with the various backend tracing systems. The xray package provides
+// implementations that produce AWS X-Ray compatible IDs.
+func Trace(opts ...Option) func(http.Handler) http.Handler {
+ o := &options{
+ traceIDFunc: shortID,
+ spanIDFunc: shortID,
+ samplingPercent: 100,
+ // Below only apply if maxSamplingRate is set
+ sampleSize: 1000,
+ }
+ for _, opt := range opts {
+ o = opt(o)
+ }
+ var sampler Sampler
+ if o.maxSamplingRate > 0 {
+ sampler = NewAdaptiveSampler(o.maxSamplingRate, o.sampleSize)
+ } else {
+ sampler = NewFixedSampler(o.samplingPercent)
+ }
+ return func(h http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ // insert a new trace ID only if not already being traced.
+ traceID := r.Header.Get(TraceIDHeader)
+ if traceID == "" && sampler.Sample() {
+ // insert tracing only within sample.
+ traceID = o.traceIDFunc()
+ }
+ if traceID == "" {
+ h.ServeHTTP(w, r)
+ } else {
+ // insert IDs into context to enable tracing.
+ spanID := o.spanIDFunc()
+ parentID := r.Header.Get(ParentSpanIDHeader)
+ ctx := WithSpan(r.Context(), traceID, spanID, parentID)
+ h.ServeHTTP(w, r.WithContext(ctx))
+ }
+ })
+ }
+}
+
+// TraceIDFunc is a constructor option that overrides the function used to
+// compute trace IDs.
+func TraceIDFunc(f IDFunc) Option {
+ return func(o *options) *options {
+ o.traceIDFunc = f
+ return o
+ }
+}
+
+// SpanIDFunc is a constructor option that overrides the function used to
+// compute span IDs.
+func SpanIDFunc(f IDFunc) Option {
+ return func(o *options) *options {
+ o.spanIDFunc = f
+ return o
+ }
+}
+
+// SamplingPercent sets the tracing sampling rate as a percentage value.
+// It panics if p is less than 0 or more than 100.
+// SamplingPercent and MaxSamplingRate are mutually exclusive.
+func SamplingPercent(p int) Option {
+ if p < 0 || p > 100 {
+ panic("sampling rate must be between 0 and 100")
+ }
+ return func(o *options) *options {
+ o.samplingPercent = p
+ return o
+ }
+}
+
+// MaxSamplingRate sets a target sampling rate in requests per second. Setting a
+// max sampling rate causes the middleware to adjust the sampling percent
+// dynamically. Defaults to 2 req/s.
+// SamplingPercent and MaxSamplingRate are mutually exclusive.
+func MaxSamplingRate(r int) Option {
+ if r <= 0 {
+ panic("max sampling rate must be greater than 0")
+ }
+ return func(o *options) *options {
+ o.maxSamplingRate = r
+ return o
+ }
+}
+
+// SampleSize sets the number of requests between two adjustments of the sampling
+// rate when MaxSamplingRate is set. Defaults to 1,000.
+func SampleSize(s int) Option {
+ if s <= 0 {
+ panic("sample size must be greater than 0")
+ }
+ return func(o *options) *options {
+ o.sampleSize = s
+ return o
+ }
+}
+
+// WrapDoer wraps a goa client Doer and sets the trace headers so that the
+// downstream service may properly retrieve the parent span ID and trace ID.
+func WrapDoer(doer Doer) Doer {
+ return &tracedDoer{doer}
+}
+
+// WrapLogger returns a logger which logs the trace ID with every message if
+// there is one.
+func WrapLogger(l Logger, traceID string) Logger {
+ return &tracedLogger{logger: l, traceID: traceID}
+}
+
+// WithTrace returns a context containing the given trace ID.
+func WithTrace(ctx context.Context, traceID string) context.Context {
+ ctx = context.WithValue(ctx, TraceIDKey, traceID)
+ return ctx
+}
+
+// WithSpan returns a context containing the given trace, span and parent span
+// IDs.
+func WithSpan(ctx context.Context, traceID, spanID, parentID string) context.Context {
+ if parentID != "" {
+ ctx = context.WithValue(ctx, TraceParentSpanIDKey, parentID)
+ }
+ ctx = context.WithValue(ctx, TraceIDKey, traceID)
+ ctx = context.WithValue(ctx, TraceSpanIDKey, spanID)
+ return ctx
+}
+
+// Do adds the tracing headers to the requests before making it.
+func (d *tracedDoer) Do(r *http.Request) (*http.Response, error) {
+ var (
+ traceID = r.Context().Value(TraceIDKey)
+ spanID = r.Context().Value(TraceSpanIDKey)
+ )
+ if traceID != nil {
+ r.Header.Set(TraceIDHeader, traceID.(string))
+ r.Header.Set(ParentSpanIDHeader, spanID.(string))
+ }
+
+ return d.Doer.Do(r)
+}
+
+// Log logs the trace ID when present then the values passed as argument.
+func (l *tracedLogger) Log(keyvals ...interface{}) error {
+ if l.traceID == "" {
+ l.logger.Log(keyvals...)
+ return nil
+ }
+ keyvals = append([]interface{}{"trace", l.traceID}, keyvals...)
+ l.logger.Log(keyvals...)
+ return nil
+}
diff --git a/vendor/goa.design/goa/http/mux.go b/vendor/goa.design/goa/http/mux.go
new file mode 100644
index 000000000..c711d5eda
--- /dev/null
+++ b/vendor/goa.design/goa/http/mux.go
@@ -0,0 +1,84 @@
+package http
+
+import (
+ "net/http"
+ "regexp"
+
+ "github.com/dimfeld/httptreemux"
+)
+
+type (
+ // Muxer is the HTTP request multiplexer interface used by the generated
+ // code. ServeHTTP must match the HTTP method and URL of each incoming
+ // request against the list of registered patterns and call the handler
+ // for the corresponding method and the pattern that most closely
+ // matches the URL.
+ //
+ // The patterns may include wildcards that identify URL segments that
+ // must be captured.
+ //
+ // There are two forms of wildcards the implementation must support:
+ //
+ // - "{name}" wildcards capture a single path segment, for example the
+ // pattern "/images/{name}" captures "/images/favicon.ico" and adds
+ // the key "name" with the value "favicon.ico" to the map returned
+ // by Vars.
+ //
+ // - "{*name}" wildcards must appear at the end of the pattern and
+ // captures the entire path starting where the wildcard matches. For
+ // example the pattern "/images/{*filename}" captures
+ // "/images/public/thumbnail.jpg" and associates the key
+ // "filename" with "public/thumbnail.jpg" in the map returned by
+ // Vars.
+ //
+ // The names of wildcards must match the regular expression
+ // "[a-zA-Z0-9_]+".
+ Muxer interface {
+ // Handle registers the handler function for the given method
+ // and pattern.
+ Handle(method, pattern string, handler http.HandlerFunc)
+
+ // ServeHTTP dispatches the request to the handler whose method
+ // matches the request method and whose pattern most closely
+ // matches the request URL.
+ ServeHTTP(http.ResponseWriter, *http.Request)
+
+ // Vars returns the path variables captured for the given
+ // request.
+ Vars(*http.Request) map[string]string
+ }
+
+ // mux is the default Muxer implementation. It leverages the
+ // httptreemux router and simply substitutes the syntax used to define
+ // wildcards from ":wildcard" and "*wildcard" to "{wildcard}" and
+ // "{*wildcard}" respectively.
+ mux struct {
+ *httptreemux.ContextMux
+ }
+)
+
+// NewMuxer returns a Muxer implementation based on the httptreemux router.
+func NewMuxer() Muxer {
+ r := httptreemux.NewContextMux()
+ r.EscapeAddedRoutes = true
+ return &mux{r}
+}
+
+// Handle maps the wildcard format used by goa to the one used by httptreemux.
+func (m *mux) Handle(method, pattern string, handler http.HandlerFunc) {
+ m.ContextMux.Handle(method, treemuxify(pattern), handler)
+}
+
+// Vars extracts the path variables from the request context.
+func (m *mux) Vars(r *http.Request) map[string]string {
+ return httptreemux.ContextParams(r.Context())
+}
+
+var wildSeg = regexp.MustCompile(`/{([a-zA-Z0-9_]+)}`)
+var wildPath = regexp.MustCompile(`/{\*([a-zA-Z0-9_]+)}`)
+
+func treemuxify(pattern string) string {
+ pattern = wildSeg.ReplaceAllString(pattern, "/:$1")
+ pattern = wildPath.ReplaceAllString(pattern, "/*$1")
+ return pattern
+}
diff --git a/vendor/goa.design/goa/http/websocket.go b/vendor/goa.design/goa/http/websocket.go
new file mode 100644
index 000000000..fc7b9da1b
--- /dev/null
+++ b/vendor/goa.design/goa/http/websocket.go
@@ -0,0 +1,25 @@
+package http
+
+import (
+ "net/http"
+
+ "github.com/gorilla/websocket"
+)
+
+type (
+ // Upgrader is an HTTP connection that is able to upgrade to websocket.
+ Upgrader interface {
+ // Upgrade upgrades the HTTP connection to the websocket protocol.
+ Upgrade(w http.ResponseWriter, r *http.Request, responseHeader http.Header) (*websocket.Conn, error)
+ }
+
+ // Dialer creates a websocket connection to a given URL.
+ Dialer interface {
+ // Dial creates a client connection to the websocket server.
+ Dial(url string, h http.Header) (*websocket.Conn, *http.Response, error)
+ }
+
+ // ConnConfigureFunc is used to configure a websocket connection with
+ // custom handlers.
+ ConnConfigureFunc func(*websocket.Conn) *websocket.Conn
+)
diff --git a/vendor/goa.design/goa/pkg/version.go b/vendor/goa.design/goa/pkg/version.go
new file mode 100644
index 000000000..3968b5302
--- /dev/null
+++ b/vendor/goa.design/goa/pkg/version.go
@@ -0,0 +1,42 @@
+package pkg
+
+import (
+ "fmt"
+ "regexp"
+ "strconv"
+)
+
+const (
+ // Major version number
+ Major = 2
+ // Minor version number
+ Minor = 0
+ // Build number
+ Build = 0
+ // Suffix - set to empty string in release tag commits.
+ Suffix = "wip"
+)
+
+var (
+ // Version format
+ versionFormat = regexp.MustCompile(`v(\d+?)\.(\d+?)\.(\d+?)(?:-.+)?`)
+)
+
+// Version returns the complete version number.
+func Version() string {
+ return fmt.Sprintf("v%d.%d.%d-%s", Major, Minor, Build, Suffix)
+}
+
+// Compatible returns true if Major matches the major version of the given version string.
+// It returns an error if the given string is not a valid version string.
+func Compatible(v string) (bool, error) {
+ matches := versionFormat.FindStringSubmatch(v)
+ if len(matches) != 4 {
+ return false, fmt.Errorf("invalid version string format %#v, %+v", v, matches)
+ }
+ mj, err := strconv.Atoi(matches[1])
+ if err != nil {
+ return false, fmt.Errorf("invalid major version number %#v, must be number, %v", matches[1], err)
+ }
+ return mj == Major, nil
+}
diff --git a/vendor/goa.design/goa/validation.go b/vendor/goa.design/goa/validation.go
new file mode 100644
index 000000000..7491933f3
--- /dev/null
+++ b/vendor/goa.design/goa/validation.go
@@ -0,0 +1,201 @@
+package goa
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "net"
+ "net/mail"
+ "net/url"
+ "regexp"
+ "sync"
+ "time"
+)
+
+// Format defines a validation format.
+type Format string
+
+const (
+ // FormatDate describes RFC3339 date values.
+ FormatDate Format = "date"
+
+ // FormatDateTime describes RFC3339 date time values.
+ FormatDateTime Format = "date-time"
+
+ // FormatUUID describes RFC4122 UUID values.
+ FormatUUID = "uuid"
+
+ // FormatEmail describes RFC5322 email addresses.
+ FormatEmail = "email"
+
+ // FormatHostname describes RFC1035 Internet hostnames.
+ FormatHostname = "hostname"
+
+ // FormatIPv4 describes RFC2373 IPv4 address values.
+ FormatIPv4 = "ipv4"
+
+ // FormatIPv6 describes RFC2373 IPv6 address values.
+ FormatIPv6 = "ipv6"
+
+ // FormatIP describes RFC2373 IPv4 or IPv6 address values.
+ FormatIP = "ip"
+
+ // FormatURI describes RFC3986 URI values.
+ FormatURI = "uri"
+
+ // FormatMAC describes IEEE 802 MAC-48, EUI-48 or EUI-64 MAC address values.
+ FormatMAC = "mac"
+
+ // FormatCIDR describes RFC4632 and RFC4291 CIDR notation IP address values.
+ FormatCIDR = "cidr"
+
+ // FormatRegexp describes regular expression syntax accepted by RE2.
+ FormatRegexp = "regexp"
+
+ // FormatJSON describes JSON text.
+ FormatJSON = "json"
+
+ // FormatRFC1123 describes RFC1123 date time values.
+ FormatRFC1123 = "rfc1123"
+)
+
+var (
+ hostnameRegex = regexp.MustCompile(`^[[:alnum:]][[:alnum:]\-]{0,61}[[:alnum:]]|[[:alpha:]]$`)
+ ipv4Regex = regexp.MustCompile(`^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$`)
+ uuidURNPrefix = []byte("urn:uuid:")
+ uuidByteGroups = []int{8, 4, 4, 4, 12}
+)
+
+// ValidateFormat validates val against f. It returns nil if the string conforms
+// to the format, an error otherwise. name is the name of the variable used in
+// error messages. where in a data structure the error occurred if any. The
+// format specification follows the json schema draft 4 validation extension.
+// see http://json-schema.org/latest/json-schema-validation.html#anchor105
+// Supported formats are:
+//
+// - "date": RFC3339 date value
+// - "date-time": RFC3339 date time value
+// - "email": RFC5322 email address
+// - "hostname": RFC1035 Internet host name
+// - "ipv4", "ipv6", "ip": RFC2673 and RFC2373 IP address values
+// - "uri": RFC3986 URI value
+// - "mac": IEEE 802 MAC-48, EUI-48 or EUI-64 MAC address value
+// - "cidr": RFC4632 and RFC4291 CIDR notation IP address value
+// - "regexp": Regular expression syntax accepted by RE2
+// - "rfc1123": RFC1123 date time value
+func ValidateFormat(name string, val string, f Format) error {
+ var err error
+ switch f {
+ case FormatDate:
+ _, err = time.Parse("2006-01-02", val)
+ case FormatDateTime:
+ _, err = time.Parse(time.RFC3339, val)
+ case FormatUUID:
+ err = validateUUID(val)
+ case FormatEmail:
+ _, err = mail.ParseAddress(val)
+ case FormatHostname:
+ if !hostnameRegex.MatchString(val) {
+ err = fmt.Errorf("hostname value '%s' does not match %s",
+ val, hostnameRegex.String())
+ }
+ case FormatIPv4, FormatIPv6, FormatIP:
+ ip := net.ParseIP(val)
+ if ip == nil {
+ err = fmt.Errorf("\"%s\" is an invalid %s value", val, f)
+ }
+ if f == FormatIPv4 {
+ if !ipv4Regex.MatchString(val) {
+ err = fmt.Errorf("\"%s\" is an invalid ipv4 value", val)
+ }
+ }
+ if f == FormatIPv6 {
+ if ipv4Regex.MatchString(val) {
+ err = fmt.Errorf("\"%s\" is an invalid ipv6 value", val)
+ }
+ }
+ case FormatURI:
+ _, err = url.ParseRequestURI(val)
+ case FormatMAC:
+ _, err = net.ParseMAC(val)
+ case FormatCIDR:
+ _, _, err = net.ParseCIDR(val)
+ case FormatRegexp:
+ _, err = regexp.Compile(val)
+ case FormatJSON:
+ if !json.Valid([]byte(val)) {
+ err = fmt.Errorf("invalid JSON")
+ }
+ case FormatRFC1123:
+ _, err = time.Parse(time.RFC1123, val)
+ default:
+ return fmt.Errorf("unknown format %#v", f)
+ }
+ if err != nil {
+ return InvalidFormatError(name, val, f, err)
+ }
+ return nil
+}
+
+// knownPatterns records the compiled patterns.
+// TBD: refactor all this so that the generated code initializes the map on start to get rid of the
+// need for a RW mutex.
+var knownPatterns = make(map[string]*regexp.Regexp)
+
+// knownPatternsLock is the mutex used to access knownPatterns
+var knownPatternsLock = &sync.RWMutex{}
+
+// ValidatePattern returns an error if val does not match the regular expression
+// p. It makes an effort to minimize the number of times the regular expression
+// needs to be compiled. name is the name of the variable used in error messages.
+func ValidatePattern(name, val, p string) error {
+ knownPatternsLock.RLock()
+ r, ok := knownPatterns[p]
+ knownPatternsLock.RUnlock()
+ if !ok {
+ r = regexp.MustCompile(p) // DSL validation makes sure regexp is valid
+ knownPatternsLock.Lock()
+ knownPatterns[p] = r
+ knownPatternsLock.Unlock()
+ }
+ if !r.MatchString(val) {
+ return InvalidPatternError(name, val, p)
+ }
+ return nil
+}
+
+// The following formats are supported:
+// "6ba7b810-9dad-11d1-80b4-00c04fd430c8",
+// "{6ba7b810-9dad-11d1-80b4-00c04fd430c8}",
+// "urn:uuid:6ba7b810-9dad-11d1-80b4-00c04fd430c8"
+func validateUUID(uuid string) error {
+ if len(uuid) < 32 {
+ return fmt.Errorf("uuid: UUID string too short: %s", uuid)
+ }
+ t := []byte(uuid)
+ braced := false
+ if bytes.Equal(t[:9], uuidURNPrefix) {
+ t = t[9:]
+ } else if t[0] == '{' {
+ t = t[1:]
+ braced = true
+ }
+ for i, byteGroup := range uuidByteGroups {
+ if i > 0 {
+ if t[0] != '-' {
+ return fmt.Errorf("uuid: invalid string format")
+ }
+ t = t[1:]
+ }
+ if len(t) < byteGroup {
+ return fmt.Errorf("uuid: UUID string too short: %s", uuid)
+ }
+ if i == 4 && len(t) > byteGroup &&
+ ((braced && t[byteGroup] != '}') || len(t[byteGroup:]) > 1 || !braced) {
+ return fmt.Errorf("uuid: UUID string too long: %s", uuid)
+ }
+ t = t[byteGroup:]
+ }
+
+ return nil
+}
diff --git a/vendor/golang.org/x/tools/AUTHORS b/vendor/golang.org/x/tools/AUTHORS
new file mode 100644
index 000000000..15167cd74
--- /dev/null
+++ b/vendor/golang.org/x/tools/AUTHORS
@@ -0,0 +1,3 @@
+# This source code refers to The Go Authors for copyright purposes.
+# The master list of authors is in the main Go distribution,
+# visible at http://tip.golang.org/AUTHORS.
diff --git a/vendor/golang.org/x/tools/CONTRIBUTORS b/vendor/golang.org/x/tools/CONTRIBUTORS
new file mode 100644
index 000000000..1c4577e96
--- /dev/null
+++ b/vendor/golang.org/x/tools/CONTRIBUTORS
@@ -0,0 +1,3 @@
+# This source code was written by the Go contributors.
+# The master list of contributors is in the main Go distribution,
+# visible at http://tip.golang.org/CONTRIBUTORS.
diff --git a/vendor/golang.org/x/tools/LICENSE b/vendor/golang.org/x/tools/LICENSE
new file mode 100644
index 000000000..6a66aea5e
--- /dev/null
+++ b/vendor/golang.org/x/tools/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/tools/PATENTS b/vendor/golang.org/x/tools/PATENTS
new file mode 100644
index 000000000..733099041
--- /dev/null
+++ b/vendor/golang.org/x/tools/PATENTS
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go. This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation. If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/vendor/golang.org/x/tools/cmd/getgo/LICENSE b/vendor/golang.org/x/tools/cmd/getgo/LICENSE
new file mode 100644
index 000000000..32017f8fa
--- /dev/null
+++ b/vendor/golang.org/x/tools/cmd/getgo/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2017 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
new file mode 100644
index 000000000..6b7052b89
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
@@ -0,0 +1,627 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+// This file defines utilities for working with source positions.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "sort"
+)
+
+// PathEnclosingInterval returns the node that encloses the source
+// interval [start, end), and all its ancestors up to the AST root.
+//
+// The definition of "enclosing" used by this function considers
+// additional whitespace abutting a node to be enclosed by it.
+// In this example:
+//
+// z := x + y // add them
+// <-A->
+// <----B----->
+//
+// the ast.BinaryExpr(+) node is considered to enclose interval B
+// even though its [Pos()..End()) is actually only interval A.
+// This behaviour makes user interfaces more tolerant of imperfect
+// input.
+//
+// This function treats tokens as nodes, though they are not included
+// in the result. e.g. PathEnclosingInterval("+") returns the
+// enclosing ast.BinaryExpr("x + y").
+//
+// If start==end, the 1-char interval following start is used instead.
+//
+// The 'exact' result is true if the interval contains only path[0]
+// and perhaps some adjacent whitespace. It is false if the interval
+// overlaps multiple children of path[0], or if it contains only
+// interior whitespace of path[0].
+// In this example:
+//
+// z := x + y // add them
+// <--C--> <---E-->
+// ^
+// D
+//
+// intervals C, D and E are inexact. C is contained by the
+// z-assignment statement, because it spans three of its children (:=,
+// x, +). So too is the 1-char interval D, because it contains only
+// interior whitespace of the assignment. E is considered interior
+// whitespace of the BlockStmt containing the assignment.
+//
+// Precondition: [start, end) both lie within the same file as root.
+// TODO(adonovan): return (nil, false) in this case and remove precond.
+// Requires FileSet; see loader.tokenFileContainsPos.
+//
+// Postcondition: path is never nil; it always contains at least 'root'.
+//
+func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
+ // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
+
+ // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
+ var visit func(node ast.Node) bool
+ visit = func(node ast.Node) bool {
+ path = append(path, node)
+
+ nodePos := node.Pos()
+ nodeEnd := node.End()
+
+ // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
+
+ // Intersect [start, end) with interval of node.
+ if start < nodePos {
+ start = nodePos
+ }
+ if end > nodeEnd {
+ end = nodeEnd
+ }
+
+ // Find sole child that contains [start, end).
+ children := childrenOf(node)
+ l := len(children)
+ for i, child := range children {
+ // [childPos, childEnd) is unaugmented interval of child.
+ childPos := child.Pos()
+ childEnd := child.End()
+
+ // [augPos, augEnd) is whitespace-augmented interval of child.
+ augPos := childPos
+ augEnd := childEnd
+ if i > 0 {
+ augPos = children[i-1].End() // start of preceding whitespace
+ }
+ if i < l-1 {
+ nextChildPos := children[i+1].Pos()
+ // Does [start, end) lie between child and next child?
+ if start >= augEnd && end <= nextChildPos {
+ return false // inexact match
+ }
+ augEnd = nextChildPos // end of following whitespace
+ }
+
+ // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
+ // i, augPos, augEnd, start, end) // debugging
+
+ // Does augmented child strictly contain [start, end)?
+ if augPos <= start && end <= augEnd {
+ _, isToken := child.(tokenNode)
+ return isToken || visit(child)
+ }
+
+ // Does [start, end) overlap multiple children?
+ // i.e. left-augmented child contains start
+ // but LR-augmented child does not contain end.
+ if start < childEnd && end > augEnd {
+ break
+ }
+ }
+
+ // No single child contained [start, end),
+ // so node is the result. Is it exact?
+
+ // (It's tempting to put this condition before the
+ // child loop, but it gives the wrong result in the
+ // case where a node (e.g. ExprStmt) and its sole
+ // child have equal intervals.)
+ if start == nodePos && end == nodeEnd {
+ return true // exact match
+ }
+
+ return false // inexact: overlaps multiple children
+ }
+
+ if start > end {
+ start, end = end, start
+ }
+
+ if start < root.End() && end > root.Pos() {
+ if start == end {
+ end = start + 1 // empty interval => interval of size 1
+ }
+ exact = visit(root)
+
+ // Reverse the path:
+ for i, l := 0, len(path); i < l/2; i++ {
+ path[i], path[l-1-i] = path[l-1-i], path[i]
+ }
+ } else {
+ // Selection lies within whitespace preceding the
+ // first (or following the last) declaration in the file.
+ // The result nonetheless always includes the ast.File.
+ path = append(path, root)
+ }
+
+ return
+}
+
+// tokenNode is a dummy implementation of ast.Node for a single token.
+// They are used transiently by PathEnclosingInterval but never escape
+// this package.
+//
+type tokenNode struct {
+ pos token.Pos
+ end token.Pos
+}
+
+func (n tokenNode) Pos() token.Pos {
+ return n.pos
+}
+
+func (n tokenNode) End() token.Pos {
+ return n.end
+}
+
+func tok(pos token.Pos, len int) ast.Node {
+ return tokenNode{pos, pos + token.Pos(len)}
+}
+
+// childrenOf returns the direct non-nil children of ast.Node n.
+// It may include fake ast.Node implementations for bare tokens.
+// it is not safe to call (e.g.) ast.Walk on such nodes.
+//
+func childrenOf(n ast.Node) []ast.Node {
+ var children []ast.Node
+
+ // First add nodes for all true subtrees.
+ ast.Inspect(n, func(node ast.Node) bool {
+ if node == n { // push n
+ return true // recur
+ }
+ if node != nil { // push child
+ children = append(children, node)
+ }
+ return false // no recursion
+ })
+
+ // Then add fake Nodes for bare tokens.
+ switch n := n.(type) {
+ case *ast.ArrayType:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Elt.End(), len("]")))
+
+ case *ast.AssignStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.BasicLit:
+ children = append(children,
+ tok(n.ValuePos, len(n.Value)))
+
+ case *ast.BinaryExpr:
+ children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+ case *ast.BlockStmt:
+ children = append(children,
+ tok(n.Lbrace, len("{")),
+ tok(n.Rbrace, len("}")))
+
+ case *ast.BranchStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.CallExpr:
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+ if n.Ellipsis != 0 {
+ children = append(children, tok(n.Ellipsis, len("...")))
+ }
+
+ case *ast.CaseClause:
+ if n.List == nil {
+ children = append(children,
+ tok(n.Case, len("default")))
+ } else {
+ children = append(children,
+ tok(n.Case, len("case")))
+ }
+ children = append(children, tok(n.Colon, len(":")))
+
+ case *ast.ChanType:
+ switch n.Dir {
+ case ast.RECV:
+ children = append(children, tok(n.Begin, len("<-chan")))
+ case ast.SEND:
+ children = append(children, tok(n.Begin, len("chan<-")))
+ case ast.RECV | ast.SEND:
+ children = append(children, tok(n.Begin, len("chan")))
+ }
+
+ case *ast.CommClause:
+ if n.Comm == nil {
+ children = append(children,
+ tok(n.Case, len("default")))
+ } else {
+ children = append(children,
+ tok(n.Case, len("case")))
+ }
+ children = append(children, tok(n.Colon, len(":")))
+
+ case *ast.Comment:
+ // nop
+
+ case *ast.CommentGroup:
+ // nop
+
+ case *ast.CompositeLit:
+ children = append(children,
+ tok(n.Lbrace, len("{")),
+ tok(n.Rbrace, len("{")))
+
+ case *ast.DeclStmt:
+ // nop
+
+ case *ast.DeferStmt:
+ children = append(children,
+ tok(n.Defer, len("defer")))
+
+ case *ast.Ellipsis:
+ children = append(children,
+ tok(n.Ellipsis, len("...")))
+
+ case *ast.EmptyStmt:
+ // nop
+
+ case *ast.ExprStmt:
+ // nop
+
+ case *ast.Field:
+ // TODO(adonovan): Field.{Doc,Comment,Tag}?
+
+ case *ast.FieldList:
+ children = append(children,
+ tok(n.Opening, len("(")),
+ tok(n.Closing, len(")")))
+
+ case *ast.File:
+ // TODO test: Doc
+ children = append(children,
+ tok(n.Package, len("package")))
+
+ case *ast.ForStmt:
+ children = append(children,
+ tok(n.For, len("for")))
+
+ case *ast.FuncDecl:
+ // TODO(adonovan): FuncDecl.Comment?
+
+ // Uniquely, FuncDecl breaks the invariant that
+ // preorder traversal yields tokens in lexical order:
+ // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
+ //
+ // As a workaround, we inline the case for FuncType
+ // here and order things correctly.
+ //
+ children = nil // discard ast.Walk(FuncDecl) info subtrees
+ children = append(children, tok(n.Type.Func, len("func")))
+ if n.Recv != nil {
+ children = append(children, n.Recv)
+ }
+ children = append(children, n.Name)
+ if n.Type.Params != nil {
+ children = append(children, n.Type.Params)
+ }
+ if n.Type.Results != nil {
+ children = append(children, n.Type.Results)
+ }
+ if n.Body != nil {
+ children = append(children, n.Body)
+ }
+
+ case *ast.FuncLit:
+ // nop
+
+ case *ast.FuncType:
+ if n.Func != 0 {
+ children = append(children,
+ tok(n.Func, len("func")))
+ }
+
+ case *ast.GenDecl:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+ if n.Lparen != 0 {
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+ }
+
+ case *ast.GoStmt:
+ children = append(children,
+ tok(n.Go, len("go")))
+
+ case *ast.Ident:
+ children = append(children,
+ tok(n.NamePos, len(n.Name)))
+
+ case *ast.IfStmt:
+ children = append(children,
+ tok(n.If, len("if")))
+
+ case *ast.ImportSpec:
+ // TODO(adonovan): ImportSpec.{Doc,EndPos}?
+
+ case *ast.IncDecStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.IndexExpr:
+ children = append(children,
+ tok(n.Lbrack, len("{")),
+ tok(n.Rbrack, len("}")))
+
+ case *ast.InterfaceType:
+ children = append(children,
+ tok(n.Interface, len("interface")))
+
+ case *ast.KeyValueExpr:
+ children = append(children,
+ tok(n.Colon, len(":")))
+
+ case *ast.LabeledStmt:
+ children = append(children,
+ tok(n.Colon, len(":")))
+
+ case *ast.MapType:
+ children = append(children,
+ tok(n.Map, len("map")))
+
+ case *ast.ParenExpr:
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+
+ case *ast.RangeStmt:
+ children = append(children,
+ tok(n.For, len("for")),
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.ReturnStmt:
+ children = append(children,
+ tok(n.Return, len("return")))
+
+ case *ast.SelectStmt:
+ children = append(children,
+ tok(n.Select, len("select")))
+
+ case *ast.SelectorExpr:
+ // nop
+
+ case *ast.SendStmt:
+ children = append(children,
+ tok(n.Arrow, len("<-")))
+
+ case *ast.SliceExpr:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Rbrack, len("]")))
+
+ case *ast.StarExpr:
+ children = append(children, tok(n.Star, len("*")))
+
+ case *ast.StructType:
+ children = append(children, tok(n.Struct, len("struct")))
+
+ case *ast.SwitchStmt:
+ children = append(children, tok(n.Switch, len("switch")))
+
+ case *ast.TypeAssertExpr:
+ children = append(children,
+ tok(n.Lparen-1, len(".")),
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+
+ case *ast.TypeSpec:
+ // TODO(adonovan): TypeSpec.{Doc,Comment}?
+
+ case *ast.TypeSwitchStmt:
+ children = append(children, tok(n.Switch, len("switch")))
+
+ case *ast.UnaryExpr:
+ children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+ case *ast.ValueSpec:
+ // TODO(adonovan): ValueSpec.{Doc,Comment}?
+
+ case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
+ // nop
+ }
+
+ // TODO(adonovan): opt: merge the logic of ast.Inspect() into
+ // the switch above so we can make interleaved callbacks for
+ // both Nodes and Tokens in the right order and avoid the need
+ // to sort.
+ sort.Sort(byPos(children))
+
+ return children
+}
+
+type byPos []ast.Node
+
+func (sl byPos) Len() int {
+ return len(sl)
+}
+func (sl byPos) Less(i, j int) bool {
+ return sl[i].Pos() < sl[j].Pos()
+}
+func (sl byPos) Swap(i, j int) {
+ sl[i], sl[j] = sl[j], sl[i]
+}
+
+// NodeDescription returns a description of the concrete type of n suitable
+// for a user interface.
+//
+// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
+// StarExpr) we could be much more specific given the path to the AST
+// root. Perhaps we should do that.
+//
+func NodeDescription(n ast.Node) string {
+ switch n := n.(type) {
+ case *ast.ArrayType:
+ return "array type"
+ case *ast.AssignStmt:
+ return "assignment"
+ case *ast.BadDecl:
+ return "bad declaration"
+ case *ast.BadExpr:
+ return "bad expression"
+ case *ast.BadStmt:
+ return "bad statement"
+ case *ast.BasicLit:
+ return "basic literal"
+ case *ast.BinaryExpr:
+ return fmt.Sprintf("binary %s operation", n.Op)
+ case *ast.BlockStmt:
+ return "block"
+ case *ast.BranchStmt:
+ switch n.Tok {
+ case token.BREAK:
+ return "break statement"
+ case token.CONTINUE:
+ return "continue statement"
+ case token.GOTO:
+ return "goto statement"
+ case token.FALLTHROUGH:
+ return "fall-through statement"
+ }
+ case *ast.CallExpr:
+ if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
+ return "function call (or conversion)"
+ }
+ return "function call"
+ case *ast.CaseClause:
+ return "case clause"
+ case *ast.ChanType:
+ return "channel type"
+ case *ast.CommClause:
+ return "communication clause"
+ case *ast.Comment:
+ return "comment"
+ case *ast.CommentGroup:
+ return "comment group"
+ case *ast.CompositeLit:
+ return "composite literal"
+ case *ast.DeclStmt:
+ return NodeDescription(n.Decl) + " statement"
+ case *ast.DeferStmt:
+ return "defer statement"
+ case *ast.Ellipsis:
+ return "ellipsis"
+ case *ast.EmptyStmt:
+ return "empty statement"
+ case *ast.ExprStmt:
+ return "expression statement"
+ case *ast.Field:
+ // Can be any of these:
+ // struct {x, y int} -- struct field(s)
+ // struct {T} -- anon struct field
+ // interface {I} -- interface embedding
+ // interface {f()} -- interface method
+ // func (A) func(B) C -- receiver, param(s), result(s)
+ return "field/method/parameter"
+ case *ast.FieldList:
+ return "field/method/parameter list"
+ case *ast.File:
+ return "source file"
+ case *ast.ForStmt:
+ return "for loop"
+ case *ast.FuncDecl:
+ return "function declaration"
+ case *ast.FuncLit:
+ return "function literal"
+ case *ast.FuncType:
+ return "function type"
+ case *ast.GenDecl:
+ switch n.Tok {
+ case token.IMPORT:
+ return "import declaration"
+ case token.CONST:
+ return "constant declaration"
+ case token.TYPE:
+ return "type declaration"
+ case token.VAR:
+ return "variable declaration"
+ }
+ case *ast.GoStmt:
+ return "go statement"
+ case *ast.Ident:
+ return "identifier"
+ case *ast.IfStmt:
+ return "if statement"
+ case *ast.ImportSpec:
+ return "import specification"
+ case *ast.IncDecStmt:
+ if n.Tok == token.INC {
+ return "increment statement"
+ }
+ return "decrement statement"
+ case *ast.IndexExpr:
+ return "index expression"
+ case *ast.InterfaceType:
+ return "interface type"
+ case *ast.KeyValueExpr:
+ return "key/value association"
+ case *ast.LabeledStmt:
+ return "statement label"
+ case *ast.MapType:
+ return "map type"
+ case *ast.Package:
+ return "package"
+ case *ast.ParenExpr:
+ return "parenthesized " + NodeDescription(n.X)
+ case *ast.RangeStmt:
+ return "range loop"
+ case *ast.ReturnStmt:
+ return "return statement"
+ case *ast.SelectStmt:
+ return "select statement"
+ case *ast.SelectorExpr:
+ return "selector"
+ case *ast.SendStmt:
+ return "channel send"
+ case *ast.SliceExpr:
+ return "slice expression"
+ case *ast.StarExpr:
+ return "*-operation" // load/store expr or pointer type
+ case *ast.StructType:
+ return "struct type"
+ case *ast.SwitchStmt:
+ return "switch statement"
+ case *ast.TypeAssertExpr:
+ return "type assertion"
+ case *ast.TypeSpec:
+ return "type specification"
+ case *ast.TypeSwitchStmt:
+ return "type switch"
+ case *ast.UnaryExpr:
+ return fmt.Sprintf("unary %s operation", n.Op)
+ case *ast.ValueSpec:
+ return "value specification"
+
+ }
+ panic(fmt.Sprintf("unexpected node type: %T", n))
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
new file mode 100644
index 000000000..3e4b19536
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
@@ -0,0 +1,481 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package astutil contains common utilities for working with the Go AST.
+package astutil // import "golang.org/x/tools/go/ast/astutil"
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strconv"
+ "strings"
+)
+
+// AddImport adds the import path to the file f, if absent.
+func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
+ return AddNamedImport(fset, f, "", path)
+}
+
+// AddNamedImport adds the import with the given name and path to the file f, if absent.
+// If name is not empty, it is used to rename the import.
+//
+// For example, calling
+// AddNamedImport(fset, f, "pathpkg", "path")
+// adds
+// import pathpkg "path"
+func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
+ if imports(f, name, path) {
+ return false
+ }
+
+ newImport := &ast.ImportSpec{
+ Path: &ast.BasicLit{
+ Kind: token.STRING,
+ Value: strconv.Quote(path),
+ },
+ }
+ if name != "" {
+ newImport.Name = &ast.Ident{Name: name}
+ }
+
+ // Find an import decl to add to.
+ // The goal is to find an existing import
+ // whose import path has the longest shared
+ // prefix with path.
+ var (
+ bestMatch = -1 // length of longest shared prefix
+ lastImport = -1 // index in f.Decls of the file's final import decl
+ impDecl *ast.GenDecl // import decl containing the best match
+ impIndex = -1 // spec index in impDecl containing the best match
+
+ isThirdPartyPath = isThirdParty(path)
+ )
+ for i, decl := range f.Decls {
+ gen, ok := decl.(*ast.GenDecl)
+ if ok && gen.Tok == token.IMPORT {
+ lastImport = i
+ // Do not add to import "C", to avoid disrupting the
+ // association with its doc comment, breaking cgo.
+ if declImports(gen, "C") {
+ continue
+ }
+
+ // Match an empty import decl if that's all that is available.
+ if len(gen.Specs) == 0 && bestMatch == -1 {
+ impDecl = gen
+ }
+
+ // Compute longest shared prefix with imports in this group and find best
+ // matched import spec.
+ // 1. Always prefer import spec with longest shared prefix.
+ // 2. While match length is 0,
+ // - for stdlib package: prefer first import spec.
+ // - for third party package: prefer first third party import spec.
+ // We cannot use last import spec as best match for third party package
+ // because grouped imports are usually placed last by goimports -local
+ // flag.
+ // See issue #19190.
+ seenAnyThirdParty := false
+ for j, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ p := importPath(impspec)
+ n := matchLen(p, path)
+ if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
+ bestMatch = n
+ impDecl = gen
+ impIndex = j
+ }
+ seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
+ }
+ }
+ }
+
+ // If no import decl found, add one after the last import.
+ if impDecl == nil {
+ impDecl = &ast.GenDecl{
+ Tok: token.IMPORT,
+ }
+ if lastImport >= 0 {
+ impDecl.TokPos = f.Decls[lastImport].End()
+ } else {
+ // There are no existing imports.
+ // Our new import, preceded by a blank line, goes after the package declaration
+ // and after the comment, if any, that starts on the same line as the
+ // package declaration.
+ impDecl.TokPos = f.Package
+
+ file := fset.File(f.Package)
+ pkgLine := file.Line(f.Package)
+ for _, c := range f.Comments {
+ if file.Line(c.Pos()) > pkgLine {
+ break
+ }
+ // +2 for a blank line
+ impDecl.TokPos = c.End() + 2
+ }
+ }
+ f.Decls = append(f.Decls, nil)
+ copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
+ f.Decls[lastImport+1] = impDecl
+ }
+
+ // Insert new import at insertAt.
+ insertAt := 0
+ if impIndex >= 0 {
+ // insert after the found import
+ insertAt = impIndex + 1
+ }
+ impDecl.Specs = append(impDecl.Specs, nil)
+ copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
+ impDecl.Specs[insertAt] = newImport
+ pos := impDecl.Pos()
+ if insertAt > 0 {
+ // If there is a comment after an existing import, preserve the comment
+ // position by adding the new import after the comment.
+ if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
+ pos = spec.Comment.End()
+ } else {
+ // Assign same position as the previous import,
+ // so that the sorter sees it as being in the same block.
+ pos = impDecl.Specs[insertAt-1].Pos()
+ }
+ }
+ if newImport.Name != nil {
+ newImport.Name.NamePos = pos
+ }
+ newImport.Path.ValuePos = pos
+ newImport.EndPos = pos
+
+ // Clean up parens. impDecl contains at least one spec.
+ if len(impDecl.Specs) == 1 {
+ // Remove unneeded parens.
+ impDecl.Lparen = token.NoPos
+ } else if !impDecl.Lparen.IsValid() {
+ // impDecl needs parens added.
+ impDecl.Lparen = impDecl.Specs[0].Pos()
+ }
+
+ f.Imports = append(f.Imports, newImport)
+
+ if len(f.Decls) <= 1 {
+ return true
+ }
+
+ // Merge all the import declarations into the first one.
+ var first *ast.GenDecl
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
+ continue
+ }
+ if first == nil {
+ first = gen
+ continue // Don't touch the first one.
+ }
+ // We now know there is more than one package in this import
+ // declaration. Ensure that it ends up parenthesized.
+ first.Lparen = first.Pos()
+ // Move the imports of the other import declaration to the first one.
+ for _, spec := range gen.Specs {
+ spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
+ first.Specs = append(first.Specs, spec)
+ }
+ f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+ i--
+ }
+
+ return true
+}
+
+func isThirdParty(importPath string) bool {
+ // Third party package import path usually contains "." (".com", ".org", ...)
+ // This logic is taken from golang.org/x/tools/imports package.
+ return strings.Contains(importPath, ".")
+}
+
+// DeleteImport deletes the import path from the file f, if present.
+// If there are duplicate import declarations, all matching ones are deleted.
+func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
+ return DeleteNamedImport(fset, f, "", path)
+}
+
+// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
+// If there are duplicate import declarations, all matching ones are deleted.
+func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
+ var delspecs []*ast.ImportSpec
+ var delcomments []*ast.CommentGroup
+
+ // Find the import nodes that import path, if any.
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT {
+ continue
+ }
+ for j := 0; j < len(gen.Specs); j++ {
+ spec := gen.Specs[j]
+ impspec := spec.(*ast.ImportSpec)
+ if importName(impspec) != name || importPath(impspec) != path {
+ continue
+ }
+
+ // We found an import spec that imports path.
+ // Delete it.
+ delspecs = append(delspecs, impspec)
+ deleted = true
+ copy(gen.Specs[j:], gen.Specs[j+1:])
+ gen.Specs = gen.Specs[:len(gen.Specs)-1]
+
+ // If this was the last import spec in this decl,
+ // delete the decl, too.
+ if len(gen.Specs) == 0 {
+ copy(f.Decls[i:], f.Decls[i+1:])
+ f.Decls = f.Decls[:len(f.Decls)-1]
+ i--
+ break
+ } else if len(gen.Specs) == 1 {
+ if impspec.Doc != nil {
+ delcomments = append(delcomments, impspec.Doc)
+ }
+ if impspec.Comment != nil {
+ delcomments = append(delcomments, impspec.Comment)
+ }
+ for _, cg := range f.Comments {
+ // Found comment on the same line as the import spec.
+ if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
+ delcomments = append(delcomments, cg)
+ break
+ }
+ }
+
+ spec := gen.Specs[0].(*ast.ImportSpec)
+
+ // Move the documentation right after the import decl.
+ if spec.Doc != nil {
+ for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
+ fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+ }
+ }
+ for _, cg := range f.Comments {
+ if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
+ for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
+ fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+ }
+ break
+ }
+ }
+ }
+ if j > 0 {
+ lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
+ lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
+ line := fset.Position(impspec.Path.ValuePos).Line
+
+ // We deleted an entry but now there may be
+ // a blank line-sized hole where the import was.
+ if line-lastLine > 1 {
+ // There was a blank line immediately preceding the deleted import,
+ // so there's no need to close the hole.
+ // Do nothing.
+ } else if line != fset.File(gen.Rparen).LineCount() {
+ // There was no blank line. Close the hole.
+ fset.File(gen.Rparen).MergeLine(line)
+ }
+ }
+ j--
+ }
+ }
+
+ // Delete imports from f.Imports.
+ for i := 0; i < len(f.Imports); i++ {
+ imp := f.Imports[i]
+ for j, del := range delspecs {
+ if imp == del {
+ copy(f.Imports[i:], f.Imports[i+1:])
+ f.Imports = f.Imports[:len(f.Imports)-1]
+ copy(delspecs[j:], delspecs[j+1:])
+ delspecs = delspecs[:len(delspecs)-1]
+ i--
+ break
+ }
+ }
+ }
+
+ // Delete comments from f.Comments.
+ for i := 0; i < len(f.Comments); i++ {
+ cg := f.Comments[i]
+ for j, del := range delcomments {
+ if cg == del {
+ copy(f.Comments[i:], f.Comments[i+1:])
+ f.Comments = f.Comments[:len(f.Comments)-1]
+ copy(delcomments[j:], delcomments[j+1:])
+ delcomments = delcomments[:len(delcomments)-1]
+ i--
+ break
+ }
+ }
+ }
+
+ if len(delspecs) > 0 {
+ panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
+ }
+
+ return
+}
+
+// RewriteImport rewrites any import of path oldPath to path newPath.
+func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
+ for _, imp := range f.Imports {
+ if importPath(imp) == oldPath {
+ rewrote = true
+ // record old End, because the default is to compute
+ // it using the length of imp.Path.Value.
+ imp.EndPos = imp.End()
+ imp.Path.Value = strconv.Quote(newPath)
+ }
+ }
+ return
+}
+
+// UsesImport reports whether a given import is used.
+func UsesImport(f *ast.File, path string) (used bool) {
+ spec := importSpec(f, path)
+ if spec == nil {
+ return
+ }
+
+ name := spec.Name.String()
+ switch name {
+ case "":
+ // If the package name is not explicitly specified,
+ // make an educated guess. This is not guaranteed to be correct.
+ lastSlash := strings.LastIndex(path, "/")
+ if lastSlash == -1 {
+ name = path
+ } else {
+ name = path[lastSlash+1:]
+ }
+ case "_", ".":
+ // Not sure if this import is used - err on the side of caution.
+ return true
+ }
+
+ ast.Walk(visitFn(func(n ast.Node) {
+ sel, ok := n.(*ast.SelectorExpr)
+ if ok && isTopName(sel.X, name) {
+ used = true
+ }
+ }), f)
+
+ return
+}
+
+type visitFn func(node ast.Node)
+
+func (fn visitFn) Visit(node ast.Node) ast.Visitor {
+ fn(node)
+ return fn
+}
+
+// imports reports whether f has an import with the specified name and path.
+func imports(f *ast.File, name, path string) bool {
+ for _, s := range f.Imports {
+ if importName(s) == name && importPath(s) == path {
+ return true
+ }
+ }
+ return false
+}
+
+// importSpec returns the import spec if f imports path,
+// or nil otherwise.
+func importSpec(f *ast.File, path string) *ast.ImportSpec {
+ for _, s := range f.Imports {
+ if importPath(s) == path {
+ return s
+ }
+ }
+ return nil
+}
+
+// importName returns the name of s,
+// or "" if the import is not named.
+func importName(s *ast.ImportSpec) string {
+ if s.Name == nil {
+ return ""
+ }
+ return s.Name.Name
+}
+
+// importPath returns the unquoted import path of s,
+// or "" if the path is not properly quoted.
+func importPath(s *ast.ImportSpec) string {
+ t, err := strconv.Unquote(s.Path.Value)
+ if err != nil {
+ return ""
+ }
+ return t
+}
+
+// declImports reports whether gen contains an import of path.
+func declImports(gen *ast.GenDecl, path string) bool {
+ if gen.Tok != token.IMPORT {
+ return false
+ }
+ for _, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ if importPath(impspec) == path {
+ return true
+ }
+ }
+ return false
+}
+
+// matchLen returns the length of the longest path segment prefix shared by x and y.
+func matchLen(x, y string) int {
+ n := 0
+ for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ {
+ if x[i] == '/' {
+ n++
+ }
+ }
+ return n
+}
+
+// isTopName returns true if n is a top-level unresolved identifier with the given name.
+func isTopName(n ast.Expr, name string) bool {
+ id, ok := n.(*ast.Ident)
+ return ok && id.Name == name && id.Obj == nil
+}
+
+// Imports returns the file imports grouped by paragraph.
+func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
+ var groups [][]*ast.ImportSpec
+
+ for _, decl := range f.Decls {
+ genDecl, ok := decl.(*ast.GenDecl)
+ if !ok || genDecl.Tok != token.IMPORT {
+ break
+ }
+
+ group := []*ast.ImportSpec{}
+
+ var lastLine int
+ for _, spec := range genDecl.Specs {
+ importSpec := spec.(*ast.ImportSpec)
+ pos := importSpec.Path.ValuePos
+ line := fset.Position(pos).Line
+ if lastLine > 0 && pos > 0 && line-lastLine > 1 {
+ groups = append(groups, group)
+ group = []*ast.ImportSpec{}
+ }
+ group = append(group, importSpec)
+ lastLine = line
+ }
+ groups = append(groups, group)
+ }
+
+ return groups
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
new file mode 100644
index 000000000..cf72ea990
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
@@ -0,0 +1,477 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import (
+ "fmt"
+ "go/ast"
+ "reflect"
+ "sort"
+)
+
+// An ApplyFunc is invoked by Apply for each node n, even if n is nil,
+// before and/or after the node's children, using a Cursor describing
+// the current node and providing operations on it.
+//
+// The return value of ApplyFunc controls the syntax tree traversal.
+// See Apply for details.
+type ApplyFunc func(*Cursor) bool
+
+// Apply traverses a syntax tree recursively, starting with root,
+// and calling pre and post for each node as described below.
+// Apply returns the syntax tree, possibly modified.
+//
+// If pre is not nil, it is called for each node before the node's
+// children are traversed (pre-order). If pre returns false, no
+// children are traversed, and post is not called for that node.
+//
+// If post is not nil, and a prior call of pre didn't return false,
+// post is called for each node after its children are traversed
+// (post-order). If post returns false, traversal is terminated and
+// Apply returns immediately.
+//
+// Only fields that refer to AST nodes are considered children;
+// i.e., token.Pos, Scopes, Objects, and fields of basic types
+// (strings, etc.) are ignored.
+//
+// Children are traversed in the order in which they appear in the
+// respective node's struct definition. A package's files are
+// traversed in the filenames' alphabetical order.
+//
+func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
+ parent := &struct{ ast.Node }{root}
+ defer func() {
+ if r := recover(); r != nil && r != abort {
+ panic(r)
+ }
+ result = parent.Node
+ }()
+ a := &application{pre: pre, post: post}
+ a.apply(parent, "Node", nil, root)
+ return
+}
+
+var abort = new(int) // singleton, to signal termination of Apply
+
+// A Cursor describes a node encountered during Apply.
+// Information about the node and its parent is available
+// from the Node, Parent, Name, and Index methods.
+//
+// If p is a variable of type and value of the current parent node
+// c.Parent(), and f is the field identifier with name c.Name(),
+// the following invariants hold:
+//
+// p.f == c.Node() if c.Index() < 0
+// p.f[c.Index()] == c.Node() if c.Index() >= 0
+//
+// The methods Replace, Delete, InsertBefore, and InsertAfter
+// can be used to change the AST without disrupting Apply.
+type Cursor struct {
+ parent ast.Node
+ name string
+ iter *iterator // valid if non-nil
+ node ast.Node
+}
+
+// Node returns the current Node.
+func (c *Cursor) Node() ast.Node { return c.node }
+
+// Parent returns the parent of the current Node.
+func (c *Cursor) Parent() ast.Node { return c.parent }
+
+// Name returns the name of the parent Node field that contains the current Node.
+// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns
+// the filename for the current Node.
+func (c *Cursor) Name() string { return c.name }
+
+// Index reports the index >= 0 of the current Node in the slice of Nodes that
+// contains it, or a value < 0 if the current Node is not part of a slice.
+// The index of the current node changes if InsertBefore is called while
+// processing the current node.
+func (c *Cursor) Index() int {
+ if c.iter != nil {
+ return c.iter.index
+ }
+ return -1
+}
+
+// field returns the current node's parent field value.
+func (c *Cursor) field() reflect.Value {
+ return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name)
+}
+
+// Replace replaces the current Node with n.
+// The replacement node is not walked by Apply.
+func (c *Cursor) Replace(n ast.Node) {
+ if _, ok := c.node.(*ast.File); ok {
+ file, ok := n.(*ast.File)
+ if !ok {
+ panic("attempt to replace *ast.File with non-*ast.File")
+ }
+ c.parent.(*ast.Package).Files[c.name] = file
+ return
+ }
+
+ v := c.field()
+ if i := c.Index(); i >= 0 {
+ v = v.Index(i)
+ }
+ v.Set(reflect.ValueOf(n))
+}
+
+// Delete deletes the current Node from its containing slice.
+// If the current Node is not part of a slice, Delete panics.
+// As a special case, if the current node is a package file,
+// Delete removes it from the package's Files map.
+func (c *Cursor) Delete() {
+ if _, ok := c.node.(*ast.File); ok {
+ delete(c.parent.(*ast.Package).Files, c.name)
+ return
+ }
+
+ i := c.Index()
+ if i < 0 {
+ panic("Delete node not contained in slice")
+ }
+ v := c.field()
+ l := v.Len()
+ reflect.Copy(v.Slice(i, l), v.Slice(i+1, l))
+ v.Index(l - 1).Set(reflect.Zero(v.Type().Elem()))
+ v.SetLen(l - 1)
+ c.iter.step--
+}
+
+// InsertAfter inserts n after the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertAfter panics.
+// Apply does not walk n.
+func (c *Cursor) InsertAfter(n ast.Node) {
+ i := c.Index()
+ if i < 0 {
+ panic("InsertAfter node not contained in slice")
+ }
+ v := c.field()
+ v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+ l := v.Len()
+ reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
+ v.Index(i + 1).Set(reflect.ValueOf(n))
+ c.iter.step++
+}
+
+// InsertBefore inserts n before the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertBefore panics.
+// Apply will not walk n.
+func (c *Cursor) InsertBefore(n ast.Node) {
+ i := c.Index()
+ if i < 0 {
+ panic("InsertBefore node not contained in slice")
+ }
+ v := c.field()
+ v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+ l := v.Len()
+ reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
+ v.Index(i).Set(reflect.ValueOf(n))
+ c.iter.index++
+}
+
+// application carries all the shared data so we can pass it around cheaply.
+type application struct {
+ pre, post ApplyFunc
+ cursor Cursor
+ iter iterator
+}
+
+func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) {
+ // convert typed nil into untyped nil
+ if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() {
+ n = nil
+ }
+
+ // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead
+ saved := a.cursor
+ a.cursor.parent = parent
+ a.cursor.name = name
+ a.cursor.iter = iter
+ a.cursor.node = n
+
+ if a.pre != nil && !a.pre(&a.cursor) {
+ a.cursor = saved
+ return
+ }
+
+ // walk children
+ // (the order of the cases matches the order of the corresponding node types in go/ast)
+ switch n := n.(type) {
+ case nil:
+ // nothing to do
+
+ // Comments and fields
+ case *ast.Comment:
+ // nothing to do
+
+ case *ast.CommentGroup:
+ if n != nil {
+ a.applyList(n, "List")
+ }
+
+ case *ast.Field:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Names")
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Tag", nil, n.Tag)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.FieldList:
+ a.applyList(n, "List")
+
+ // Expressions
+ case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
+ // nothing to do
+
+ case *ast.Ellipsis:
+ a.apply(n, "Elt", nil, n.Elt)
+
+ case *ast.FuncLit:
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.CompositeLit:
+ a.apply(n, "Type", nil, n.Type)
+ a.applyList(n, "Elts")
+
+ case *ast.ParenExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.SelectorExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Sel", nil, n.Sel)
+
+ case *ast.IndexExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Index", nil, n.Index)
+
+ case *ast.SliceExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Low", nil, n.Low)
+ a.apply(n, "High", nil, n.High)
+ a.apply(n, "Max", nil, n.Max)
+
+ case *ast.TypeAssertExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Type", nil, n.Type)
+
+ case *ast.CallExpr:
+ a.apply(n, "Fun", nil, n.Fun)
+ a.applyList(n, "Args")
+
+ case *ast.StarExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.UnaryExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.BinaryExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Y", nil, n.Y)
+
+ case *ast.KeyValueExpr:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+
+ // Types
+ case *ast.ArrayType:
+ a.apply(n, "Len", nil, n.Len)
+ a.apply(n, "Elt", nil, n.Elt)
+
+ case *ast.StructType:
+ a.apply(n, "Fields", nil, n.Fields)
+
+ case *ast.FuncType:
+ a.apply(n, "Params", nil, n.Params)
+ a.apply(n, "Results", nil, n.Results)
+
+ case *ast.InterfaceType:
+ a.apply(n, "Methods", nil, n.Methods)
+
+ case *ast.MapType:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+
+ case *ast.ChanType:
+ a.apply(n, "Value", nil, n.Value)
+
+ // Statements
+ case *ast.BadStmt:
+ // nothing to do
+
+ case *ast.DeclStmt:
+ a.apply(n, "Decl", nil, n.Decl)
+
+ case *ast.EmptyStmt:
+ // nothing to do
+
+ case *ast.LabeledStmt:
+ a.apply(n, "Label", nil, n.Label)
+ a.apply(n, "Stmt", nil, n.Stmt)
+
+ case *ast.ExprStmt:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.SendStmt:
+ a.apply(n, "Chan", nil, n.Chan)
+ a.apply(n, "Value", nil, n.Value)
+
+ case *ast.IncDecStmt:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.AssignStmt:
+ a.applyList(n, "Lhs")
+ a.applyList(n, "Rhs")
+
+ case *ast.GoStmt:
+ a.apply(n, "Call", nil, n.Call)
+
+ case *ast.DeferStmt:
+ a.apply(n, "Call", nil, n.Call)
+
+ case *ast.ReturnStmt:
+ a.applyList(n, "Results")
+
+ case *ast.BranchStmt:
+ a.apply(n, "Label", nil, n.Label)
+
+ case *ast.BlockStmt:
+ a.applyList(n, "List")
+
+ case *ast.IfStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Cond", nil, n.Cond)
+ a.apply(n, "Body", nil, n.Body)
+ a.apply(n, "Else", nil, n.Else)
+
+ case *ast.CaseClause:
+ a.applyList(n, "List")
+ a.applyList(n, "Body")
+
+ case *ast.SwitchStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Tag", nil, n.Tag)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.TypeSwitchStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Assign", nil, n.Assign)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.CommClause:
+ a.apply(n, "Comm", nil, n.Comm)
+ a.applyList(n, "Body")
+
+ case *ast.SelectStmt:
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.ForStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Cond", nil, n.Cond)
+ a.apply(n, "Post", nil, n.Post)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.RangeStmt:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Body", nil, n.Body)
+
+ // Declarations
+ case *ast.ImportSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Path", nil, n.Path)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.ValueSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Names")
+ a.apply(n, "Type", nil, n.Type)
+ a.applyList(n, "Values")
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.TypeSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.BadDecl:
+ // nothing to do
+
+ case *ast.GenDecl:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Specs")
+
+ case *ast.FuncDecl:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Recv", nil, n.Recv)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Body", nil, n.Body)
+
+ // Files and packages
+ case *ast.File:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.applyList(n, "Decls")
+ // Don't walk n.Comments; they have either been walked already if
+ // they are Doc comments, or they can be easily walked explicitly.
+
+ case *ast.Package:
+ // collect and sort names for reproducible behavior
+ var names []string
+ for name := range n.Files {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ for _, name := range names {
+ a.apply(n, name, nil, n.Files[name])
+ }
+
+ default:
+ panic(fmt.Sprintf("Apply: unexpected node type %T", n))
+ }
+
+ if a.post != nil && !a.post(&a.cursor) {
+ panic(abort)
+ }
+
+ a.cursor = saved
+}
+
+// An iterator controls iteration over a slice of nodes.
+type iterator struct {
+ index, step int
+}
+
+func (a *application) applyList(parent ast.Node, name string) {
+ // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead
+ saved := a.iter
+ a.iter.index = 0
+ for {
+ // must reload parent.name each time, since cursor modifications might change it
+ v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name)
+ if a.iter.index >= v.Len() {
+ break
+ }
+
+ // element x may be nil in a bad AST - be cautious
+ var x ast.Node
+ if e := v.Index(a.iter.index); e.IsValid() {
+ x = e.Interface().(ast.Node)
+ }
+
+ a.iter.step = 1
+ a.apply(parent, name, &a.iter, x)
+ a.iter.index += a.iter.step
+ }
+ a.iter = saved
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/golang.org/x/tools/go/ast/astutil/util.go
new file mode 100644
index 000000000..763062982
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/util.go
@@ -0,0 +1,14 @@
+package astutil
+
+import "go/ast"
+
+// Unparen returns e with any enclosing parentheses stripped.
+func Unparen(e ast.Expr) ast.Expr {
+ for {
+ p, ok := e.(*ast.ParenExpr)
+ if !ok {
+ return e
+ }
+ e = p.X
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
new file mode 100644
index 000000000..98b3987b9
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
@@ -0,0 +1,109 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package gcexportdata provides functions for locating, reading, and
+// writing export data files containing type information produced by the
+// gc compiler. This package supports go1.7 export data format and all
+// later versions.
+//
+// Although it might seem convenient for this package to live alongside
+// go/types in the standard library, this would cause version skew
+// problems for developer tools that use it, since they must be able to
+// consume the outputs of the gc compiler both before and after a Go
+// update such as from Go 1.7 to Go 1.8. Because this package lives in
+// golang.org/x/tools, sites can update their version of this repo some
+// time before the Go 1.8 release and rebuild and redeploy their
+// developer tools, which will then be able to consume both Go 1.7 and
+// Go 1.8 export data files, so they will work before and after the
+// Go update. (See discussion at https://golang.org/issue/15651.)
+//
+package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/token"
+ "go/types"
+ "io"
+ "io/ioutil"
+
+ "golang.org/x/tools/go/internal/gcimporter"
+)
+
+// Find returns the name of an object (.o) or archive (.a) file
+// containing type information for the specified import path,
+// using the workspace layout conventions of go/build.
+// If no file was found, an empty filename is returned.
+//
+// A relative srcDir is interpreted relative to the current working directory.
+//
+// Find also returns the package's resolved (canonical) import path,
+// reflecting the effects of srcDir and vendoring on importPath.
+func Find(importPath, srcDir string) (filename, path string) {
+ return gcimporter.FindPkg(importPath, srcDir)
+}
+
+// NewReader returns a reader for the export data section of an object
+// (.o) or archive (.a) file read from r. The new reader may provide
+// additional trailing data beyond the end of the export data.
+func NewReader(r io.Reader) (io.Reader, error) {
+ buf := bufio.NewReader(r)
+ _, err := gcimporter.FindExportData(buf)
+ // If we ever switch to a zip-like archive format with the ToC
+ // at the end, we can return the correct portion of export data,
+ // but for now we must return the entire rest of the file.
+ return buf, err
+}
+
+// Read reads export data from in, decodes it, and returns type
+// information for the package.
+// The package name is specified by path.
+// File position information is added to fset.
+//
+// Read may inspect and add to the imports map to ensure that references
+// within the export data to other packages are consistent. The caller
+// must ensure that imports[path] does not exist, or exists but is
+// incomplete (see types.Package.Complete), and Read inserts the
+// resulting package into this map entry.
+//
+// On return, the state of the reader is undefined.
+func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) {
+ data, err := ioutil.ReadAll(in)
+ if err != nil {
+ return nil, fmt.Errorf("reading export data for %q: %v", path, err)
+ }
+
+ if bytes.HasPrefix(data, []byte("!")) {
+ return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path)
+ }
+
+ // The App Engine Go runtime v1.6 uses the old export data format.
+ // TODO(adonovan): delete once v1.7 has been around for a while.
+ if bytes.HasPrefix(data, []byte("package ")) {
+ return gcimporter.ImportData(imports, path, path, bytes.NewReader(data))
+ }
+
+ // The indexed export format starts with an 'i'; the older
+ // binary export format starts with a 'c', 'd', or 'v'
+ // (from "version"). Select appropriate importer.
+ if len(data) > 0 && data[0] == 'i' {
+ _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
+ return pkg, err
+ }
+
+ _, pkg, err := gcimporter.BImportData(fset, imports, data, path)
+ return pkg, err
+}
+
+// Write writes encoded type information for the specified package to out.
+// The FileSet provides file position information for named objects.
+func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
+ b, err := gcimporter.BExportData(fset, pkg)
+ if err != nil {
+ return err
+ }
+ _, err = out.Write(b)
+ return err
+}
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/importer.go b/vendor/golang.org/x/tools/go/gcexportdata/importer.go
new file mode 100644
index 000000000..efe221e7e
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/gcexportdata/importer.go
@@ -0,0 +1,73 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gcexportdata
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+ "os"
+)
+
+// NewImporter returns a new instance of the types.Importer interface
+// that reads type information from export data files written by gc.
+// The Importer also satisfies types.ImporterFrom.
+//
+// Export data files are located using "go build" workspace conventions
+// and the build.Default context.
+//
+// Use this importer instead of go/importer.For("gc", ...) to avoid the
+// version-skew problems described in the documentation of this package,
+// or to control the FileSet or access the imports map populated during
+// package loading.
+//
+func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom {
+ return importer{fset, imports}
+}
+
+type importer struct {
+ fset *token.FileSet
+ imports map[string]*types.Package
+}
+
+func (imp importer) Import(importPath string) (*types.Package, error) {
+ return imp.ImportFrom(importPath, "", 0)
+}
+
+func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) {
+ filename, path := Find(importPath, srcDir)
+ if filename == "" {
+ if importPath == "unsafe" {
+ // Even for unsafe, call Find first in case
+ // the package was vendored.
+ return types.Unsafe, nil
+ }
+ return nil, fmt.Errorf("can't find import: %s", importPath)
+ }
+
+ if pkg, ok := imp.imports[path]; ok && pkg.Complete() {
+ return pkg, nil // cache hit
+ }
+
+ // open file
+ f, err := os.Open(filename)
+ if err != nil {
+ return nil, err
+ }
+ defer func() {
+ f.Close()
+ if err != nil {
+ // add file name to error
+ err = fmt.Errorf("reading export data: %s: %v", filename, err)
+ }
+ }()
+
+ r, err := NewReader(f)
+ if err != nil {
+ return nil, err
+ }
+
+ return Read(r, imp.fset, imp.imports, path)
+}
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/main.go b/vendor/golang.org/x/tools/go/gcexportdata/main.go
new file mode 100644
index 000000000..2713dce64
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/gcexportdata/main.go
@@ -0,0 +1,99 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build ignore
+
+// The gcexportdata command is a diagnostic tool that displays the
+// contents of gc export data files.
+package main
+
+import (
+ "flag"
+ "fmt"
+ "go/token"
+ "go/types"
+ "log"
+ "os"
+
+ "golang.org/x/tools/go/gcexportdata"
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+var packageFlag = flag.String("package", "", "alternative package to print")
+
+func main() {
+ log.SetPrefix("gcexportdata: ")
+ log.SetFlags(0)
+ flag.Usage = func() {
+ fmt.Fprintln(os.Stderr, "usage: gcexportdata [-package path] file.a")
+ }
+ flag.Parse()
+ if flag.NArg() != 1 {
+ flag.Usage()
+ os.Exit(2)
+ }
+ filename := flag.Args()[0]
+
+ f, err := os.Open(filename)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ r, err := gcexportdata.NewReader(f)
+ if err != nil {
+ log.Fatalf("%s: %s", filename, err)
+ }
+
+ // Decode the package.
+ const primary = ""
+ imports := make(map[string]*types.Package)
+ fset := token.NewFileSet()
+ pkg, err := gcexportdata.Read(r, fset, imports, primary)
+ if err != nil {
+ log.Fatalf("%s: %s", filename, err)
+ }
+
+ // Optionally select an indirectly mentioned package.
+ if *packageFlag != "" {
+ pkg = imports[*packageFlag]
+ if pkg == nil {
+ fmt.Fprintf(os.Stderr, "export data file %s does not mention %s; has:\n",
+ filename, *packageFlag)
+ for p := range imports {
+ if p != primary {
+ fmt.Fprintf(os.Stderr, "\t%s\n", p)
+ }
+ }
+ os.Exit(1)
+ }
+ }
+
+ // Print all package-level declarations, including non-exported ones.
+ fmt.Printf("package %s\n", pkg.Name())
+ for _, imp := range pkg.Imports() {
+ fmt.Printf("import %q\n", imp.Path())
+ }
+ qual := func(p *types.Package) string {
+ if pkg == p {
+ return ""
+ }
+ return p.Name()
+ }
+ scope := pkg.Scope()
+ for _, name := range scope.Names() {
+ obj := scope.Lookup(name)
+ fmt.Printf("%s: %s\n",
+ fset.Position(obj.Pos()),
+ types.ObjectString(obj, qual))
+
+ // For types, print each method.
+ if _, ok := obj.(*types.TypeName); ok {
+ for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) {
+ fmt.Printf("%s: %s\n",
+ fset.Position(method.Obj().Pos()),
+ types.SelectionString(method, qual))
+ }
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go
new file mode 100644
index 000000000..0f652ea6f
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go
@@ -0,0 +1,220 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cgo
+
+// This file handles cgo preprocessing of files containing `import "C"`.
+//
+// DESIGN
+//
+// The approach taken is to run the cgo processor on the package's
+// CgoFiles and parse the output, faking the filenames of the
+// resulting ASTs so that the synthetic file containing the C types is
+// called "C" (e.g. "~/go/src/net/C") and the preprocessed files
+// have their original names (e.g. "~/go/src/net/cgo_unix.go"),
+// not the names of the actual temporary files.
+//
+// The advantage of this approach is its fidelity to 'go build'. The
+// downside is that the token.Position.Offset for each AST node is
+// incorrect, being an offset within the temporary file. Line numbers
+// should still be correct because of the //line comments.
+//
+// The logic of this file is mostly plundered from the 'go build'
+// tool, which also invokes the cgo preprocessor.
+//
+//
+// REJECTED ALTERNATIVE
+//
+// An alternative approach that we explored is to extend go/types'
+// Importer mechanism to provide the identity of the importing package
+// so that each time `import "C"` appears it resolves to a different
+// synthetic package containing just the objects needed in that case.
+// The loader would invoke cgo but parse only the cgo_types.go file
+// defining the package-level objects, discarding the other files
+// resulting from preprocessing.
+//
+// The benefit of this approach would have been that source-level
+// syntax information would correspond exactly to the original cgo
+// file, with no preprocessing involved, making source tools like
+// godoc, guru, and eg happy. However, the approach was rejected
+// due to the additional complexity it would impose on go/types. (It
+// made for a beautiful demo, though.)
+//
+// cgo files, despite their *.go extension, are not legal Go source
+// files per the specification since they may refer to unexported
+// members of package "C" such as C.int. Also, a function such as
+// C.getpwent has in effect two types, one matching its C type and one
+// which additionally returns (errno C.int). The cgo preprocessor
+// uses name mangling to distinguish these two functions in the
+// processed code, but go/types would need to duplicate this logic in
+// its handling of function calls, analogous to the treatment of map
+// lookups in which y=m[k] and y,ok=m[k] are both legal.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "io/ioutil"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "regexp"
+ "strings"
+)
+
+// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses
+// the output and returns the resulting ASTs.
+//
+func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) {
+ tmpdir, err := ioutil.TempDir("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C")
+ if err != nil {
+ return nil, err
+ }
+ defer os.RemoveAll(tmpdir)
+
+ pkgdir := bp.Dir
+ if DisplayPath != nil {
+ pkgdir = DisplayPath(pkgdir)
+ }
+
+ cgoFiles, cgoDisplayFiles, err := Run(bp, pkgdir, tmpdir, false)
+ if err != nil {
+ return nil, err
+ }
+ var files []*ast.File
+ for i := range cgoFiles {
+ rd, err := os.Open(cgoFiles[i])
+ if err != nil {
+ return nil, err
+ }
+ display := filepath.Join(bp.Dir, cgoDisplayFiles[i])
+ f, err := parser.ParseFile(fset, display, rd, mode)
+ rd.Close()
+ if err != nil {
+ return nil, err
+ }
+ files = append(files, f)
+ }
+ return files, nil
+}
+
+var cgoRe = regexp.MustCompile(`[/\\:]`)
+
+// Run invokes the cgo preprocessor on bp.CgoFiles and returns two
+// lists of files: the resulting processed files (in temporary
+// directory tmpdir) and the corresponding names of the unprocessed files.
+//
+// Run is adapted from (*builder).cgo in
+// $GOROOT/src/cmd/go/build.go, but these features are unsupported:
+// Objective C, CGOPKGPATH, CGO_FLAGS.
+//
+// If useabs is set to true, absolute paths of the bp.CgoFiles will be passed in
+// to the cgo preprocessor. This in turn will set the // line comments
+// referring to those files to use absolute paths. This is needed for
+// go/packages using the legacy go list support so it is able to find
+// the original files.
+func Run(bp *build.Package, pkgdir, tmpdir string, useabs bool) (files, displayFiles []string, err error) {
+ cgoCPPFLAGS, _, _, _ := cflags(bp, true)
+ _, cgoexeCFLAGS, _, _ := cflags(bp, false)
+
+ if len(bp.CgoPkgConfig) > 0 {
+ pcCFLAGS, err := pkgConfigFlags(bp)
+ if err != nil {
+ return nil, nil, err
+ }
+ cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...)
+ }
+
+ // Allows including _cgo_export.h from .[ch] files in the package.
+ cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir)
+
+ // _cgo_gotypes.go (displayed "C") contains the type definitions.
+ files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go"))
+ displayFiles = append(displayFiles, "C")
+ for _, fn := range bp.CgoFiles {
+ // "foo.cgo1.go" (displayed "foo.go") is the processed Go source.
+ f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_")
+ files = append(files, filepath.Join(tmpdir, f+"cgo1.go"))
+ displayFiles = append(displayFiles, fn)
+ }
+
+ var cgoflags []string
+ if bp.Goroot && bp.ImportPath == "runtime/cgo" {
+ cgoflags = append(cgoflags, "-import_runtime_cgo=false")
+ }
+ if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" {
+ cgoflags = append(cgoflags, "-import_syscall=false")
+ }
+
+ var cgoFiles []string = bp.CgoFiles
+ if useabs {
+ cgoFiles = make([]string, len(bp.CgoFiles))
+ for i := range cgoFiles {
+ cgoFiles[i] = filepath.Join(pkgdir, bp.CgoFiles[i])
+ }
+ }
+
+ args := stringList(
+ "go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--",
+ cgoCPPFLAGS, cgoexeCFLAGS, cgoFiles,
+ )
+ if false {
+ log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir)
+ }
+ cmd := exec.Command(args[0], args[1:]...)
+ cmd.Dir = pkgdir
+ cmd.Stdout = os.Stderr
+ cmd.Stderr = os.Stderr
+ if err := cmd.Run(); err != nil {
+ return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err)
+ }
+
+ return files, displayFiles, nil
+}
+
+// -- unmodified from 'go build' ---------------------------------------
+
+// Return the flags to use when invoking the C or C++ compilers, or cgo.
+func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) {
+ var defaults string
+ if def {
+ defaults = "-g -O2"
+ }
+
+ cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS)
+ cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS)
+ cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS)
+ ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS)
+ return
+}
+
+// envList returns the value of the given environment variable broken
+// into fields, using the default value when the variable is empty.
+func envList(key, def string) []string {
+ v := os.Getenv(key)
+ if v == "" {
+ v = def
+ }
+ return strings.Fields(v)
+}
+
+// stringList's arguments should be a sequence of string or []string values.
+// stringList flattens them into a single []string.
+func stringList(args ...interface{}) []string {
+ var x []string
+ for _, arg := range args {
+ switch arg := arg.(type) {
+ case []string:
+ x = append(x, arg...)
+ case string:
+ x = append(x, arg)
+ default:
+ panic("stringList: invalid argument")
+ }
+ }
+ return x
+}
diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go
new file mode 100644
index 000000000..b5bb95a63
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go
@@ -0,0 +1,39 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cgo
+
+import (
+ "errors"
+ "fmt"
+ "go/build"
+ "os/exec"
+ "strings"
+)
+
+// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints.
+func pkgConfig(mode string, pkgs []string) (flags []string, err error) {
+ cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...)
+ out, err := cmd.CombinedOutput()
+ if err != nil {
+ s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err)
+ if len(out) > 0 {
+ s = fmt.Sprintf("%s: %s", s, out)
+ }
+ return nil, errors.New(s)
+ }
+ if len(out) > 0 {
+ flags = strings.Fields(string(out))
+ }
+ return
+}
+
+// pkgConfigFlags calls pkg-config if needed and returns the cflags
+// needed to build the package.
+func pkgConfigFlags(p *build.Package) (cflags []string, err error) {
+ if len(p.CgoPkgConfig) == 0 {
+ return nil, nil
+ }
+ return pkgConfig("--cflags", p.CgoPkgConfig)
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
new file mode 100644
index 000000000..9f6504914
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
@@ -0,0 +1,852 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Binary package export.
+// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go;
+// see that file for specification of the format.
+
+package gcimporter
+
+import (
+ "bytes"
+ "encoding/binary"
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "math"
+ "math/big"
+ "sort"
+ "strings"
+)
+
+// If debugFormat is set, each integer and string value is preceded by a marker
+// and position information in the encoding. This mechanism permits an importer
+// to recognize immediately when it is out of sync. The importer recognizes this
+// mode automatically (i.e., it can import export data produced with debugging
+// support even if debugFormat is not set at the time of import). This mode will
+// lead to massively larger export data (by a factor of 2 to 3) and should only
+// be enabled during development and debugging.
+//
+// NOTE: This flag is the first flag to enable if importing dies because of
+// (suspected) format errors, and whenever a change is made to the format.
+const debugFormat = false // default: false
+
+// If trace is set, debugging output is printed to std out.
+const trace = false // default: false
+
+// Current export format version. Increase with each format change.
+// Note: The latest binary (non-indexed) export format is at version 6.
+// This exporter is still at level 4, but it doesn't matter since
+// the binary importer can handle older versions just fine.
+// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
+// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE
+// 4: type name objects support type aliases, uses aliasTag
+// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
+// 2: removed unused bool in ODCL export (compiler only)
+// 1: header format change (more regular), export package for _ struct fields
+// 0: Go1.7 encoding
+const exportVersion = 4
+
+// trackAllTypes enables cycle tracking for all types, not just named
+// types. The existing compiler invariants assume that unnamed types
+// that are not completely set up are not used, or else there are spurious
+// errors.
+// If disabled, only named types are tracked, possibly leading to slightly
+// less efficient encoding in rare cases. It also prevents the export of
+// some corner-case type declarations (but those are not handled correctly
+// with the textual export format either).
+// TODO(gri) enable and remove once issues caused by it are fixed
+const trackAllTypes = false
+
+type exporter struct {
+ fset *token.FileSet
+ out bytes.Buffer
+
+ // object -> index maps, indexed in order of serialization
+ strIndex map[string]int
+ pkgIndex map[*types.Package]int
+ typIndex map[types.Type]int
+
+ // position encoding
+ posInfoFormat bool
+ prevFile string
+ prevLine int
+
+ // debugging support
+ written int // bytes written
+ indent int // for trace
+}
+
+// internalError represents an error generated inside this package.
+type internalError string
+
+func (e internalError) Error() string { return "gcimporter: " + string(e) }
+
+func internalErrorf(format string, args ...interface{}) error {
+ return internalError(fmt.Sprintf(format, args...))
+}
+
+// BExportData returns binary export data for pkg.
+// If no file set is provided, position info will be missing.
+func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
+ defer func() {
+ if e := recover(); e != nil {
+ if ierr, ok := e.(internalError); ok {
+ err = ierr
+ return
+ }
+ // Not an internal error; panic again.
+ panic(e)
+ }
+ }()
+
+ p := exporter{
+ fset: fset,
+ strIndex: map[string]int{"": 0}, // empty string is mapped to 0
+ pkgIndex: make(map[*types.Package]int),
+ typIndex: make(map[types.Type]int),
+ posInfoFormat: true, // TODO(gri) might become a flag, eventually
+ }
+
+ // write version info
+ // The version string must start with "version %d" where %d is the version
+ // number. Additional debugging information may follow after a blank; that
+ // text is ignored by the importer.
+ p.rawStringln(fmt.Sprintf("version %d", exportVersion))
+ var debug string
+ if debugFormat {
+ debug = "debug"
+ }
+ p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly
+ p.bool(trackAllTypes)
+ p.bool(p.posInfoFormat)
+
+ // --- generic export data ---
+
+ // populate type map with predeclared "known" types
+ for index, typ := range predeclared {
+ p.typIndex[typ] = index
+ }
+ if len(p.typIndex) != len(predeclared) {
+ return nil, internalError("duplicate entries in type map?")
+ }
+
+ // write package data
+ p.pkg(pkg, true)
+ if trace {
+ p.tracef("\n")
+ }
+
+ // write objects
+ objcount := 0
+ scope := pkg.Scope()
+ for _, name := range scope.Names() {
+ if !ast.IsExported(name) {
+ continue
+ }
+ if trace {
+ p.tracef("\n")
+ }
+ p.obj(scope.Lookup(name))
+ objcount++
+ }
+
+ // indicate end of list
+ if trace {
+ p.tracef("\n")
+ }
+ p.tag(endTag)
+
+ // for self-verification only (redundant)
+ p.int(objcount)
+
+ if trace {
+ p.tracef("\n")
+ }
+
+ // --- end of export data ---
+
+ return p.out.Bytes(), nil
+}
+
+func (p *exporter) pkg(pkg *types.Package, emptypath bool) {
+ if pkg == nil {
+ panic(internalError("unexpected nil pkg"))
+ }
+
+ // if we saw the package before, write its index (>= 0)
+ if i, ok := p.pkgIndex[pkg]; ok {
+ p.index('P', i)
+ return
+ }
+
+ // otherwise, remember the package, write the package tag (< 0) and package data
+ if trace {
+ p.tracef("P%d = { ", len(p.pkgIndex))
+ defer p.tracef("} ")
+ }
+ p.pkgIndex[pkg] = len(p.pkgIndex)
+
+ p.tag(packageTag)
+ p.string(pkg.Name())
+ if emptypath {
+ p.string("")
+ } else {
+ p.string(pkg.Path())
+ }
+}
+
+func (p *exporter) obj(obj types.Object) {
+ switch obj := obj.(type) {
+ case *types.Const:
+ p.tag(constTag)
+ p.pos(obj)
+ p.qualifiedName(obj)
+ p.typ(obj.Type())
+ p.value(obj.Val())
+
+ case *types.TypeName:
+ if obj.IsAlias() {
+ p.tag(aliasTag)
+ p.pos(obj)
+ p.qualifiedName(obj)
+ } else {
+ p.tag(typeTag)
+ }
+ p.typ(obj.Type())
+
+ case *types.Var:
+ p.tag(varTag)
+ p.pos(obj)
+ p.qualifiedName(obj)
+ p.typ(obj.Type())
+
+ case *types.Func:
+ p.tag(funcTag)
+ p.pos(obj)
+ p.qualifiedName(obj)
+ sig := obj.Type().(*types.Signature)
+ p.paramList(sig.Params(), sig.Variadic())
+ p.paramList(sig.Results(), false)
+
+ default:
+ panic(internalErrorf("unexpected object %v (%T)", obj, obj))
+ }
+}
+
+func (p *exporter) pos(obj types.Object) {
+ if !p.posInfoFormat {
+ return
+ }
+
+ file, line := p.fileLine(obj)
+ if file == p.prevFile {
+ // common case: write line delta
+ // delta == 0 means different file or no line change
+ delta := line - p.prevLine
+ p.int(delta)
+ if delta == 0 {
+ p.int(-1) // -1 means no file change
+ }
+ } else {
+ // different file
+ p.int(0)
+ // Encode filename as length of common prefix with previous
+ // filename, followed by (possibly empty) suffix. Filenames
+ // frequently share path prefixes, so this can save a lot
+ // of space and make export data size less dependent on file
+ // path length. The suffix is unlikely to be empty because
+ // file names tend to end in ".go".
+ n := commonPrefixLen(p.prevFile, file)
+ p.int(n) // n >= 0
+ p.string(file[n:]) // write suffix only
+ p.prevFile = file
+ p.int(line)
+ }
+ p.prevLine = line
+}
+
+func (p *exporter) fileLine(obj types.Object) (file string, line int) {
+ if p.fset != nil {
+ pos := p.fset.Position(obj.Pos())
+ file = pos.Filename
+ line = pos.Line
+ }
+ return
+}
+
+func commonPrefixLen(a, b string) int {
+ if len(a) > len(b) {
+ a, b = b, a
+ }
+ // len(a) <= len(b)
+ i := 0
+ for i < len(a) && a[i] == b[i] {
+ i++
+ }
+ return i
+}
+
+func (p *exporter) qualifiedName(obj types.Object) {
+ p.string(obj.Name())
+ p.pkg(obj.Pkg(), false)
+}
+
+func (p *exporter) typ(t types.Type) {
+ if t == nil {
+ panic(internalError("nil type"))
+ }
+
+ // Possible optimization: Anonymous pointer types *T where
+ // T is a named type are common. We could canonicalize all
+ // such types *T to a single type PT = *T. This would lead
+ // to at most one *T entry in typIndex, and all future *T's
+ // would be encoded as the respective index directly. Would
+ // save 1 byte (pointerTag) per *T and reduce the typIndex
+ // size (at the cost of a canonicalization map). We can do
+ // this later, without encoding format change.
+
+ // if we saw the type before, write its index (>= 0)
+ if i, ok := p.typIndex[t]; ok {
+ p.index('T', i)
+ return
+ }
+
+ // otherwise, remember the type, write the type tag (< 0) and type data
+ if trackAllTypes {
+ if trace {
+ p.tracef("T%d = {>\n", len(p.typIndex))
+ defer p.tracef("<\n} ")
+ }
+ p.typIndex[t] = len(p.typIndex)
+ }
+
+ switch t := t.(type) {
+ case *types.Named:
+ if !trackAllTypes {
+ // if we don't track all types, track named types now
+ p.typIndex[t] = len(p.typIndex)
+ }
+
+ p.tag(namedTag)
+ p.pos(t.Obj())
+ p.qualifiedName(t.Obj())
+ p.typ(t.Underlying())
+ if !types.IsInterface(t) {
+ p.assocMethods(t)
+ }
+
+ case *types.Array:
+ p.tag(arrayTag)
+ p.int64(t.Len())
+ p.typ(t.Elem())
+
+ case *types.Slice:
+ p.tag(sliceTag)
+ p.typ(t.Elem())
+
+ case *dddSlice:
+ p.tag(dddTag)
+ p.typ(t.elem)
+
+ case *types.Struct:
+ p.tag(structTag)
+ p.fieldList(t)
+
+ case *types.Pointer:
+ p.tag(pointerTag)
+ p.typ(t.Elem())
+
+ case *types.Signature:
+ p.tag(signatureTag)
+ p.paramList(t.Params(), t.Variadic())
+ p.paramList(t.Results(), false)
+
+ case *types.Interface:
+ p.tag(interfaceTag)
+ p.iface(t)
+
+ case *types.Map:
+ p.tag(mapTag)
+ p.typ(t.Key())
+ p.typ(t.Elem())
+
+ case *types.Chan:
+ p.tag(chanTag)
+ p.int(int(3 - t.Dir())) // hack
+ p.typ(t.Elem())
+
+ default:
+ panic(internalErrorf("unexpected type %T: %s", t, t))
+ }
+}
+
+func (p *exporter) assocMethods(named *types.Named) {
+ // Sort methods (for determinism).
+ var methods []*types.Func
+ for i := 0; i < named.NumMethods(); i++ {
+ methods = append(methods, named.Method(i))
+ }
+ sort.Sort(methodsByName(methods))
+
+ p.int(len(methods))
+
+ if trace && methods != nil {
+ p.tracef("associated methods {>\n")
+ }
+
+ for i, m := range methods {
+ if trace && i > 0 {
+ p.tracef("\n")
+ }
+
+ p.pos(m)
+ name := m.Name()
+ p.string(name)
+ if !exported(name) {
+ p.pkg(m.Pkg(), false)
+ }
+
+ sig := m.Type().(*types.Signature)
+ p.paramList(types.NewTuple(sig.Recv()), false)
+ p.paramList(sig.Params(), sig.Variadic())
+ p.paramList(sig.Results(), false)
+ p.int(0) // dummy value for go:nointerface pragma - ignored by importer
+ }
+
+ if trace && methods != nil {
+ p.tracef("<\n} ")
+ }
+}
+
+type methodsByName []*types.Func
+
+func (x methodsByName) Len() int { return len(x) }
+func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() }
+
+func (p *exporter) fieldList(t *types.Struct) {
+ if trace && t.NumFields() > 0 {
+ p.tracef("fields {>\n")
+ defer p.tracef("<\n} ")
+ }
+
+ p.int(t.NumFields())
+ for i := 0; i < t.NumFields(); i++ {
+ if trace && i > 0 {
+ p.tracef("\n")
+ }
+ p.field(t.Field(i))
+ p.string(t.Tag(i))
+ }
+}
+
+func (p *exporter) field(f *types.Var) {
+ if !f.IsField() {
+ panic(internalError("field expected"))
+ }
+
+ p.pos(f)
+ p.fieldName(f)
+ p.typ(f.Type())
+}
+
+func (p *exporter) iface(t *types.Interface) {
+ // TODO(gri): enable importer to load embedded interfaces,
+ // then emit Embeddeds and ExplicitMethods separately here.
+ p.int(0)
+
+ n := t.NumMethods()
+ if trace && n > 0 {
+ p.tracef("methods {>\n")
+ defer p.tracef("<\n} ")
+ }
+ p.int(n)
+ for i := 0; i < n; i++ {
+ if trace && i > 0 {
+ p.tracef("\n")
+ }
+ p.method(t.Method(i))
+ }
+}
+
+func (p *exporter) method(m *types.Func) {
+ sig := m.Type().(*types.Signature)
+ if sig.Recv() == nil {
+ panic(internalError("method expected"))
+ }
+
+ p.pos(m)
+ p.string(m.Name())
+ if m.Name() != "_" && !ast.IsExported(m.Name()) {
+ p.pkg(m.Pkg(), false)
+ }
+
+ // interface method; no need to encode receiver.
+ p.paramList(sig.Params(), sig.Variadic())
+ p.paramList(sig.Results(), false)
+}
+
+func (p *exporter) fieldName(f *types.Var) {
+ name := f.Name()
+
+ if f.Anonymous() {
+ // anonymous field - we distinguish between 3 cases:
+ // 1) field name matches base type name and is exported
+ // 2) field name matches base type name and is not exported
+ // 3) field name doesn't match base type name (alias name)
+ bname := basetypeName(f.Type())
+ if name == bname {
+ if ast.IsExported(name) {
+ name = "" // 1) we don't need to know the field name or package
+ } else {
+ name = "?" // 2) use unexported name "?" to force package export
+ }
+ } else {
+ // 3) indicate alias and export name as is
+ // (this requires an extra "@" but this is a rare case)
+ p.string("@")
+ }
+ }
+
+ p.string(name)
+ if name != "" && !ast.IsExported(name) {
+ p.pkg(f.Pkg(), false)
+ }
+}
+
+func basetypeName(typ types.Type) string {
+ switch typ := deref(typ).(type) {
+ case *types.Basic:
+ return typ.Name()
+ case *types.Named:
+ return typ.Obj().Name()
+ default:
+ return "" // unnamed type
+ }
+}
+
+func (p *exporter) paramList(params *types.Tuple, variadic bool) {
+ // use negative length to indicate unnamed parameters
+ // (look at the first parameter only since either all
+ // names are present or all are absent)
+ n := params.Len()
+ if n > 0 && params.At(0).Name() == "" {
+ n = -n
+ }
+ p.int(n)
+ for i := 0; i < params.Len(); i++ {
+ q := params.At(i)
+ t := q.Type()
+ if variadic && i == params.Len()-1 {
+ t = &dddSlice{t.(*types.Slice).Elem()}
+ }
+ p.typ(t)
+ if n > 0 {
+ name := q.Name()
+ p.string(name)
+ if name != "_" {
+ p.pkg(q.Pkg(), false)
+ }
+ }
+ p.string("") // no compiler-specific info
+ }
+}
+
+func (p *exporter) value(x constant.Value) {
+ if trace {
+ p.tracef("= ")
+ }
+
+ switch x.Kind() {
+ case constant.Bool:
+ tag := falseTag
+ if constant.BoolVal(x) {
+ tag = trueTag
+ }
+ p.tag(tag)
+
+ case constant.Int:
+ if v, exact := constant.Int64Val(x); exact {
+ // common case: x fits into an int64 - use compact encoding
+ p.tag(int64Tag)
+ p.int64(v)
+ return
+ }
+ // uncommon case: large x - use float encoding
+ // (powers of 2 will be encoded efficiently with exponent)
+ p.tag(floatTag)
+ p.float(constant.ToFloat(x))
+
+ case constant.Float:
+ p.tag(floatTag)
+ p.float(x)
+
+ case constant.Complex:
+ p.tag(complexTag)
+ p.float(constant.Real(x))
+ p.float(constant.Imag(x))
+
+ case constant.String:
+ p.tag(stringTag)
+ p.string(constant.StringVal(x))
+
+ case constant.Unknown:
+ // package contains type errors
+ p.tag(unknownTag)
+
+ default:
+ panic(internalErrorf("unexpected value %v (%T)", x, x))
+ }
+}
+
+func (p *exporter) float(x constant.Value) {
+ if x.Kind() != constant.Float {
+ panic(internalErrorf("unexpected constant %v, want float", x))
+ }
+ // extract sign (there is no -0)
+ sign := constant.Sign(x)
+ if sign == 0 {
+ // x == 0
+ p.int(0)
+ return
+ }
+ // x != 0
+
+ var f big.Float
+ if v, exact := constant.Float64Val(x); exact {
+ // float64
+ f.SetFloat64(v)
+ } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
+ // TODO(gri): add big.Rat accessor to constant.Value.
+ r := valueToRat(num)
+ f.SetRat(r.Quo(r, valueToRat(denom)))
+ } else {
+ // Value too large to represent as a fraction => inaccessible.
+ // TODO(gri): add big.Float accessor to constant.Value.
+ f.SetFloat64(math.MaxFloat64) // FIXME
+ }
+
+ // extract exponent such that 0.5 <= m < 1.0
+ var m big.Float
+ exp := f.MantExp(&m)
+
+ // extract mantissa as *big.Int
+ // - set exponent large enough so mant satisfies mant.IsInt()
+ // - get *big.Int from mant
+ m.SetMantExp(&m, int(m.MinPrec()))
+ mant, acc := m.Int(nil)
+ if acc != big.Exact {
+ panic(internalError("internal error"))
+ }
+
+ p.int(sign)
+ p.int(exp)
+ p.string(string(mant.Bytes()))
+}
+
+func valueToRat(x constant.Value) *big.Rat {
+ // Convert little-endian to big-endian.
+ // I can't believe this is necessary.
+ bytes := constant.Bytes(x)
+ for i := 0; i < len(bytes)/2; i++ {
+ bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
+ }
+ return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
+}
+
+func (p *exporter) bool(b bool) bool {
+ if trace {
+ p.tracef("[")
+ defer p.tracef("= %v] ", b)
+ }
+
+ x := 0
+ if b {
+ x = 1
+ }
+ p.int(x)
+ return b
+}
+
+// ----------------------------------------------------------------------------
+// Low-level encoders
+
+func (p *exporter) index(marker byte, index int) {
+ if index < 0 {
+ panic(internalError("invalid index < 0"))
+ }
+ if debugFormat {
+ p.marker('t')
+ }
+ if trace {
+ p.tracef("%c%d ", marker, index)
+ }
+ p.rawInt64(int64(index))
+}
+
+func (p *exporter) tag(tag int) {
+ if tag >= 0 {
+ panic(internalError("invalid tag >= 0"))
+ }
+ if debugFormat {
+ p.marker('t')
+ }
+ if trace {
+ p.tracef("%s ", tagString[-tag])
+ }
+ p.rawInt64(int64(tag))
+}
+
+func (p *exporter) int(x int) {
+ p.int64(int64(x))
+}
+
+func (p *exporter) int64(x int64) {
+ if debugFormat {
+ p.marker('i')
+ }
+ if trace {
+ p.tracef("%d ", x)
+ }
+ p.rawInt64(x)
+}
+
+func (p *exporter) string(s string) {
+ if debugFormat {
+ p.marker('s')
+ }
+ if trace {
+ p.tracef("%q ", s)
+ }
+ // if we saw the string before, write its index (>= 0)
+ // (the empty string is mapped to 0)
+ if i, ok := p.strIndex[s]; ok {
+ p.rawInt64(int64(i))
+ return
+ }
+ // otherwise, remember string and write its negative length and bytes
+ p.strIndex[s] = len(p.strIndex)
+ p.rawInt64(-int64(len(s)))
+ for i := 0; i < len(s); i++ {
+ p.rawByte(s[i])
+ }
+}
+
+// marker emits a marker byte and position information which makes
+// it easy for a reader to detect if it is "out of sync". Used for
+// debugFormat format only.
+func (p *exporter) marker(m byte) {
+ p.rawByte(m)
+ // Enable this for help tracking down the location
+ // of an incorrect marker when running in debugFormat.
+ if false && trace {
+ p.tracef("#%d ", p.written)
+ }
+ p.rawInt64(int64(p.written))
+}
+
+// rawInt64 should only be used by low-level encoders.
+func (p *exporter) rawInt64(x int64) {
+ var tmp [binary.MaxVarintLen64]byte
+ n := binary.PutVarint(tmp[:], x)
+ for i := 0; i < n; i++ {
+ p.rawByte(tmp[i])
+ }
+}
+
+// rawStringln should only be used to emit the initial version string.
+func (p *exporter) rawStringln(s string) {
+ for i := 0; i < len(s); i++ {
+ p.rawByte(s[i])
+ }
+ p.rawByte('\n')
+}
+
+// rawByte is the bottleneck interface to write to p.out.
+// rawByte escapes b as follows (any encoding does that
+// hides '$'):
+//
+// '$' => '|' 'S'
+// '|' => '|' '|'
+//
+// Necessary so other tools can find the end of the
+// export data by searching for "$$".
+// rawByte should only be used by low-level encoders.
+func (p *exporter) rawByte(b byte) {
+ switch b {
+ case '$':
+ // write '$' as '|' 'S'
+ b = 'S'
+ fallthrough
+ case '|':
+ // write '|' as '|' '|'
+ p.out.WriteByte('|')
+ p.written++
+ }
+ p.out.WriteByte(b)
+ p.written++
+}
+
+// tracef is like fmt.Printf but it rewrites the format string
+// to take care of indentation.
+func (p *exporter) tracef(format string, args ...interface{}) {
+ if strings.ContainsAny(format, "<>\n") {
+ var buf bytes.Buffer
+ for i := 0; i < len(format); i++ {
+ // no need to deal with runes
+ ch := format[i]
+ switch ch {
+ case '>':
+ p.indent++
+ continue
+ case '<':
+ p.indent--
+ continue
+ }
+ buf.WriteByte(ch)
+ if ch == '\n' {
+ for j := p.indent; j > 0; j-- {
+ buf.WriteString(". ")
+ }
+ }
+ }
+ format = buf.String()
+ }
+ fmt.Printf(format, args...)
+}
+
+// Debugging support.
+// (tagString is only used when tracing is enabled)
+var tagString = [...]string{
+ // Packages
+ -packageTag: "package",
+
+ // Types
+ -namedTag: "named type",
+ -arrayTag: "array",
+ -sliceTag: "slice",
+ -dddTag: "ddd",
+ -structTag: "struct",
+ -pointerTag: "pointer",
+ -signatureTag: "signature",
+ -interfaceTag: "interface",
+ -mapTag: "map",
+ -chanTag: "chan",
+
+ // Values
+ -falseTag: "false",
+ -trueTag: "true",
+ -int64Tag: "int64",
+ -floatTag: "float",
+ -fractionTag: "fraction",
+ -complexTag: "complex",
+ -stringTag: "string",
+ -unknownTag: "unknown",
+
+ // Type aliases
+ -aliasTag: "alias",
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go
new file mode 100644
index 000000000..b31eacfc0
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go
@@ -0,0 +1,1028 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go.
+
+package gcimporter
+
+import (
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "unicode"
+ "unicode/utf8"
+)
+
+type importer struct {
+ imports map[string]*types.Package
+ data []byte
+ importpath string
+ buf []byte // for reading strings
+ version int // export format version
+
+ // object lists
+ strList []string // in order of appearance
+ pathList []string // in order of appearance
+ pkgList []*types.Package // in order of appearance
+ typList []types.Type // in order of appearance
+ interfaceList []*types.Interface // for delayed completion only
+ trackAllTypes bool
+
+ // position encoding
+ posInfoFormat bool
+ prevFile string
+ prevLine int
+ fake fakeFileSet
+
+ // debugging support
+ debugFormat bool
+ read int // bytes read
+}
+
+// BImportData imports a package from the serialized package data
+// and returns the number of bytes consumed and a reference to the package.
+// If the export data version is not recognized or the format is otherwise
+// compromised, an error is returned.
+func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
+ // catch panics and return them as errors
+ const currentVersion = 6
+ version := -1 // unknown version
+ defer func() {
+ if e := recover(); e != nil {
+ // Return a (possibly nil or incomplete) package unchanged (see #16088).
+ if version > currentVersion {
+ err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
+ } else {
+ err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
+ }
+ }
+ }()
+
+ p := importer{
+ imports: imports,
+ data: data,
+ importpath: path,
+ version: version,
+ strList: []string{""}, // empty string is mapped to 0
+ pathList: []string{""}, // empty string is mapped to 0
+ fake: fakeFileSet{
+ fset: fset,
+ files: make(map[string]*token.File),
+ },
+ }
+
+ // read version info
+ var versionstr string
+ if b := p.rawByte(); b == 'c' || b == 'd' {
+ // Go1.7 encoding; first byte encodes low-level
+ // encoding format (compact vs debug).
+ // For backward-compatibility only (avoid problems with
+ // old installed packages). Newly compiled packages use
+ // the extensible format string.
+ // TODO(gri) Remove this support eventually; after Go1.8.
+ if b == 'd' {
+ p.debugFormat = true
+ }
+ p.trackAllTypes = p.rawByte() == 'a'
+ p.posInfoFormat = p.int() != 0
+ versionstr = p.string()
+ if versionstr == "v1" {
+ version = 0
+ }
+ } else {
+ // Go1.8 extensible encoding
+ // read version string and extract version number (ignore anything after the version number)
+ versionstr = p.rawStringln(b)
+ if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" {
+ if v, err := strconv.Atoi(s[1]); err == nil && v > 0 {
+ version = v
+ }
+ }
+ }
+ p.version = version
+
+ // read version specific flags - extend as necessary
+ switch p.version {
+ // case currentVersion:
+ // ...
+ // fallthrough
+ case currentVersion, 5, 4, 3, 2, 1:
+ p.debugFormat = p.rawStringln(p.rawByte()) == "debug"
+ p.trackAllTypes = p.int() != 0
+ p.posInfoFormat = p.int() != 0
+ case 0:
+ // Go1.7 encoding format - nothing to do here
+ default:
+ errorf("unknown bexport format version %d (%q)", p.version, versionstr)
+ }
+
+ // --- generic export data ---
+
+ // populate typList with predeclared "known" types
+ p.typList = append(p.typList, predeclared...)
+
+ // read package data
+ pkg = p.pkg()
+
+ // read objects of phase 1 only (see cmd/compile/internal/gc/bexport.go)
+ objcount := 0
+ for {
+ tag := p.tagOrIndex()
+ if tag == endTag {
+ break
+ }
+ p.obj(tag)
+ objcount++
+ }
+
+ // self-verification
+ if count := p.int(); count != objcount {
+ errorf("got %d objects; want %d", objcount, count)
+ }
+
+ // ignore compiler-specific import data
+
+ // complete interfaces
+ // TODO(gri) re-investigate if we still need to do this in a delayed fashion
+ for _, typ := range p.interfaceList {
+ typ.Complete()
+ }
+
+ // record all referenced packages as imports
+ list := append(([]*types.Package)(nil), p.pkgList[1:]...)
+ sort.Sort(byPath(list))
+ pkg.SetImports(list)
+
+ // package was imported completely and without errors
+ pkg.MarkComplete()
+
+ return p.read, pkg, nil
+}
+
+func errorf(format string, args ...interface{}) {
+ panic(fmt.Sprintf(format, args...))
+}
+
+func (p *importer) pkg() *types.Package {
+ // if the package was seen before, i is its index (>= 0)
+ i := p.tagOrIndex()
+ if i >= 0 {
+ return p.pkgList[i]
+ }
+
+ // otherwise, i is the package tag (< 0)
+ if i != packageTag {
+ errorf("unexpected package tag %d version %d", i, p.version)
+ }
+
+ // read package data
+ name := p.string()
+ var path string
+ if p.version >= 5 {
+ path = p.path()
+ } else {
+ path = p.string()
+ }
+ if p.version >= 6 {
+ p.int() // package height; unused by go/types
+ }
+
+ // we should never see an empty package name
+ if name == "" {
+ errorf("empty package name in import")
+ }
+
+ // an empty path denotes the package we are currently importing;
+ // it must be the first package we see
+ if (path == "") != (len(p.pkgList) == 0) {
+ errorf("package path %q for pkg index %d", path, len(p.pkgList))
+ }
+
+ // if the package was imported before, use that one; otherwise create a new one
+ if path == "" {
+ path = p.importpath
+ }
+ pkg := p.imports[path]
+ if pkg == nil {
+ pkg = types.NewPackage(path, name)
+ p.imports[path] = pkg
+ } else if pkg.Name() != name {
+ errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path)
+ }
+ p.pkgList = append(p.pkgList, pkg)
+
+ return pkg
+}
+
+// objTag returns the tag value for each object kind.
+func objTag(obj types.Object) int {
+ switch obj.(type) {
+ case *types.Const:
+ return constTag
+ case *types.TypeName:
+ return typeTag
+ case *types.Var:
+ return varTag
+ case *types.Func:
+ return funcTag
+ default:
+ errorf("unexpected object: %v (%T)", obj, obj) // panics
+ panic("unreachable")
+ }
+}
+
+func sameObj(a, b types.Object) bool {
+ // Because unnamed types are not canonicalized, we cannot simply compare types for
+ // (pointer) identity.
+ // Ideally we'd check equality of constant values as well, but this is good enough.
+ return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type())
+}
+
+func (p *importer) declare(obj types.Object) {
+ pkg := obj.Pkg()
+ if alt := pkg.Scope().Insert(obj); alt != nil {
+ // This can only trigger if we import a (non-type) object a second time.
+		// Excluding type aliases, this cannot happen because a) we only import a package
+ // once; and b) we ignore compiler-specific export data which may contain
+ // functions whose inlined function bodies refer to other functions that
+ // were already imported.
+ // However, type aliases require reexporting the original type, so we need
+ // to allow it (see also the comment in cmd/compile/internal/gc/bimport.go,
+ // method importer.obj, switch case importing functions).
+ // TODO(gri) review/update this comment once the gc compiler handles type aliases.
+ if !sameObj(obj, alt) {
+ errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt)
+ }
+ }
+}
+
+func (p *importer) obj(tag int) {
+ switch tag {
+ case constTag:
+ pos := p.pos()
+ pkg, name := p.qualifiedName()
+ typ := p.typ(nil, nil)
+ val := p.value()
+ p.declare(types.NewConst(pos, pkg, name, typ, val))
+
+ case aliasTag:
+ // TODO(gri) verify type alias hookup is correct
+ pos := p.pos()
+ pkg, name := p.qualifiedName()
+ typ := p.typ(nil, nil)
+ p.declare(types.NewTypeName(pos, pkg, name, typ))
+
+ case typeTag:
+ p.typ(nil, nil)
+
+ case varTag:
+ pos := p.pos()
+ pkg, name := p.qualifiedName()
+ typ := p.typ(nil, nil)
+ p.declare(types.NewVar(pos, pkg, name, typ))
+
+ case funcTag:
+ pos := p.pos()
+ pkg, name := p.qualifiedName()
+ params, isddd := p.paramList()
+ result, _ := p.paramList()
+ sig := types.NewSignature(nil, params, result, isddd)
+ p.declare(types.NewFunc(pos, pkg, name, sig))
+
+ default:
+ errorf("unexpected object tag %d", tag)
+ }
+}
+
+const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go
+
+func (p *importer) pos() token.Pos {
+ if !p.posInfoFormat {
+ return token.NoPos
+ }
+
+ file := p.prevFile
+ line := p.prevLine
+ delta := p.int()
+ line += delta
+ if p.version >= 5 {
+ if delta == deltaNewFile {
+ if n := p.int(); n >= 0 {
+ // file changed
+ file = p.path()
+ line = n
+ }
+ }
+ } else {
+ if delta == 0 {
+ if n := p.int(); n >= 0 {
+ // file changed
+ file = p.prevFile[:n] + p.string()
+ line = p.int()
+ }
+ }
+ }
+ p.prevFile = file
+ p.prevLine = line
+
+ return p.fake.pos(file, line)
+}
+
+// Synthesize a token.Pos
+type fakeFileSet struct {
+ fset *token.FileSet
+ files map[string]*token.File
+}
+
+func (s *fakeFileSet) pos(file string, line int) token.Pos {
+ // Since we don't know the set of needed file positions, we
+ // reserve maxlines positions per file.
+ const maxlines = 64 * 1024
+ f := s.files[file]
+ if f == nil {
+ f = s.fset.AddFile(file, -1, maxlines)
+ s.files[file] = f
+ // Allocate the fake linebreak indices on first use.
+ // TODO(adonovan): opt: save ~512KB using a more complex scheme?
+ fakeLinesOnce.Do(func() {
+ fakeLines = make([]int, maxlines)
+ for i := range fakeLines {
+ fakeLines[i] = i
+ }
+ })
+ f.SetLines(fakeLines)
+ }
+
+ if line > maxlines {
+ line = 1
+ }
+
+ // Treat the file as if it contained only newlines
+ // and column=1: use the line number as the offset.
+ return f.Pos(line - 1)
+}
+
+var (
+ fakeLines []int
+ fakeLinesOnce sync.Once
+)
+
+func (p *importer) qualifiedName() (pkg *types.Package, name string) {
+ name = p.string()
+ pkg = p.pkg()
+ return
+}
+
+func (p *importer) record(t types.Type) {
+ p.typList = append(p.typList, t)
+}
+
+// A dddSlice is a types.Type representing ...T parameters.
+// It only appears for parameter types and does not escape
+// the importer.
+type dddSlice struct {
+ elem types.Type
+}
+
+func (t *dddSlice) Underlying() types.Type { return t }
+func (t *dddSlice) String() string { return "..." + t.elem.String() }
+
+// parent is the package which declared the type; parent == nil means
+// the package currently imported. The parent package is needed for
+// exported struct fields and interface methods which don't contain
+// explicit package information in the export data.
+//
+// A non-nil tname is used as the "owner" of the result type; i.e.,
+// the result type is the underlying type of tname. tname is used
+// to give interface methods a named receiver type where possible.
+func (p *importer) typ(parent *types.Package, tname *types.Named) types.Type {
+ // if the type was seen before, i is its index (>= 0)
+ i := p.tagOrIndex()
+ if i >= 0 {
+ return p.typList[i]
+ }
+
+ // otherwise, i is the type tag (< 0)
+ switch i {
+ case namedTag:
+ // read type object
+ pos := p.pos()
+ parent, name := p.qualifiedName()
+ scope := parent.Scope()
+ obj := scope.Lookup(name)
+
+ // if the object doesn't exist yet, create and insert it
+ if obj == nil {
+ obj = types.NewTypeName(pos, parent, name, nil)
+ scope.Insert(obj)
+ }
+
+ if _, ok := obj.(*types.TypeName); !ok {
+ errorf("pkg = %s, name = %s => %s", parent, name, obj)
+ }
+
+ // associate new named type with obj if it doesn't exist yet
+ t0 := types.NewNamed(obj.(*types.TypeName), nil, nil)
+
+ // but record the existing type, if any
+ tname := obj.Type().(*types.Named) // tname is either t0 or the existing type
+ p.record(tname)
+
+ // read underlying type
+ t0.SetUnderlying(p.typ(parent, t0))
+
+ // interfaces don't have associated methods
+ if types.IsInterface(t0) {
+ return tname
+ }
+
+ // read associated methods
+ for i := p.int(); i > 0; i-- {
+ // TODO(gri) replace this with something closer to fieldName
+ pos := p.pos()
+ name := p.string()
+ if !exported(name) {
+ p.pkg()
+ }
+
+ recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver?
+ params, isddd := p.paramList()
+ result, _ := p.paramList()
+ p.int() // go:nointerface pragma - discarded
+
+ sig := types.NewSignature(recv.At(0), params, result, isddd)
+ t0.AddMethod(types.NewFunc(pos, parent, name, sig))
+ }
+
+ return tname
+
+ case arrayTag:
+ t := new(types.Array)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ n := p.int64()
+ *t = *types.NewArray(p.typ(parent, nil), n)
+ return t
+
+ case sliceTag:
+ t := new(types.Slice)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ *t = *types.NewSlice(p.typ(parent, nil))
+ return t
+
+ case dddTag:
+ t := new(dddSlice)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ t.elem = p.typ(parent, nil)
+ return t
+
+ case structTag:
+ t := new(types.Struct)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ *t = *types.NewStruct(p.fieldList(parent))
+ return t
+
+ case pointerTag:
+ t := new(types.Pointer)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ *t = *types.NewPointer(p.typ(parent, nil))
+ return t
+
+ case signatureTag:
+ t := new(types.Signature)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ params, isddd := p.paramList()
+ result, _ := p.paramList()
+ *t = *types.NewSignature(nil, params, result, isddd)
+ return t
+
+ case interfaceTag:
+ // Create a dummy entry in the type list. This is safe because we
+ // cannot expect the interface type to appear in a cycle, as any
+ // such cycle must contain a named type which would have been
+ // first defined earlier.
+ // TODO(gri) Is this still true now that we have type aliases?
+ // See issue #23225.
+ n := len(p.typList)
+ if p.trackAllTypes {
+ p.record(nil)
+ }
+
+ var embeddeds []types.Type
+ for n := p.int(); n > 0; n-- {
+ p.pos()
+ embeddeds = append(embeddeds, p.typ(parent, nil))
+ }
+
+ t := newInterface(p.methodList(parent, tname), embeddeds)
+ p.interfaceList = append(p.interfaceList, t)
+ if p.trackAllTypes {
+ p.typList[n] = t
+ }
+ return t
+
+ case mapTag:
+ t := new(types.Map)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ key := p.typ(parent, nil)
+ val := p.typ(parent, nil)
+ *t = *types.NewMap(key, val)
+ return t
+
+ case chanTag:
+ t := new(types.Chan)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ dir := chanDir(p.int())
+ val := p.typ(parent, nil)
+ *t = *types.NewChan(dir, val)
+ return t
+
+ default:
+ errorf("unexpected type tag %d", i) // panics
+ panic("unreachable")
+ }
+}
+
+func chanDir(d int) types.ChanDir {
+ // tag values must match the constants in cmd/compile/internal/gc/go.go
+ switch d {
+ case 1 /* Crecv */ :
+ return types.RecvOnly
+ case 2 /* Csend */ :
+ return types.SendOnly
+ case 3 /* Cboth */ :
+ return types.SendRecv
+ default:
+ errorf("unexpected channel dir %d", d)
+ return 0
+ }
+}
+
+func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) {
+ if n := p.int(); n > 0 {
+ fields = make([]*types.Var, n)
+ tags = make([]string, n)
+ for i := range fields {
+ fields[i], tags[i] = p.field(parent)
+ }
+ }
+ return
+}
+
+func (p *importer) field(parent *types.Package) (*types.Var, string) {
+ pos := p.pos()
+ pkg, name, alias := p.fieldName(parent)
+ typ := p.typ(parent, nil)
+ tag := p.string()
+
+ anonymous := false
+ if name == "" {
+ // anonymous field - typ must be T or *T and T must be a type name
+ switch typ := deref(typ).(type) {
+ case *types.Basic: // basic types are named types
+ pkg = nil // // objects defined in Universe scope have no package
+ name = typ.Name()
+ case *types.Named:
+ name = typ.Obj().Name()
+ default:
+ errorf("named base type expected")
+ }
+ anonymous = true
+ } else if alias {
+ // anonymous field: we have an explicit name because it's an alias
+ anonymous = true
+ }
+
+ return types.NewField(pos, pkg, name, typ, anonymous), tag
+}
+
+func (p *importer) methodList(parent *types.Package, baseType *types.Named) (methods []*types.Func) {
+ if n := p.int(); n > 0 {
+ methods = make([]*types.Func, n)
+ for i := range methods {
+ methods[i] = p.method(parent, baseType)
+ }
+ }
+ return
+}
+
+func (p *importer) method(parent *types.Package, baseType *types.Named) *types.Func {
+ pos := p.pos()
+ pkg, name, _ := p.fieldName(parent)
+ // If we don't have a baseType, use a nil receiver.
+ // A receiver using the actual interface type (which
+ // we don't know yet) will be filled in when we call
+ // types.Interface.Complete.
+ var recv *types.Var
+ if baseType != nil {
+ recv = types.NewVar(token.NoPos, parent, "", baseType)
+ }
+ params, isddd := p.paramList()
+ result, _ := p.paramList()
+ sig := types.NewSignature(recv, params, result, isddd)
+ return types.NewFunc(pos, pkg, name, sig)
+}
+
+func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) {
+ name = p.string()
+ pkg = parent
+ if pkg == nil {
+ // use the imported package instead
+ pkg = p.pkgList[0]
+ }
+ if p.version == 0 && name == "_" {
+ // version 0 didn't export a package for _ fields
+ return
+ }
+ switch name {
+ case "":
+ // 1) field name matches base type name and is exported: nothing to do
+ case "?":
+ // 2) field name matches base type name and is not exported: need package
+ name = ""
+ pkg = p.pkg()
+ case "@":
+ // 3) field name doesn't match type name (alias)
+ name = p.string()
+ alias = true
+ fallthrough
+ default:
+ if !exported(name) {
+ pkg = p.pkg()
+ }
+ }
+ return
+}
+
+func (p *importer) paramList() (*types.Tuple, bool) {
+ n := p.int()
+ if n == 0 {
+ return nil, false
+ }
+ // negative length indicates unnamed parameters
+ named := true
+ if n < 0 {
+ n = -n
+ named = false
+ }
+ // n > 0
+ params := make([]*types.Var, n)
+ isddd := false
+ for i := range params {
+ params[i], isddd = p.param(named)
+ }
+ return types.NewTuple(params...), isddd
+}
+
+func (p *importer) param(named bool) (*types.Var, bool) {
+ t := p.typ(nil, nil)
+ td, isddd := t.(*dddSlice)
+ if isddd {
+ t = types.NewSlice(td.elem)
+ }
+
+ var pkg *types.Package
+ var name string
+ if named {
+ name = p.string()
+ if name == "" {
+ errorf("expected named parameter")
+ }
+ if name != "_" {
+ pkg = p.pkg()
+ }
+ if i := strings.Index(name, "·"); i > 0 {
+ name = name[:i] // cut off gc-specific parameter numbering
+ }
+ }
+
+ // read and discard compiler-specific info
+ p.string()
+
+ return types.NewVar(token.NoPos, pkg, name, t), isddd
+}
+
+func exported(name string) bool {
+ ch, _ := utf8.DecodeRuneInString(name)
+ return unicode.IsUpper(ch)
+}
+
+func (p *importer) value() constant.Value {
+ switch tag := p.tagOrIndex(); tag {
+ case falseTag:
+ return constant.MakeBool(false)
+ case trueTag:
+ return constant.MakeBool(true)
+ case int64Tag:
+ return constant.MakeInt64(p.int64())
+ case floatTag:
+ return p.float()
+ case complexTag:
+ re := p.float()
+ im := p.float()
+ return constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
+ case stringTag:
+ return constant.MakeString(p.string())
+ case unknownTag:
+ return constant.MakeUnknown()
+ default:
+ errorf("unexpected value tag %d", tag) // panics
+ panic("unreachable")
+ }
+}
+
+func (p *importer) float() constant.Value {
+ sign := p.int()
+ if sign == 0 {
+ return constant.MakeInt64(0)
+ }
+
+ exp := p.int()
+ mant := []byte(p.string()) // big endian
+
+ // remove leading 0's if any
+ for len(mant) > 0 && mant[0] == 0 {
+ mant = mant[1:]
+ }
+
+ // convert to little endian
+ // TODO(gri) go/constant should have a more direct conversion function
+ // (e.g., once it supports a big.Float based implementation)
+ for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 {
+ mant[i], mant[j] = mant[j], mant[i]
+ }
+
+ // adjust exponent (constant.MakeFromBytes creates an integer value,
+ // but mant represents the mantissa bits such that 0.5 <= mant < 1.0)
+ exp -= len(mant) << 3
+ if len(mant) > 0 {
+ for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 {
+ exp++
+ }
+ }
+
+ x := constant.MakeFromBytes(mant)
+ switch {
+ case exp < 0:
+ d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
+ x = constant.BinaryOp(x, token.QUO, d)
+ case exp > 0:
+ x = constant.Shift(x, token.SHL, uint(exp))
+ }
+
+ if sign < 0 {
+ x = constant.UnaryOp(token.SUB, x, 0)
+ }
+ return x
+}
+
+// ----------------------------------------------------------------------------
+// Low-level decoders
+
+func (p *importer) tagOrIndex() int {
+ if p.debugFormat {
+ p.marker('t')
+ }
+
+ return int(p.rawInt64())
+}
+
+func (p *importer) int() int {
+ x := p.int64()
+ if int64(int(x)) != x {
+ errorf("exported integer too large")
+ }
+ return int(x)
+}
+
+func (p *importer) int64() int64 {
+ if p.debugFormat {
+ p.marker('i')
+ }
+
+ return p.rawInt64()
+}
+
+func (p *importer) path() string {
+ if p.debugFormat {
+ p.marker('p')
+ }
+ // if the path was seen before, i is its index (>= 0)
+ // (the empty string is at index 0)
+ i := p.rawInt64()
+ if i >= 0 {
+ return p.pathList[i]
+ }
+ // otherwise, i is the negative path length (< 0)
+ a := make([]string, -i)
+ for n := range a {
+ a[n] = p.string()
+ }
+ s := strings.Join(a, "/")
+ p.pathList = append(p.pathList, s)
+ return s
+}
+
+func (p *importer) string() string {
+ if p.debugFormat {
+ p.marker('s')
+ }
+ // if the string was seen before, i is its index (>= 0)
+ // (the empty string is at index 0)
+ i := p.rawInt64()
+ if i >= 0 {
+ return p.strList[i]
+ }
+ // otherwise, i is the negative string length (< 0)
+ if n := int(-i); n <= cap(p.buf) {
+ p.buf = p.buf[:n]
+ } else {
+ p.buf = make([]byte, n)
+ }
+ for i := range p.buf {
+ p.buf[i] = p.rawByte()
+ }
+ s := string(p.buf)
+ p.strList = append(p.strList, s)
+ return s
+}
+
+func (p *importer) marker(want byte) {
+ if got := p.rawByte(); got != want {
+ errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read)
+ }
+
+ pos := p.read
+ if n := int(p.rawInt64()); n != pos {
+ errorf("incorrect position: got %d; want %d", n, pos)
+ }
+}
+
+// rawInt64 should only be used by low-level decoders.
+func (p *importer) rawInt64() int64 {
+ i, err := binary.ReadVarint(p)
+ if err != nil {
+ errorf("read error: %v", err)
+ }
+ return i
+}
+
+// rawStringln should only be used to read the initial version string.
+func (p *importer) rawStringln(b byte) string {
+ p.buf = p.buf[:0]
+ for b != '\n' {
+ p.buf = append(p.buf, b)
+ b = p.rawByte()
+ }
+ return string(p.buf)
+}
+
+// needed for binary.ReadVarint in rawInt64
+func (p *importer) ReadByte() (byte, error) {
+ return p.rawByte(), nil
+}
+
+// byte is the bottleneck interface for reading p.data.
+// It unescapes '|' 'S' to '$' and '|' '|' to '|'.
+// rawByte should only be used by low-level decoders.
+func (p *importer) rawByte() byte {
+ b := p.data[0]
+ r := 1
+ if b == '|' {
+ b = p.data[1]
+ r = 2
+ switch b {
+ case 'S':
+ b = '$'
+ case '|':
+ // nothing to do
+ default:
+ errorf("unexpected escape sequence in export data")
+ }
+ }
+ p.data = p.data[r:]
+ p.read += r
+ return b
+
+}
+
+// ----------------------------------------------------------------------------
+// Export format
+
+// Tags. Must be < 0.
+const (
+ // Objects
+ packageTag = -(iota + 1)
+ constTag
+ typeTag
+ varTag
+ funcTag
+ endTag
+
+ // Types
+ namedTag
+ arrayTag
+ sliceTag
+ dddTag
+ structTag
+ pointerTag
+ signatureTag
+ interfaceTag
+ mapTag
+ chanTag
+
+ // Values
+ falseTag
+ trueTag
+ int64Tag
+ floatTag
+ fractionTag // not used by gc
+ complexTag
+ stringTag
+ nilTag // only used by gc (appears in exported inlined function bodies)
+ unknownTag // not used by gc (only appears in packages with errors)
+
+ // Type aliases
+ aliasTag
+)
+
+var predeclared = []types.Type{
+ // basic types
+ types.Typ[types.Bool],
+ types.Typ[types.Int],
+ types.Typ[types.Int8],
+ types.Typ[types.Int16],
+ types.Typ[types.Int32],
+ types.Typ[types.Int64],
+ types.Typ[types.Uint],
+ types.Typ[types.Uint8],
+ types.Typ[types.Uint16],
+ types.Typ[types.Uint32],
+ types.Typ[types.Uint64],
+ types.Typ[types.Uintptr],
+ types.Typ[types.Float32],
+ types.Typ[types.Float64],
+ types.Typ[types.Complex64],
+ types.Typ[types.Complex128],
+ types.Typ[types.String],
+
+ // basic type aliases
+ types.Universe.Lookup("byte").Type(),
+ types.Universe.Lookup("rune").Type(),
+
+ // error
+ types.Universe.Lookup("error").Type(),
+
+ // untyped types
+ types.Typ[types.UntypedBool],
+ types.Typ[types.UntypedInt],
+ types.Typ[types.UntypedRune],
+ types.Typ[types.UntypedFloat],
+ types.Typ[types.UntypedComplex],
+ types.Typ[types.UntypedString],
+ types.Typ[types.UntypedNil],
+
+ // package unsafe
+ types.Typ[types.UnsafePointer],
+
+ // invalid type
+ types.Typ[types.Invalid], // only appears in packages with errors
+
+ // used internally by gc; never used by this package or in .a files
+ anyType{},
+}
+
+type anyType struct{}
+
+func (t anyType) Underlying() types.Type { return t }
+func (t anyType) String() string { return "any" }
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go b/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go
new file mode 100644
index 000000000..f33dc5613
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go
@@ -0,0 +1,93 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go.
+
+// This file implements FindExportData.
+
+package gcimporter
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "strconv"
+ "strings"
+)
+
+func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
+ // See $GOROOT/include/ar.h.
+ hdr := make([]byte, 16+12+6+6+8+10+2)
+ _, err = io.ReadFull(r, hdr)
+ if err != nil {
+ return
+ }
+ // leave for debugging
+ if false {
+ fmt.Printf("header: %s", hdr)
+ }
+ s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
+ size, err = strconv.Atoi(s)
+ if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
+ err = fmt.Errorf("invalid archive header")
+ return
+ }
+ name = strings.TrimSpace(string(hdr[:16]))
+ return
+}
+
+// FindExportData positions the reader r at the beginning of the
+// export data section of an underlying GC-created object/archive
+// file by reading from it. The reader must be positioned at the
+// start of the file before calling this function. The hdr result
+// is the string before the export data, either "$$" or "$$B".
+//
+func FindExportData(r *bufio.Reader) (hdr string, err error) {
+ // Read first line to make sure this is an object file.
+ line, err := r.ReadSlice('\n')
+ if err != nil {
+ err = fmt.Errorf("can't find export data (%v)", err)
+ return
+ }
+
+ if string(line) == "!\n" {
+ // Archive file. Scan to __.PKGDEF.
+ var name string
+ if name, _, err = readGopackHeader(r); err != nil {
+ return
+ }
+
+ // First entry should be __.PKGDEF.
+ if name != "__.PKGDEF" {
+ err = fmt.Errorf("go archive is missing __.PKGDEF")
+ return
+ }
+
+ // Read first line of __.PKGDEF data, so that line
+ // is once again the first line of the input.
+ if line, err = r.ReadSlice('\n'); err != nil {
+ err = fmt.Errorf("can't find export data (%v)", err)
+ return
+ }
+ }
+
+ // Now at __.PKGDEF in archive or still at beginning of file.
+ // Either way, line should begin with "go object ".
+ if !strings.HasPrefix(string(line), "go object ") {
+ err = fmt.Errorf("not a Go object file")
+ return
+ }
+
+ // Skip over object header to export data.
+ // Begins after first line starting with $$.
+ for line[0] != '$' {
+ if line, err = r.ReadSlice('\n'); err != nil {
+ err = fmt.Errorf("can't find export data (%v)", err)
+ return
+ }
+ }
+ hdr = string(line)
+
+ return
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
new file mode 100644
index 000000000..9cf186605
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
@@ -0,0 +1,1078 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a modified copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go,
+// but it also contains the original source-based importer code for Go1.6.
+// Once we stop supporting 1.6, we can remove that code.
+
+// Package gcimporter provides various functions for reading
+// gc-generated object files that can be used to implement the
+// Importer interface defined by the Go 1.5 standard library package.
+package gcimporter // import "golang.org/x/tools/go/internal/gcimporter"
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "go/build"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "sort"
+ "strconv"
+ "strings"
+ "text/scanner"
+)
+
+// debugging/development support
+const debug = false
+
+var pkgExts = [...]string{".a", ".o"}
+
+// FindPkg returns the filename and unique package id for an import
+// path based on package information provided by build.Import (using
+// the build.Default build.Context). A relative srcDir is interpreted
+// relative to the current working directory.
+// If no file was found, an empty filename is returned.
+//
+func FindPkg(path, srcDir string) (filename, id string) {
+ if path == "" {
+ return
+ }
+
+ var noext string
+ switch {
+ default:
+ // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
+ // Don't require the source files to be present.
+ if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282
+ srcDir = abs
+ }
+ bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
+ if bp.PkgObj == "" {
+ id = path // make sure we have an id to print in error message
+ return
+ }
+ noext = strings.TrimSuffix(bp.PkgObj, ".a")
+ id = bp.ImportPath
+
+ case build.IsLocalImport(path):
+ // "./x" -> "/this/directory/x.ext", "/this/directory/x"
+ noext = filepath.Join(srcDir, path)
+ id = noext
+
+ case filepath.IsAbs(path):
+ // for completeness only - go/build.Import
+ // does not support absolute imports
+ // "/x" -> "/x.ext", "/x"
+ noext = path
+ id = path
+ }
+
+ if false { // for debugging
+ if path != id {
+ fmt.Printf("%s -> %s\n", path, id)
+ }
+ }
+
+ // try extensions
+ for _, ext := range pkgExts {
+ filename = noext + ext
+ if f, err := os.Stat(filename); err == nil && !f.IsDir() {
+ return
+ }
+ }
+
+ filename = "" // not found
+ return
+}
+
+// ImportData imports a package by reading the gc-generated export data,
+// adds the corresponding package object to the packages map indexed by id,
+// and returns the object.
+//
+// The packages map must contains all packages already imported. The data
+// reader position must be the beginning of the export data section. The
+// filename is only used in error messages.
+//
+// If packages[id] contains the completely imported package, that package
+// can be used directly, and there is no need to call this function (but
+// there is also no harm but for extra time used).
+//
+func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) {
+ // support for parser error handling
+ defer func() {
+ switch r := recover().(type) {
+ case nil:
+ // nothing to do
+ case importError:
+ err = r
+ default:
+ panic(r) // internal error
+ }
+ }()
+
+ var p parser
+ p.init(filename, id, data, packages)
+ pkg = p.parseExport()
+
+ return
+}
+
+// Import imports a gc-generated package given its import path and srcDir, adds
+// the corresponding package object to the packages map, and returns the object.
+// The packages map must contain all packages already imported.
+//
+func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
+ var rc io.ReadCloser
+ var filename, id string
+ if lookup != nil {
+ // With custom lookup specified, assume that caller has
+ // converted path to a canonical import path for use in the map.
+ if path == "unsafe" {
+ return types.Unsafe, nil
+ }
+ id = path
+
+ // No need to re-import if the package was imported completely before.
+ if pkg = packages[id]; pkg != nil && pkg.Complete() {
+ return
+ }
+ f, err := lookup(path)
+ if err != nil {
+ return nil, err
+ }
+ rc = f
+ } else {
+ filename, id = FindPkg(path, srcDir)
+ if filename == "" {
+ if path == "unsafe" {
+ return types.Unsafe, nil
+ }
+ return nil, fmt.Errorf("can't find import: %q", id)
+ }
+
+ // no need to re-import if the package was imported completely before
+ if pkg = packages[id]; pkg != nil && pkg.Complete() {
+ return
+ }
+
+ // open file
+ f, err := os.Open(filename)
+ if err != nil {
+ return nil, err
+ }
+ defer func() {
+ if err != nil {
+ // add file name to error
+ err = fmt.Errorf("%s: %v", filename, err)
+ }
+ }()
+ rc = f
+ }
+ defer rc.Close()
+
+ var hdr string
+ buf := bufio.NewReader(rc)
+ if hdr, err = FindExportData(buf); err != nil {
+ return
+ }
+
+ switch hdr {
+ case "$$\n":
+ // Work-around if we don't have a filename; happens only if lookup != nil.
+ // Either way, the filename is only needed for importer error messages, so
+ // this is fine.
+ if filename == "" {
+ filename = path
+ }
+ return ImportData(packages, filename, id, buf)
+
+ case "$$B\n":
+ var data []byte
+ data, err = ioutil.ReadAll(buf)
+ if err != nil {
+ break
+ }
+
+ // TODO(gri): allow clients of go/importer to provide a FileSet.
+ // Or, define a new standard go/types/gcexportdata package.
+ fset := token.NewFileSet()
+
+ // The indexed export format starts with an 'i'; the older
+ // binary export format starts with a 'c', 'd', or 'v'
+ // (from "version"). Select appropriate importer.
+ if len(data) > 0 && data[0] == 'i' {
+ _, pkg, err = IImportData(fset, packages, data[1:], id)
+ } else {
+ _, pkg, err = BImportData(fset, packages, data, id)
+ }
+
+ default:
+ err = fmt.Errorf("unknown export data header: %q", hdr)
+ }
+
+ return
+}
+
+// ----------------------------------------------------------------------------
+// Parser
+
+// TODO(gri) Imported objects don't have position information.
+// Ideally use the debug table line info; alternatively
+// create some fake position (or the position of the
+// import). That way error messages referring to imported
+// objects can print meaningful information.
+
+// parser parses the exports inside a gc compiler-produced
+// object/archive file and populates its scope with the results.
+type parser struct {
+ scanner scanner.Scanner
+ tok rune // current token
+ lit string // literal string; only valid for Ident, Int, String tokens
+ id string // package id of imported package
+ sharedPkgs map[string]*types.Package // package id -> package object (across importer)
+ localPkgs map[string]*types.Package // package id -> package object (just this package)
+}
+
+func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) {
+ p.scanner.Init(src)
+ p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
+ p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
+ p.scanner.Whitespace = 1<<'\t' | 1<<' '
+ p.scanner.Filename = filename // for good error messages
+ p.next()
+ p.id = id
+ p.sharedPkgs = packages
+ if debug {
+ // check consistency of packages map
+ for _, pkg := range packages {
+ if pkg.Name() == "" {
+ fmt.Printf("no package name for %s\n", pkg.Path())
+ }
+ }
+ }
+}
+
+func (p *parser) next() {
+ p.tok = p.scanner.Scan()
+ switch p.tok {
+ case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·':
+ p.lit = p.scanner.TokenText()
+ default:
+ p.lit = ""
+ }
+ if debug {
+ fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit)
+ }
+}
+
+func declTypeName(pkg *types.Package, name string) *types.TypeName {
+ scope := pkg.Scope()
+ if obj := scope.Lookup(name); obj != nil {
+ return obj.(*types.TypeName)
+ }
+ obj := types.NewTypeName(token.NoPos, pkg, name, nil)
+ // a named type may be referred to before the underlying type
+ // is known - set it up
+ types.NewNamed(obj, nil, nil)
+ scope.Insert(obj)
+ return obj
+}
+
+// ----------------------------------------------------------------------------
+// Error handling
+
+// Internal errors are boxed as importErrors.
+type importError struct {
+ pos scanner.Position
+ err error
+}
+
+func (e importError) Error() string {
+ return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err)
+}
+
+func (p *parser) error(err interface{}) {
+ if s, ok := err.(string); ok {
+ err = errors.New(s)
+ }
+ // panic with a runtime.Error if err is not an error
+ panic(importError{p.scanner.Pos(), err.(error)})
+}
+
+func (p *parser) errorf(format string, args ...interface{}) {
+ p.error(fmt.Sprintf(format, args...))
+}
+
+func (p *parser) expect(tok rune) string {
+ lit := p.lit
+ if p.tok != tok {
+ p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit)
+ }
+ p.next()
+ return lit
+}
+
+func (p *parser) expectSpecial(tok string) {
+ sep := 'x' // not white space
+ i := 0
+ for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' {
+ sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
+ p.next()
+ i++
+ }
+ if i < len(tok) {
+ p.errorf("expected %q, got %q", tok, tok[0:i])
+ }
+}
+
+func (p *parser) expectKeyword(keyword string) {
+ lit := p.expect(scanner.Ident)
+ if lit != keyword {
+ p.errorf("expected keyword %s, got %q", keyword, lit)
+ }
+}
+
+// ----------------------------------------------------------------------------
+// Qualified and unqualified names
+
+// PackageId = string_lit .
+//
+func (p *parser) parsePackageId() string {
+ id, err := strconv.Unquote(p.expect(scanner.String))
+ if err != nil {
+ p.error(err)
+ }
+ // id == "" stands for the imported package id
+ // (only known at time of package installation)
+ if id == "" {
+ id = p.id
+ }
+ return id
+}
+
+// PackageName = ident .
+//
+func (p *parser) parsePackageName() string {
+ return p.expect(scanner.Ident)
+}
+
+// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
+func (p *parser) parseDotIdent() string {
+ ident := ""
+ if p.tok != scanner.Int {
+ sep := 'x' // not white space
+ for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' {
+ ident += p.lit
+ sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
+ p.next()
+ }
+ }
+ if ident == "" {
+ p.expect(scanner.Ident) // use expect() for error handling
+ }
+ return ident
+}
+
+// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
+//
+func (p *parser) parseQualifiedName() (id, name string) {
+ p.expect('@')
+ id = p.parsePackageId()
+ p.expect('.')
+ // Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields.
+ if p.tok == '?' {
+ p.next()
+ } else {
+ name = p.parseDotIdent()
+ }
+ return
+}
+
+// getPkg returns the package for a given id. If the package is
+// not found, create the package and add it to the p.localPkgs
+// and p.sharedPkgs maps. name is the (expected) name of the
+// package. If name == "", the package name is expected to be
+// set later via an import clause in the export data.
+//
+// id identifies a package, usually by a canonical package path like
+// "encoding/json" but possibly by a non-canonical import path like
+// "./json".
+//
+func (p *parser) getPkg(id, name string) *types.Package {
+ // package unsafe is not in the packages maps - handle explicitly
+ if id == "unsafe" {
+ return types.Unsafe
+ }
+
+ pkg := p.localPkgs[id]
+ if pkg == nil {
+ // first import of id from this package
+ pkg = p.sharedPkgs[id]
+ if pkg == nil {
+ // first import of id by this importer;
+ // add (possibly unnamed) pkg to shared packages
+ pkg = types.NewPackage(id, name)
+ p.sharedPkgs[id] = pkg
+ }
+ // add (possibly unnamed) pkg to local packages
+ if p.localPkgs == nil {
+ p.localPkgs = make(map[string]*types.Package)
+ }
+ p.localPkgs[id] = pkg
+ } else if name != "" {
+ // package exists already and we have an expected package name;
+ // make sure names match or set package name if necessary
+ if pname := pkg.Name(); pname == "" {
+ pkg.SetName(name)
+ } else if pname != name {
+ p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name)
+ }
+ }
+ return pkg
+}
+
+// parseExportedName is like parseQualifiedName, but
+// the package id is resolved to an imported *types.Package.
+//
+func (p *parser) parseExportedName() (pkg *types.Package, name string) {
+ id, name := p.parseQualifiedName()
+ pkg = p.getPkg(id, "")
+ return
+}
+
+// ----------------------------------------------------------------------------
+// Types
+
+// BasicType = identifier .
+//
+func (p *parser) parseBasicType() types.Type {
+ id := p.expect(scanner.Ident)
+ obj := types.Universe.Lookup(id)
+ if obj, ok := obj.(*types.TypeName); ok {
+ return obj.Type()
+ }
+ p.errorf("not a basic type: %s", id)
+ return nil
+}
+
+// ArrayType = "[" int_lit "]" Type .
+//
+func (p *parser) parseArrayType(parent *types.Package) types.Type {
+ // "[" already consumed and lookahead known not to be "]"
+ lit := p.expect(scanner.Int)
+ p.expect(']')
+ elem := p.parseType(parent)
+ n, err := strconv.ParseInt(lit, 10, 64)
+ if err != nil {
+ p.error(err)
+ }
+ return types.NewArray(elem, n)
+}
+
+// MapType = "map" "[" Type "]" Type .
+//
+func (p *parser) parseMapType(parent *types.Package) types.Type {
+ p.expectKeyword("map")
+ p.expect('[')
+ key := p.parseType(parent)
+ p.expect(']')
+ elem := p.parseType(parent)
+ return types.NewMap(key, elem)
+}
+
+// Name = identifier | "?" | QualifiedName .
+//
+// For unqualified and anonymous names, the returned package is the parent
+// package unless parent == nil, in which case the returned package is the
+// package being imported. (The parent package is not nil if the the name
+// is an unqualified struct field or interface method name belonging to a
+// type declared in another package.)
+//
+// For qualified names, the returned package is nil (and not created if
+// it doesn't exist yet) unless materializePkg is set (which creates an
+// unnamed package with valid package path). In the latter case, a
+// subsequent import clause is expected to provide a name for the package.
+//
+func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) {
+ pkg = parent
+ if pkg == nil {
+ pkg = p.sharedPkgs[p.id]
+ }
+ switch p.tok {
+ case scanner.Ident:
+ name = p.lit
+ p.next()
+ case '?':
+ // anonymous
+ p.next()
+ case '@':
+ // exported name prefixed with package path
+ pkg = nil
+ var id string
+ id, name = p.parseQualifiedName()
+ if materializePkg {
+ pkg = p.getPkg(id, "")
+ }
+ default:
+ p.error("name expected")
+ }
+ return
+}
+
+func deref(typ types.Type) types.Type {
+ if p, _ := typ.(*types.Pointer); p != nil {
+ return p.Elem()
+ }
+ return typ
+}
+
+// Field = Name Type [ string_lit ] .
+//
+func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
+ pkg, name := p.parseName(parent, true)
+
+ if name == "_" {
+ // Blank fields should be package-qualified because they
+ // are unexported identifiers, but gc does not qualify them.
+ // Assuming that the ident belongs to the current package
+ // causes types to change during re-exporting, leading
+ // to spurious "can't assign A to B" errors from go/types.
+ // As a workaround, pretend all blank fields belong
+ // to the same unique dummy package.
+ const blankpkg = "<_>"
+ pkg = p.getPkg(blankpkg, blankpkg)
+ }
+
+ typ := p.parseType(parent)
+ anonymous := false
+ if name == "" {
+ // anonymous field - typ must be T or *T and T must be a type name
+ switch typ := deref(typ).(type) {
+ case *types.Basic: // basic types are named types
+ pkg = nil // objects defined in Universe scope have no package
+ name = typ.Name()
+ case *types.Named:
+ name = typ.Obj().Name()
+ default:
+ p.errorf("anonymous field expected")
+ }
+ anonymous = true
+ }
+ tag := ""
+ if p.tok == scanner.String {
+ s := p.expect(scanner.String)
+ var err error
+ tag, err = strconv.Unquote(s)
+ if err != nil {
+ p.errorf("invalid struct tag %s: %s", s, err)
+ }
+ }
+ return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag
+}
+
+// StructType = "struct" "{" [ FieldList ] "}" .
+// FieldList = Field { ";" Field } .
+//
+func (p *parser) parseStructType(parent *types.Package) types.Type {
+ var fields []*types.Var
+ var tags []string
+
+ p.expectKeyword("struct")
+ p.expect('{')
+ for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
+ if i > 0 {
+ p.expect(';')
+ }
+ fld, tag := p.parseField(parent)
+ if tag != "" && tags == nil {
+ tags = make([]string, i)
+ }
+ if tags != nil {
+ tags = append(tags, tag)
+ }
+ fields = append(fields, fld)
+ }
+ p.expect('}')
+
+ return types.NewStruct(fields, tags)
+}
+
+// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
+//
+func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
+ _, name := p.parseName(nil, false)
+ // remove gc-specific parameter numbering
+ if i := strings.Index(name, "·"); i >= 0 {
+ name = name[:i]
+ }
+ if p.tok == '.' {
+ p.expectSpecial("...")
+ isVariadic = true
+ }
+ typ := p.parseType(nil)
+ if isVariadic {
+ typ = types.NewSlice(typ)
+ }
+ // ignore argument tag (e.g. "noescape")
+ if p.tok == scanner.String {
+ p.next()
+ }
+ // TODO(gri) should we provide a package?
+ par = types.NewVar(token.NoPos, nil, name, typ)
+ return
+}
+
+// Parameters = "(" [ ParameterList ] ")" .
+// ParameterList = { Parameter "," } Parameter .
+//
+func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
+ p.expect('(')
+ for p.tok != ')' && p.tok != scanner.EOF {
+ if len(list) > 0 {
+ p.expect(',')
+ }
+ par, variadic := p.parseParameter()
+ list = append(list, par)
+ if variadic {
+ if isVariadic {
+ p.error("... not on final argument")
+ }
+ isVariadic = true
+ }
+ }
+ p.expect(')')
+
+ return
+}
+
+// Signature = Parameters [ Result ] .
+// Result = Type | Parameters .
+//
+func (p *parser) parseSignature(recv *types.Var) *types.Signature {
+ params, isVariadic := p.parseParameters()
+
+ // optional result type
+ var results []*types.Var
+ if p.tok == '(' {
+ var variadic bool
+ results, variadic = p.parseParameters()
+ if variadic {
+ p.error("... not permitted on result type")
+ }
+ }
+
+ return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic)
+}
+
+// InterfaceType = "interface" "{" [ MethodList ] "}" .
+// MethodList = Method { ";" Method } .
+// Method = Name Signature .
+//
+// The methods of embedded interfaces are always "inlined"
+// by the compiler and thus embedded interfaces are never
+// visible in the export data.
+//
+func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
+ var methods []*types.Func
+
+ p.expectKeyword("interface")
+ p.expect('{')
+ for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
+ if i > 0 {
+ p.expect(';')
+ }
+ pkg, name := p.parseName(parent, true)
+ sig := p.parseSignature(nil)
+ methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig))
+ }
+ p.expect('}')
+
+ // Complete requires the type's embedded interfaces to be fully defined,
+ // but we do not define any
+ return types.NewInterface(methods, nil).Complete()
+}
+
+// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
+//
+func (p *parser) parseChanType(parent *types.Package) types.Type {
+ dir := types.SendRecv
+ if p.tok == scanner.Ident {
+ p.expectKeyword("chan")
+ if p.tok == '<' {
+ p.expectSpecial("<-")
+ dir = types.SendOnly
+ }
+ } else {
+ p.expectSpecial("<-")
+ p.expectKeyword("chan")
+ dir = types.RecvOnly
+ }
+ elem := p.parseType(parent)
+ return types.NewChan(dir, elem)
+}
+
+// Type =
+// BasicType | TypeName | ArrayType | SliceType | StructType |
+// PointerType | FuncType | InterfaceType | MapType | ChanType |
+// "(" Type ")" .
+//
+// BasicType = ident .
+// TypeName = ExportedName .
+// SliceType = "[" "]" Type .
+// PointerType = "*" Type .
+// FuncType = "func" Signature .
+//
+func (p *parser) parseType(parent *types.Package) types.Type {
+ switch p.tok {
+ case scanner.Ident:
+ switch p.lit {
+ default:
+ return p.parseBasicType()
+ case "struct":
+ return p.parseStructType(parent)
+ case "func":
+ // FuncType
+ p.next()
+ return p.parseSignature(nil)
+ case "interface":
+ return p.parseInterfaceType(parent)
+ case "map":
+ return p.parseMapType(parent)
+ case "chan":
+ return p.parseChanType(parent)
+ }
+ case '@':
+ // TypeName
+ pkg, name := p.parseExportedName()
+ return declTypeName(pkg, name).Type()
+ case '[':
+ p.next() // look ahead
+ if p.tok == ']' {
+ // SliceType
+ p.next()
+ return types.NewSlice(p.parseType(parent))
+ }
+ return p.parseArrayType(parent)
+ case '*':
+ // PointerType
+ p.next()
+ return types.NewPointer(p.parseType(parent))
+ case '<':
+ return p.parseChanType(parent)
+ case '(':
+ // "(" Type ")"
+ p.next()
+ typ := p.parseType(parent)
+ p.expect(')')
+ return typ
+ }
+ p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit)
+ return nil
+}
+
+// ----------------------------------------------------------------------------
+// Declarations
+
+// ImportDecl = "import" PackageName PackageId .
+//
+func (p *parser) parseImportDecl() {
+ p.expectKeyword("import")
+ name := p.parsePackageName()
+ p.getPkg(p.parsePackageId(), name)
+}
+
+// int_lit = [ "+" | "-" ] { "0" ... "9" } .
+//
+func (p *parser) parseInt() string {
+ s := ""
+ switch p.tok {
+ case '-':
+ s = "-"
+ p.next()
+ case '+':
+ p.next()
+ }
+ return s + p.expect(scanner.Int)
+}
+
+// number = int_lit [ "p" int_lit ] .
+//
+func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) {
+ // mantissa
+ mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0)
+ if mant == nil {
+ panic("invalid mantissa")
+ }
+
+ if p.lit == "p" {
+ // exponent (base 2)
+ p.next()
+ exp, err := strconv.ParseInt(p.parseInt(), 10, 0)
+ if err != nil {
+ p.error(err)
+ }
+ if exp < 0 {
+ denom := constant.MakeInt64(1)
+ denom = constant.Shift(denom, token.SHL, uint(-exp))
+ typ = types.Typ[types.UntypedFloat]
+ val = constant.BinaryOp(mant, token.QUO, denom)
+ return
+ }
+ if exp > 0 {
+ mant = constant.Shift(mant, token.SHL, uint(exp))
+ }
+ typ = types.Typ[types.UntypedFloat]
+ val = mant
+ return
+ }
+
+ typ = types.Typ[types.UntypedInt]
+ val = mant
+ return
+}
+
+// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
+// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
+// bool_lit = "true" | "false" .
+// complex_lit = "(" float_lit "+" float_lit "i" ")" .
+// rune_lit = "(" int_lit "+" int_lit ")" .
+// string_lit = `"` { unicode_char } `"` .
+//
+func (p *parser) parseConstDecl() {
+ p.expectKeyword("const")
+ pkg, name := p.parseExportedName()
+
+ var typ0 types.Type
+ if p.tok != '=' {
+ // constant types are never structured - no need for parent type
+ typ0 = p.parseType(nil)
+ }
+
+ p.expect('=')
+ var typ types.Type
+ var val constant.Value
+ switch p.tok {
+ case scanner.Ident:
+ // bool_lit
+ if p.lit != "true" && p.lit != "false" {
+ p.error("expected true or false")
+ }
+ typ = types.Typ[types.UntypedBool]
+ val = constant.MakeBool(p.lit == "true")
+ p.next()
+
+ case '-', scanner.Int:
+ // int_lit
+ typ, val = p.parseNumber()
+
+ case '(':
+ // complex_lit or rune_lit
+ p.next()
+ if p.tok == scanner.Char {
+ p.next()
+ p.expect('+')
+ typ = types.Typ[types.UntypedRune]
+ _, val = p.parseNumber()
+ p.expect(')')
+ break
+ }
+ _, re := p.parseNumber()
+ p.expect('+')
+ _, im := p.parseNumber()
+ p.expectKeyword("i")
+ p.expect(')')
+ typ = types.Typ[types.UntypedComplex]
+ val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
+
+ case scanner.Char:
+ // rune_lit
+ typ = types.Typ[types.UntypedRune]
+ val = constant.MakeFromLiteral(p.lit, token.CHAR, 0)
+ p.next()
+
+ case scanner.String:
+ // string_lit
+ typ = types.Typ[types.UntypedString]
+ val = constant.MakeFromLiteral(p.lit, token.STRING, 0)
+ p.next()
+
+ default:
+ p.errorf("expected literal got %s", scanner.TokenString(p.tok))
+ }
+
+ if typ0 == nil {
+ typ0 = typ
+ }
+
+ pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val))
+}
+
+// TypeDecl = "type" ExportedName Type .
+//
+func (p *parser) parseTypeDecl() {
+ p.expectKeyword("type")
+ pkg, name := p.parseExportedName()
+ obj := declTypeName(pkg, name)
+
+ // The type object may have been imported before and thus already
+ // have a type associated with it. We still need to parse the type
+ // structure, but throw it away if the object already has a type.
+ // This ensures that all imports refer to the same type object for
+ // a given type declaration.
+ typ := p.parseType(pkg)
+
+ if name := obj.Type().(*types.Named); name.Underlying() == nil {
+ name.SetUnderlying(typ)
+ }
+}
+
+// VarDecl = "var" ExportedName Type .
+//
+func (p *parser) parseVarDecl() {
+ p.expectKeyword("var")
+ pkg, name := p.parseExportedName()
+ typ := p.parseType(pkg)
+ pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ))
+}
+
+// Func = Signature [ Body ] .
+// Body = "{" ... "}" .
+//
+func (p *parser) parseFunc(recv *types.Var) *types.Signature {
+ sig := p.parseSignature(recv)
+ if p.tok == '{' {
+ p.next()
+ for i := 1; i > 0; p.next() {
+ switch p.tok {
+ case '{':
+ i++
+ case '}':
+ i--
+ }
+ }
+ }
+ return sig
+}
+
+// MethodDecl = "func" Receiver Name Func .
+// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
+//
+func (p *parser) parseMethodDecl() {
+ // "func" already consumed
+ p.expect('(')
+ recv, _ := p.parseParameter() // receiver
+ p.expect(')')
+
+ // determine receiver base type object
+ base := deref(recv.Type()).(*types.Named)
+
+ // parse method name, signature, and possibly inlined body
+ _, name := p.parseName(nil, false)
+ sig := p.parseFunc(recv)
+
+ // methods always belong to the same package as the base type object
+ pkg := base.Obj().Pkg()
+
+ // add method to type unless type was imported before
+ // and method exists already
+ // TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small.
+ base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
+}
+
+// FuncDecl = "func" ExportedName Func .
+//
+func (p *parser) parseFuncDecl() {
+ // "func" already consumed
+ pkg, name := p.parseExportedName()
+ typ := p.parseFunc(nil)
+ pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ))
+}
+
+// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
+//
+func (p *parser) parseDecl() {
+ if p.tok == scanner.Ident {
+ switch p.lit {
+ case "import":
+ p.parseImportDecl()
+ case "const":
+ p.parseConstDecl()
+ case "type":
+ p.parseTypeDecl()
+ case "var":
+ p.parseVarDecl()
+ case "func":
+ p.next() // look ahead
+ if p.tok == '(' {
+ p.parseMethodDecl()
+ } else {
+ p.parseFuncDecl()
+ }
+ }
+ }
+ p.expect('\n')
+}
+
+// ----------------------------------------------------------------------------
+// Export
+
+// Export = "PackageClause { Decl } "$$" .
+// PackageClause = "package" PackageName [ "safe" ] "\n" .
+//
+func (p *parser) parseExport() *types.Package {
+ p.expectKeyword("package")
+ name := p.parsePackageName()
+ if p.tok == scanner.Ident && p.lit == "safe" {
+ // package was compiled with -u option - ignore
+ p.next()
+ }
+ p.expect('\n')
+
+ pkg := p.getPkg(p.id, name)
+
+ for p.tok != '$' && p.tok != scanner.EOF {
+ p.parseDecl()
+ }
+
+ if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' {
+ // don't call next()/expect() since reading past the
+ // export data may cause scanner errors (e.g. NUL chars)
+ p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch)
+ }
+
+ if n := p.scanner.ErrorCount; n != 0 {
+ p.errorf("expected no scanner errors, got %d", n)
+ }
+
+ // Record all locally referenced packages as imports.
+ var imports []*types.Package
+ for id, pkg2 := range p.localPkgs {
+ if pkg2.Name() == "" {
+ p.errorf("%s package has no name", id)
+ }
+ if id == p.id {
+ continue // avoid self-edge
+ }
+ imports = append(imports, pkg2)
+ }
+ sort.Sort(byPath(imports))
+ pkg.SetImports(imports)
+
+ // package was imported completely and without errors
+ pkg.MarkComplete()
+
+ return pkg
+}
+
+type byPath []*types.Package
+
+func (a byPath) Len() int { return len(a) }
+func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() }
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
new file mode 100644
index 000000000..0fd22bb03
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
@@ -0,0 +1,598 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Indexed package import.
+// See cmd/compile/internal/gc/iexport.go for the export data format.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go.
+
+package gcimporter
+
+import (
+ "bytes"
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "io"
+ "sort"
+)
+
+type intReader struct {
+ *bytes.Reader
+ path string
+}
+
+func (r *intReader) int64() int64 {
+ i, err := binary.ReadVarint(r.Reader)
+ if err != nil {
+ errorf("import %q: read varint error: %v", r.path, err)
+ }
+ return i
+}
+
+func (r *intReader) uint64() uint64 {
+ i, err := binary.ReadUvarint(r.Reader)
+ if err != nil {
+ errorf("import %q: read varint error: %v", r.path, err)
+ }
+ return i
+}
+
+const predeclReserved = 32
+
+type itag uint64
+
+const (
+ // Types
+ definedType itag = iota
+ pointerType
+ sliceType
+ arrayType
+ chanType
+ mapType
+ signatureType
+ structType
+ interfaceType
+)
+
+// IImportData imports a package from the serialized package data
+// and returns the number of bytes consumed and a reference to the package.
+// If the export data version is not recognized or the format is otherwise
+// compromised, an error is returned.
+func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
+ const currentVersion = 0
+ version := -1
+ defer func() {
+ if e := recover(); e != nil {
+ if version > currentVersion {
+ err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
+ } else {
+ err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
+ }
+ }
+ }()
+
+ r := &intReader{bytes.NewReader(data), path}
+
+ version = int(r.uint64())
+ switch version {
+ case currentVersion:
+ default:
+ errorf("unknown iexport format version %d", version)
+ }
+
+ sLen := int64(r.uint64())
+ dLen := int64(r.uint64())
+
+ whence, _ := r.Seek(0, io.SeekCurrent)
+ stringData := data[whence : whence+sLen]
+ declData := data[whence+sLen : whence+sLen+dLen]
+ r.Seek(sLen+dLen, io.SeekCurrent)
+
+ p := iimporter{
+ ipath: path,
+
+ stringData: stringData,
+ stringCache: make(map[uint64]string),
+ pkgCache: make(map[uint64]*types.Package),
+
+ declData: declData,
+ pkgIndex: make(map[*types.Package]map[string]uint64),
+ typCache: make(map[uint64]types.Type),
+
+ fake: fakeFileSet{
+ fset: fset,
+ files: make(map[string]*token.File),
+ },
+ }
+
+ for i, pt := range predeclared {
+ p.typCache[uint64(i)] = pt
+ }
+
+ pkgList := make([]*types.Package, r.uint64())
+ for i := range pkgList {
+ pkgPathOff := r.uint64()
+ pkgPath := p.stringAt(pkgPathOff)
+ pkgName := p.stringAt(r.uint64())
+ _ = r.uint64() // package height; unused by go/types
+
+ if pkgPath == "" {
+ pkgPath = path
+ }
+ pkg := imports[pkgPath]
+ if pkg == nil {
+ pkg = types.NewPackage(pkgPath, pkgName)
+ imports[pkgPath] = pkg
+ } else if pkg.Name() != pkgName {
+ errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path)
+ }
+
+ p.pkgCache[pkgPathOff] = pkg
+
+ nameIndex := make(map[string]uint64)
+ for nSyms := r.uint64(); nSyms > 0; nSyms-- {
+ name := p.stringAt(r.uint64())
+ nameIndex[name] = r.uint64()
+ }
+
+ p.pkgIndex[pkg] = nameIndex
+ pkgList[i] = pkg
+ }
+
+ localpkg := pkgList[0]
+
+ names := make([]string, 0, len(p.pkgIndex[localpkg]))
+ for name := range p.pkgIndex[localpkg] {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ for _, name := range names {
+ p.doDecl(localpkg, name)
+ }
+
+ for _, typ := range p.interfaceList {
+ typ.Complete()
+ }
+
+ // record all referenced packages as imports
+ list := append(([]*types.Package)(nil), pkgList[1:]...)
+ sort.Sort(byPath(list))
+ localpkg.SetImports(list)
+
+ // package was imported completely and without errors
+ localpkg.MarkComplete()
+
+ consumed, _ := r.Seek(0, io.SeekCurrent)
+ return int(consumed), localpkg, nil
+}
+
+type iimporter struct {
+ ipath string
+
+ stringData []byte
+ stringCache map[uint64]string
+ pkgCache map[uint64]*types.Package
+
+ declData []byte
+ pkgIndex map[*types.Package]map[string]uint64
+ typCache map[uint64]types.Type
+
+ fake fakeFileSet
+ interfaceList []*types.Interface
+}
+
+func (p *iimporter) doDecl(pkg *types.Package, name string) {
+ // See if we've already imported this declaration.
+ if obj := pkg.Scope().Lookup(name); obj != nil {
+ return
+ }
+
+ off, ok := p.pkgIndex[pkg][name]
+ if !ok {
+ errorf("%v.%v not in index", pkg, name)
+ }
+
+ r := &importReader{p: p, currPkg: pkg}
+ r.declReader.Reset(p.declData[off:])
+
+ r.obj(name)
+}
+
+func (p *iimporter) stringAt(off uint64) string {
+ if s, ok := p.stringCache[off]; ok {
+ return s
+ }
+
+ slen, n := binary.Uvarint(p.stringData[off:])
+ if n <= 0 {
+ errorf("varint failed")
+ }
+ spos := off + uint64(n)
+ s := string(p.stringData[spos : spos+slen])
+ p.stringCache[off] = s
+ return s
+}
+
+func (p *iimporter) pkgAt(off uint64) *types.Package {
+ if pkg, ok := p.pkgCache[off]; ok {
+ return pkg
+ }
+ path := p.stringAt(off)
+ errorf("missing package %q in %q", path, p.ipath)
+ return nil
+}
+
+func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
+ if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) {
+ return t
+ }
+
+ if off < predeclReserved {
+ errorf("predeclared type missing from cache: %v", off)
+ }
+
+ r := &importReader{p: p}
+ r.declReader.Reset(p.declData[off-predeclReserved:])
+ t := r.doType(base)
+
+ if base == nil || !isInterface(t) {
+ p.typCache[off] = t
+ }
+ return t
+}
+
+type importReader struct {
+ p *iimporter
+ declReader bytes.Reader
+ currPkg *types.Package
+ prevFile string
+ prevLine int64
+}
+
+func (r *importReader) obj(name string) {
+ tag := r.byte()
+ pos := r.pos()
+
+ switch tag {
+ case 'A':
+ typ := r.typ()
+
+ r.declare(types.NewTypeName(pos, r.currPkg, name, typ))
+
+ case 'C':
+ typ, val := r.value()
+
+ r.declare(types.NewConst(pos, r.currPkg, name, typ, val))
+
+ case 'F':
+ sig := r.signature(nil)
+
+ r.declare(types.NewFunc(pos, r.currPkg, name, sig))
+
+ case 'T':
+ // Types can be recursive. We need to setup a stub
+ // declaration before recursing.
+ obj := types.NewTypeName(pos, r.currPkg, name, nil)
+ named := types.NewNamed(obj, nil, nil)
+ r.declare(obj)
+
+ underlying := r.p.typAt(r.uint64(), named).Underlying()
+ named.SetUnderlying(underlying)
+
+ if !isInterface(underlying) {
+ for n := r.uint64(); n > 0; n-- {
+ mpos := r.pos()
+ mname := r.ident()
+ recv := r.param()
+ msig := r.signature(recv)
+
+ named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig))
+ }
+ }
+
+ case 'V':
+ typ := r.typ()
+
+ r.declare(types.NewVar(pos, r.currPkg, name, typ))
+
+ default:
+ errorf("unexpected tag: %v", tag)
+ }
+}
+
+func (r *importReader) declare(obj types.Object) {
+ obj.Pkg().Scope().Insert(obj)
+}
+
+func (r *importReader) value() (typ types.Type, val constant.Value) {
+ typ = r.typ()
+
+ switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
+ case types.IsBoolean:
+ val = constant.MakeBool(r.bool())
+
+ case types.IsString:
+ val = constant.MakeString(r.string())
+
+ case types.IsInteger:
+ val = r.mpint(b)
+
+ case types.IsFloat:
+ val = r.mpfloat(b)
+
+ case types.IsComplex:
+ re := r.mpfloat(b)
+ im := r.mpfloat(b)
+ val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
+
+ default:
+ errorf("unexpected type %v", typ) // panics
+ panic("unreachable")
+ }
+
+ return
+}
+
+func intSize(b *types.Basic) (signed bool, maxBytes uint) {
+ if (b.Info() & types.IsUntyped) != 0 {
+ return true, 64
+ }
+
+ switch b.Kind() {
+ case types.Float32, types.Complex64:
+ return true, 3
+ case types.Float64, types.Complex128:
+ return true, 7
+ }
+
+ signed = (b.Info() & types.IsUnsigned) == 0
+ switch b.Kind() {
+ case types.Int8, types.Uint8:
+ maxBytes = 1
+ case types.Int16, types.Uint16:
+ maxBytes = 2
+ case types.Int32, types.Uint32:
+ maxBytes = 4
+ default:
+ maxBytes = 8
+ }
+
+ return
+}
+
+func (r *importReader) mpint(b *types.Basic) constant.Value {
+ signed, maxBytes := intSize(b)
+
+ maxSmall := 256 - maxBytes
+ if signed {
+ maxSmall = 256 - 2*maxBytes
+ }
+ if maxBytes == 1 {
+ maxSmall = 256
+ }
+
+ n, _ := r.declReader.ReadByte()
+ if uint(n) < maxSmall {
+ v := int64(n)
+ if signed {
+ v >>= 1
+ if n&1 != 0 {
+ v = ^v
+ }
+ }
+ return constant.MakeInt64(v)
+ }
+
+ v := -n
+ if signed {
+ v = -(n &^ 1) >> 1
+ }
+ if v < 1 || uint(v) > maxBytes {
+ errorf("weird decoding: %v, %v => %v", n, signed, v)
+ }
+
+ buf := make([]byte, v)
+ io.ReadFull(&r.declReader, buf)
+
+ // convert to little endian
+ // TODO(gri) go/constant should have a more direct conversion function
+ // (e.g., once it supports a big.Float based implementation)
+ for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
+ buf[i], buf[j] = buf[j], buf[i]
+ }
+
+ x := constant.MakeFromBytes(buf)
+ if signed && n&1 != 0 {
+ x = constant.UnaryOp(token.SUB, x, 0)
+ }
+ return x
+}
+
+func (r *importReader) mpfloat(b *types.Basic) constant.Value {
+ x := r.mpint(b)
+ if constant.Sign(x) == 0 {
+ return x
+ }
+
+ exp := r.int64()
+ switch {
+ case exp > 0:
+ x = constant.Shift(x, token.SHL, uint(exp))
+ case exp < 0:
+ d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
+ x = constant.BinaryOp(x, token.QUO, d)
+ }
+ return x
+}
+
+func (r *importReader) ident() string {
+ return r.string()
+}
+
+func (r *importReader) qualifiedIdent() (*types.Package, string) {
+ name := r.string()
+ pkg := r.pkg()
+ return pkg, name
+}
+
+func (r *importReader) pos() token.Pos {
+ delta := r.int64()
+ if delta != deltaNewFile {
+ r.prevLine += delta
+ } else if l := r.int64(); l == -1 {
+ r.prevLine += deltaNewFile
+ } else {
+ r.prevFile = r.string()
+ r.prevLine = l
+ }
+
+ if r.prevFile == "" && r.prevLine == 0 {
+ return token.NoPos
+ }
+
+ return r.p.fake.pos(r.prevFile, int(r.prevLine))
+}
+
+func (r *importReader) typ() types.Type {
+ return r.p.typAt(r.uint64(), nil)
+}
+
+func isInterface(t types.Type) bool {
+ _, ok := t.(*types.Interface)
+ return ok
+}
+
+func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) }
+func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
+
+func (r *importReader) doType(base *types.Named) types.Type {
+ switch k := r.kind(); k {
+ default:
+ errorf("unexpected kind tag in %q: %v", r.p.ipath, k)
+ return nil
+
+ case definedType:
+ pkg, name := r.qualifiedIdent()
+ r.p.doDecl(pkg, name)
+ return pkg.Scope().Lookup(name).(*types.TypeName).Type()
+ case pointerType:
+ return types.NewPointer(r.typ())
+ case sliceType:
+ return types.NewSlice(r.typ())
+ case arrayType:
+ n := r.uint64()
+ return types.NewArray(r.typ(), int64(n))
+ case chanType:
+ dir := chanDir(int(r.uint64()))
+ return types.NewChan(dir, r.typ())
+ case mapType:
+ return types.NewMap(r.typ(), r.typ())
+ case signatureType:
+ r.currPkg = r.pkg()
+ return r.signature(nil)
+
+ case structType:
+ r.currPkg = r.pkg()
+
+ fields := make([]*types.Var, r.uint64())
+ tags := make([]string, len(fields))
+ for i := range fields {
+ fpos := r.pos()
+ fname := r.ident()
+ ftyp := r.typ()
+ emb := r.bool()
+ tag := r.string()
+
+ fields[i] = types.NewField(fpos, r.currPkg, fname, ftyp, emb)
+ tags[i] = tag
+ }
+ return types.NewStruct(fields, tags)
+
+ case interfaceType:
+ r.currPkg = r.pkg()
+
+ embeddeds := make([]types.Type, r.uint64())
+ for i := range embeddeds {
+ _ = r.pos()
+ embeddeds[i] = r.typ()
+ }
+
+ methods := make([]*types.Func, r.uint64())
+ for i := range methods {
+ mpos := r.pos()
+ mname := r.ident()
+
+ // TODO(mdempsky): Matches bimport.go, but I
+ // don't agree with this.
+ var recv *types.Var
+ if base != nil {
+ recv = types.NewVar(token.NoPos, r.currPkg, "", base)
+ }
+
+ msig := r.signature(recv)
+ methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig)
+ }
+
+ typ := newInterface(methods, embeddeds)
+ r.p.interfaceList = append(r.p.interfaceList, typ)
+ return typ
+ }
+}
+
+func (r *importReader) kind() itag {
+ return itag(r.uint64())
+}
+
+func (r *importReader) signature(recv *types.Var) *types.Signature {
+ params := r.paramList()
+ results := r.paramList()
+ variadic := params.Len() > 0 && r.bool()
+ return types.NewSignature(recv, params, results, variadic)
+}
+
+func (r *importReader) paramList() *types.Tuple {
+ xs := make([]*types.Var, r.uint64())
+ for i := range xs {
+ xs[i] = r.param()
+ }
+ return types.NewTuple(xs...)
+}
+
+func (r *importReader) param() *types.Var {
+ pos := r.pos()
+ name := r.ident()
+ typ := r.typ()
+ return types.NewParam(pos, r.currPkg, name, typ)
+}
+
+func (r *importReader) bool() bool {
+ return r.uint64() != 0
+}
+
+func (r *importReader) int64() int64 {
+ n, err := binary.ReadVarint(&r.declReader)
+ if err != nil {
+ errorf("readVarint: %v", err)
+ }
+ return n
+}
+
+func (r *importReader) uint64() uint64 {
+ n, err := binary.ReadUvarint(&r.declReader)
+ if err != nil {
+ errorf("readUvarint: %v", err)
+ }
+ return n
+}
+
+func (r *importReader) byte() byte {
+ x, err := r.declReader.ReadByte()
+ if err != nil {
+ errorf("declReader.ReadByte: %v", err)
+ }
+ return x
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go
new file mode 100644
index 000000000..463f25227
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go
@@ -0,0 +1,21 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !go1.11
+
+package gcimporter
+
+import "go/types"
+
+func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
+ named := make([]*types.Named, len(embeddeds))
+ for i, e := range embeddeds {
+ var ok bool
+ named[i], ok = e.(*types.Named)
+ if !ok {
+ panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11")
+ }
+ }
+ return types.NewInterface(methods, named)
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go
new file mode 100644
index 000000000..ab28b95cb
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go
@@ -0,0 +1,13 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.11
+
+package gcimporter
+
+import "go/types"
+
+func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
+ return types.NewInterfaceType(methods, embeddeds)
+}
diff --git a/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go b/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go
new file mode 100644
index 000000000..fdc7da056
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go
@@ -0,0 +1,160 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package packagesdriver fetches type sizes for go/packages and go/analysis.
+package packagesdriver
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "go/types"
+ "log"
+ "os"
+ "os/exec"
+ "strings"
+ "time"
+)
+
+var debug = false
+
+// GetSizes returns the sizes used by the underlying driver with the given parameters.
+func GetSizes(ctx context.Context, buildFlags, env []string, dir string, usesExportData bool) (types.Sizes, error) {
+ // TODO(matloob): Clean this up. This code is mostly a copy of packages.findExternalDriver.
+ const toolPrefix = "GOPACKAGESDRIVER="
+ tool := ""
+ for _, env := range env {
+ if val := strings.TrimPrefix(env, toolPrefix); val != env {
+ tool = val
+ }
+ }
+
+ if tool == "" {
+ var err error
+ tool, err = exec.LookPath("gopackagesdriver")
+ if err != nil {
+ // We did not find the driver, so use "go list".
+ tool = "off"
+ }
+ }
+
+ if tool == "off" {
+ return GetSizesGolist(ctx, buildFlags, env, dir, usesExportData)
+ }
+
+ req, err := json.Marshal(struct {
+ Command string `json:"command"`
+ Env []string `json:"env"`
+ BuildFlags []string `json:"build_flags"`
+ }{
+ Command: "sizes",
+ Env: env,
+ BuildFlags: buildFlags,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to encode message to driver tool: %v", err)
+ }
+
+ buf := new(bytes.Buffer)
+ cmd := exec.CommandContext(ctx, tool)
+ cmd.Dir = dir
+ cmd.Env = env
+ cmd.Stdin = bytes.NewReader(req)
+ cmd.Stdout = buf
+ cmd.Stderr = new(bytes.Buffer)
+ if err := cmd.Run(); err != nil {
+ return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr)
+ }
+ var response struct {
+ // Sizes, if not nil, is the types.Sizes to use when type checking.
+ Sizes *types.StdSizes
+ }
+ if err := json.Unmarshal(buf.Bytes(), &response); err != nil {
+ return nil, err
+ }
+ return response.Sizes, nil
+}
+
+func GetSizesGolist(ctx context.Context, buildFlags, env []string, dir string, usesExportData bool) (types.Sizes, error) {
+ args := []string{"list", "-f", "{{context.GOARCH}} {{context.Compiler}}"}
+ args = append(args, buildFlags...)
+ args = append(args, "--", "unsafe")
+ stdout, err := InvokeGo(ctx, env, dir, usesExportData, args...)
+ if err != nil {
+ return nil, err
+ }
+ fields := strings.Fields(stdout.String())
+ if len(fields) < 2 {
+ return nil, fmt.Errorf("could not determine GOARCH and Go compiler")
+ }
+ goarch := fields[0]
+ compiler := fields[1]
+ return types.SizesFor(compiler, goarch), nil
+}
+
+// InvokeGo returns the stdout of a go command invocation.
+func InvokeGo(ctx context.Context, env []string, dir string, usesExportData bool, args ...string) (*bytes.Buffer, error) {
+ if debug {
+ defer func(start time.Time) { log.Printf("%s for %v", time.Since(start), cmdDebugStr(env, args...)) }(time.Now())
+ }
+ stdout := new(bytes.Buffer)
+ stderr := new(bytes.Buffer)
+ cmd := exec.CommandContext(ctx, "go", args...)
+ // On darwin the cwd gets resolved to the real path, which breaks anything that
+ // expects the working directory to keep the original path, including the
+ // go command when dealing with modules.
+ // The Go stdlib has a special feature where if the cwd and the PWD are the
+ // same node then it trusts the PWD, so by setting it in the env for the child
+ // process we fix up all the paths returned by the go command.
+ cmd.Env = append(append([]string{}, env...), "PWD="+dir)
+ cmd.Dir = dir
+ cmd.Stdout = stdout
+ cmd.Stderr = stderr
+ if err := cmd.Run(); err != nil {
+ exitErr, ok := err.(*exec.ExitError)
+ if !ok {
+ // Catastrophic error:
+ // - executable not found
+ // - context cancellation
+ return nil, fmt.Errorf("couldn't exec 'go %v': %s %T", args, err, err)
+ }
+
+ // Export mode entails a build.
+ // If that build fails, errors appear on stderr
+ // (despite the -e flag) and the Export field is blank.
+ // Do not fail in that case.
+ if !usesExportData {
+ return nil, fmt.Errorf("go %v: %s: %s", args, exitErr, stderr)
+ }
+ }
+
+ // As of writing, go list -export prints some non-fatal compilation
+ // errors to stderr, even with -e set. We would prefer that it put
+ // them in the Package.Error JSON (see https://golang.org/issue/26319).
+ // In the meantime, there's nowhere good to put them, but they can
+ // be useful for debugging. Print them if $GOPACKAGESPRINTGOLISTERRORS
+ // is set.
+ if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTGOLISTERRORS") != "" {
+ fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(env, args...), stderr)
+ }
+
+ // debugging
+ if false {
+ fmt.Fprintf(os.Stderr, "%s stdout: <<%s>>\n", cmdDebugStr(env, args...), stdout)
+ }
+
+ return stdout, nil
+}
+
+func cmdDebugStr(envlist []string, args ...string) string {
+ env := make(map[string]string)
+ for _, kv := range envlist {
+ split := strings.Split(kv, "=")
+ k, v := split[0], split[1]
+ env[k] = v
+ }
+
+ return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v PWD=%v go %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["PWD"], args)
+}
diff --git a/vendor/golang.org/x/tools/go/packages/doc.go b/vendor/golang.org/x/tools/go/packages/doc.go
new file mode 100644
index 000000000..0ec0fab24
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/doc.go
@@ -0,0 +1,226 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package packages loads Go packages for inspection and analysis.
+
+The Load function takes as input a list of patterns and returns a list of Package
+structs describing individual packages matched by those patterns.
+The LoadMode controls the amount of detail in the loaded packages.
+
+Load passes most patterns directly to the underlying build tool,
+but all patterns with the prefix "query=", where query is a
+non-empty string of letters from [a-z], are reserved and may be
+interpreted as query operators.
+
+Three query operators are currently supported: "file", "pattern", and "name".
+
+The query "file=path/to/file.go" matches the package or packages enclosing
+the Go source file path/to/file.go. For example "file=~/go/src/fmt/print.go"
+might return the packages "fmt" and "fmt [fmt.test]".
+
+The query "pattern=string" causes "string" to be passed directly to
+the underlying build tool. In most cases this is unnecessary,
+but an application can use Load("pattern=" + x) as an escaping mechanism
+to ensure that x is not interpreted as a query operator if it contains '='.
+
+The query "name=identifier" matches packages whose package declaration contains
+the specified identifier. For example, "name=rand" would match the packages
+"math/rand" and "crypto/rand", and "name=main" would match all executables.
+
+All other query operators are reserved for future use and currently
+cause Load to report an error.
+
+The Package struct provides basic information about the package, including
+
+ - ID, a unique identifier for the package in the returned set;
+ - GoFiles, the names of the package's Go source files;
+ - Imports, a map from source import strings to the Packages they name;
+ - Types, the type information for the package's exported symbols;
+ - Syntax, the parsed syntax trees for the package's source code; and
+ - TypeInfo, the result of a complete type-check of the package syntax trees.
+
+(See the documentation for type Package for the complete list of fields
+and more detailed descriptions.)
+
+For example,
+
+ Load(nil, "bytes", "unicode...")
+
+returns four Package structs describing the standard library packages
+bytes, unicode, unicode/utf16, and unicode/utf8. Note that one pattern
+can match multiple packages and that a package might be matched by
+multiple patterns: in general it is not possible to determine which
+packages correspond to which patterns.
+
+Note that the list returned by Load contains only the packages matched
+by the patterns. Their dependencies can be found by walking the import
+graph using the Imports fields.
+
+The Load function can be configured by passing a pointer to a Config as
+the first argument. A nil Config is equivalent to the zero Config, which
+causes Load to run in LoadFiles mode, collecting minimal information.
+See the documentation for type Config for details.
+
+As noted earlier, the Config.Mode controls the amount of detail
+reported about the loaded packages, with each mode returning all the data of the
+previous mode with some extra added. See the documentation for type LoadMode
+for details.
+
+Most tools should pass their command-line arguments (after any flags)
+uninterpreted to the loader, so that the loader can interpret them
+according to the conventions of the underlying build system.
+See the Example function for typical usage.
+
+*/
+package packages // import "golang.org/x/tools/go/packages"
+
+/*
+
+Motivation and design considerations
+
+The new package's design solves problems addressed by two existing
+packages: go/build, which locates and describes packages, and
+golang.org/x/tools/go/loader, which loads, parses and type-checks them.
+The go/build.Package structure encodes too much of the 'go build' way
+of organizing projects, leaving us in need of a data type that describes a
+package of Go source code independent of the underlying build system.
+We wanted something that works equally well with go build and vgo, and
+also other build systems such as Bazel and Blaze, making it possible to
+construct analysis tools that work in all these environments.
+Tools such as errcheck and staticcheck were essentially unavailable to
+the Go community at Google, and some of Google's internal tools for Go
+are unavailable externally.
+This new package provides a uniform way to obtain package metadata by
+querying each of these build systems, optionally supporting their
+preferred command-line notations for packages, so that tools integrate
+neatly with users' build environments. The Metadata query function
+executes an external query tool appropriate to the current workspace.
+
+Loading packages always returns the complete import graph "all the way down",
+even if all you want is information about a single package, because the query
+mechanisms of all the build systems we currently support ({go,vgo} list, and
+blaze/bazel aspect-based query) cannot provide detailed information
+about one package without visiting all its dependencies too, so there is
+no additional asymptotic cost to providing transitive information.
+(This property might not be true of a hypothetical 5th build system.)
+
+In calls to TypeCheck, all initial packages, and any package that
+transitively depends on one of them, must be loaded from source.
+Consider A->B->C->D->E: if A,C are initial, A,B,C must be loaded from
+source; D may be loaded from export data, and E may not be loaded at all
+(though it's possible that D's export data mentions it, so a
+types.Package may be created for it and exposed.)
+
+The old loader had a feature to suppress type-checking of function
+bodies on a per-package basis, primarily intended to reduce the work of
+obtaining type information for imported packages. Now that imports are
+satisfied by export data, the optimization no longer seems necessary.
+
+Despite some early attempts, the old loader did not exploit export data,
+instead always using the equivalent of WholeProgram mode. This was due
+to the complexity of mixing source and export data packages (now
+resolved by the upward traversal mentioned above), and because export data
+files were nearly always missing or stale. Now that 'go build' supports
+caching, all the underlying build systems can guarantee to produce
+export data in a reasonable (amortized) time.
+
+Test "main" packages synthesized by the build system are now reported as
+first-class packages, avoiding the need for clients (such as go/ssa) to
+reinvent this generation logic.
+
+One way in which go/packages is simpler than the old loader is in its
+treatment of in-package tests. In-package tests are packages that
+consist of all the files of the library under test, plus the test files.
+The old loader constructed in-package tests by a two-phase process of
+mutation called "augmentation": first it would construct and type check
+all the ordinary library packages and type-check the packages that
+depend on them; then it would add more (test) files to the package and
+type-check again. This two-phase approach had four major problems:
+1) in processing the tests, the loader modified the library package,
+ leaving no way for a client application to see both the test
+ package and the library package; one would mutate into the other.
+2) because test files can declare additional methods on types defined in
+ the library portion of the package, the dispatch of method calls in
+ the library portion was affected by the presence of the test files.
+ This should have been a clue that the packages were logically
+ different.
+3) this model of "augmentation" assumed at most one in-package test
+ per library package, which is true of projects using 'go build',
+ but not other build systems.
+4) because of the two-phase nature of test processing, all packages that
+ import the library package had to be processed before augmentation,
+ forcing a "one-shot" API and preventing the client from calling Load
+ several times in sequence as is now possible in WholeProgram mode.
+ (TypeCheck mode has a similar one-shot restriction for a different reason.)
+
+Early drafts of this package supported "multi-shot" operation.
+Although it allowed clients to make a sequence of calls (or concurrent
+calls) to Load, building up the graph of Packages incrementally,
+it was of marginal value: it complicated the API
+(since it allowed some options to vary across calls but not others),
+it complicated the implementation,
+it cannot be made to work in Types mode, as explained above,
+and it was less efficient than making one combined call (when this is possible).
+Among the clients we have inspected, none made multiple calls to load
+but could not be easily and satisfactorily modified to make only a single call.
+However, application changes may be required.
+For example, the ssadump command loads the user-specified packages
+and in addition the runtime package. It is tempting to simply append
+"runtime" to the user-provided list, but that does not work if the user
+specified an ad-hoc package such as [a.go b.go].
+Instead, ssadump no longer requests the runtime package,
+but seeks it among the dependencies of the user-specified packages,
+and emits an error if it is not found.
+
+Overlays: The Overlay field in the Config allows providing alternate contents
+for Go source files, by providing a mapping from file path to contents.
+go/packages will pull in new imports added in overlay files when go/packages
+is run in LoadImports mode or greater.
+Overlay support for the go list driver isn't complete yet: if the file doesn't
+exist on disk, it will only be recognized in an overlay if it is a non-test file
+and the package would be reported even without the overlay.
+
+Questions & Tasks
+
+- Add GOARCH/GOOS?
+ They are not portable concepts, but could be made portable.
+ Our goal has been to allow users to express themselves using the conventions
+ of the underlying build system: if the build system honors GOARCH
+ during a build and during a metadata query, then so should
+ applications built atop that query mechanism.
+ Conversely, if the target architecture of the build is determined by
+ command-line flags, the application can pass the relevant
+ flags through to the build system using a command such as:
+ myapp -query_flag="--cpu=amd64" -query_flag="--os=darwin"
+ However, this approach is low-level, unwieldy, and non-portable.
+ GOOS and GOARCH seem important enough to warrant a dedicated option.
+
+- How should we handle partial failures such as a mixture of good and
+ malformed patterns, existing and non-existent packages, successful and
+ failed builds, import failures, import cycles, and so on, in a call to
+ Load?
+
+- Support bazel, blaze, and go1.10 list, not just go1.11 list.
+
+- Handle (and test) various partial success cases, e.g.
+ a mixture of good packages and:
+ invalid patterns
+ nonexistent packages
+ empty packages
+ packages with malformed package or import declarations
+ unreadable files
+ import cycles
+ other parse errors
+ type errors
+ Make sure we record errors at the correct place in the graph.
+
+- Missing packages among initial arguments are not reported.
+ Return bogus packages for them, like golist does.
+
+- "undeclared name" errors (for example) are reported out of source file
+ order. I suspect this is due to the breadth-first resolution now used
+ by go/types. Is that a bug? Discuss with gri.
+
+*/
diff --git a/vendor/golang.org/x/tools/go/packages/external.go b/vendor/golang.org/x/tools/go/packages/external.go
new file mode 100644
index 000000000..860c3ec15
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/external.go
@@ -0,0 +1,79 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file enables an external tool to intercept package requests.
+// If the tool is present then its results are used in preference to
+// the go list command.
+
+package packages
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "os/exec"
+ "strings"
+)
+
+// driverRequest is the JSON request sent to an external driver binary.
+type driverRequest struct {
+	Command    string            `json:"command"`
+	Mode       LoadMode          `json:"mode"`
+	Env        []string          `json:"env"`
+	BuildFlags []string          `json:"build_flags"`
+	Tests      bool              `json:"tests"`
+	Overlay    map[string][]byte `json:"overlay"`
+}
+
+// findExternalDriver returns the file path of a tool that supplies
+// the build system package structure, or "" if not found.
+// If GOPACKAGESDRIVER is set in the environment findExternalTool returns its
+// value, otherwise it searches for a binary named gopackagesdriver on the PATH.
+func findExternalDriver(cfg *Config) driver {
+ const toolPrefix = "GOPACKAGESDRIVER="
+ tool := ""
+ for _, env := range cfg.Env {
+ if val := strings.TrimPrefix(env, toolPrefix); val != env {
+ tool = val
+ }
+ }
+ if tool != "" && tool == "off" {
+ return nil
+ }
+ if tool == "" {
+ var err error
+ tool, err = exec.LookPath("gopackagesdriver")
+ if err != nil {
+ return nil
+ }
+ }
+ return func(cfg *Config, words ...string) (*driverResponse, error) {
+ req, err := json.Marshal(driverRequest{
+ Mode: cfg.Mode,
+ Env: cfg.Env,
+ BuildFlags: cfg.BuildFlags,
+ Tests: cfg.Tests,
+ Overlay: cfg.Overlay,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to encode message to driver tool: %v", err)
+ }
+
+ buf := new(bytes.Buffer)
+ cmd := exec.CommandContext(cfg.Context, tool, words...)
+ cmd.Dir = cfg.Dir
+ cmd.Env = cfg.Env
+ cmd.Stdin = bytes.NewReader(req)
+ cmd.Stdout = buf
+ cmd.Stderr = new(bytes.Buffer)
+ if err := cmd.Run(); err != nil {
+ return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr)
+ }
+ var response driverResponse
+ if err := json.Unmarshal(buf.Bytes(), &response); err != nil {
+ return nil, err
+ }
+ return &response, nil
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/packages/golist.go b/vendor/golang.org/x/tools/go/packages/golist.go
new file mode 100644
index 000000000..a5717cc06
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/golist.go
@@ -0,0 +1,806 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "go/types"
+ "io/ioutil"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "reflect"
+ "regexp"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "golang.org/x/tools/go/internal/packagesdriver"
+ "golang.org/x/tools/internal/gopathwalk"
+ "golang.org/x/tools/internal/semver"
+)
+
+// debug controls verbose logging.
+var debug, _ = strconv.ParseBool(os.Getenv("GOPACKAGESDEBUG"))
+
+// A goTooOldError reports that the go command
+// found by exec.LookPath is too old to use the new go list behavior.
+type goTooOldError struct {
+ error
+}
+
+// responseDeduper wraps a driverResponse, deduplicating its contents.
+type responseDeduper struct {
+ seenRoots map[string]bool
+ seenPackages map[string]*Package
+ dr *driverResponse
+}
+
+// init fills in r with a driverResponse.
+func (r *responseDeduper) init(dr *driverResponse) {
+ r.dr = dr
+ r.seenRoots = map[string]bool{}
+ r.seenPackages = map[string]*Package{}
+ for _, pkg := range dr.Packages {
+ r.seenPackages[pkg.ID] = pkg
+ }
+ for _, root := range dr.Roots {
+ r.seenRoots[root] = true
+ }
+}
+
+func (r *responseDeduper) addPackage(p *Package) {
+ if r.seenPackages[p.ID] != nil {
+ return
+ }
+ r.seenPackages[p.ID] = p
+ r.dr.Packages = append(r.dr.Packages, p)
+}
+
+func (r *responseDeduper) addRoot(id string) {
+ if r.seenRoots[id] {
+ return
+ }
+ r.seenRoots[id] = true
+ r.dr.Roots = append(r.dr.Roots, id)
+}
+
+// goListDriver uses the go list command to interpret the patterns and produce
+// the build system package structure.
+// See driver for more details.
+func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) {
+	var sizes types.Sizes
+	var sizeserr error
+	var sizeswg sync.WaitGroup
+	if cfg.Mode >= LoadTypes {
+		sizeswg.Add(1)
+		go func() {
+			sizes, sizeserr = getSizes(cfg)
+			sizeswg.Done()
+		}()
+	}
+
+	// Determine files requested in contains patterns
+	var containFiles []string
+	var packagesNamed []string
+	restPatterns := make([]string, 0, len(patterns))
+	// Extract file= and other [querytype]= patterns. Report an error if querytype
+	// doesn't exist.
+extractQueries:
+	for _, pattern := range patterns {
+		eqidx := strings.Index(pattern, "=")
+		if eqidx < 0 {
+			restPatterns = append(restPatterns, pattern)
+		} else {
+			query, value := pattern[:eqidx], pattern[eqidx+len("="):]
+			switch query {
+			case "file":
+				containFiles = append(containFiles, value)
+			case "pattern":
+				restPatterns = append(restPatterns, value)
+			case "name":
+				packagesNamed = append(packagesNamed, value)
+			case "": // not a reserved query
+				restPatterns = append(restPatterns, pattern)
+			default:
+				for _, rune := range query {
+					if rune < 'a' || rune > 'z' { // not a reserved query
+						restPatterns = append(restPatterns, pattern)
+						continue extractQueries
+					}
+				}
+				// Reject all other patterns containing "="
+				return nil, fmt.Errorf("invalid query type %q in query pattern %q", query, pattern)
+			}
+		}
+	}
+
+	// TODO(matloob): Remove the definition of listfunc and just use golistPackages once go1.12 is released.
+	var listfunc driver
+	var isFallback bool
+	listfunc = func(cfg *Config, words ...string) (*driverResponse, error) {
+		response, err := golistDriverCurrent(cfg, words...)
+		if _, ok := err.(goTooOldError); ok {
+			isFallback = true
+			listfunc = golistDriverFallback
+			return listfunc(cfg, words...)
+		}
+		listfunc = golistDriverCurrent
+		return response, err
+	}
+
+	response := &responseDeduper{}
+	var err error
+
+	// See if we have any patterns to pass through to go list. Zero initial
+	// patterns also requires a go list call, since it's the equivalent of
+	// ".".
+	if len(restPatterns) > 0 || len(patterns) == 0 {
+		dr, err := listfunc(cfg, restPatterns...)
+		if err != nil {
+			return nil, err
+		}
+		response.init(dr)
+	} else {
+		response.init(&driverResponse{})
+	}
+
+	sizeswg.Wait()
+	if sizeserr != nil {
+		return nil, sizeserr
+	}
+	// types.SizesFor always returns nil or a *types.StdSizes
+	response.dr.Sizes, _ = sizes.(*types.StdSizes)
+
+	var containsCandidates []string
+
+	if len(containFiles) != 0 {
+		if err := runContainsQueries(cfg, listfunc, isFallback, response, containFiles); err != nil {
+			return nil, err
+		}
+	}
+
+	if len(packagesNamed) != 0 {
+		if err := runNamedQueries(cfg, listfunc, response, packagesNamed); err != nil {
+			return nil, err
+		}
+	}
+
+	modifiedPkgs, needPkgs, err := processGolistOverlay(cfg, response.dr)
+	if err != nil {
+		return nil, err
+	}
+	if len(containFiles) > 0 {
+		containsCandidates = append(containsCandidates, modifiedPkgs...)
+		containsCandidates = append(containsCandidates, needPkgs...)
+	}
+
+	if len(needPkgs) > 0 {
+		err = addNeededOverlayPackages(cfg, listfunc, response, needPkgs)
+		if err != nil {
+			return nil, err
+		}
+	}
+	// Check candidate packages for containFiles.
+	if len(containFiles) > 0 {
+		for _, id := range containsCandidates {
+			pkg := response.seenPackages[id]
+			for _, f := range containFiles {
+				for _, g := range pkg.GoFiles {
+					if sameFile(f, g) {
+						response.addRoot(id)
+					}
+				}
+			}
+		}
+	}
+
+	return response.dr, nil
+}
+
+func addNeededOverlayPackages(cfg *Config, driver driver, response *responseDeduper, pkgs []string) error {
+ dr, err := driver(cfg, pkgs...)
+ if err != nil {
+ return err
+ }
+ for _, pkg := range dr.Packages {
+ response.addPackage(pkg)
+ }
+ return nil
+}
+
+func runContainsQueries(cfg *Config, driver driver, isFallback bool, response *responseDeduper, queries []string) error {
+ for _, query := range queries {
+ // TODO(matloob): Do only one query per directory.
+ fdir := filepath.Dir(query)
+ // Pass absolute path of directory to go list so that it knows to treat it as a directory,
+ // not a package path.
+ pattern, err := filepath.Abs(fdir)
+ if err != nil {
+ return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err)
+ }
+ if isFallback {
+ pattern = "."
+ cfg.Dir = fdir
+ }
+
+ dirResponse, err := driver(cfg, pattern)
+ if err != nil {
+ return err
+ }
+ isRoot := make(map[string]bool, len(dirResponse.Roots))
+ for _, root := range dirResponse.Roots {
+ isRoot[root] = true
+ }
+ for _, pkg := range dirResponse.Packages {
+ // Add any new packages to the main set
+ // We don't bother to filter packages that will be dropped by the changes of roots,
+ // that will happen anyway during graph construction outside this function.
+ // Over-reporting packages is not a problem.
+ response.addPackage(pkg)
+ // if the package was not a root one, it cannot have the file
+ if !isRoot[pkg.ID] {
+ continue
+ }
+ for _, pkgFile := range pkg.GoFiles {
+ if filepath.Base(query) == filepath.Base(pkgFile) {
+ response.addRoot(pkg.ID)
+ break
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// modCacheRegexp splits a path in a module cache into module, module version, and package.
+var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`)
+
+func runNamedQueries(cfg *Config, driver driver, response *responseDeduper, queries []string) error {
+	// calling `go env` isn't free; bail out if there's nothing to do.
+	if len(queries) == 0 {
+		return nil
+	}
+	// Determine which directories are relevant to scan.
+	roots, modRoot, err := roots(cfg)
+	if err != nil {
+		return err
+	}
+
+	// Scan the selected directories. Simple matches, from GOPATH/GOROOT
+	// or the local module, can simply be "go list"ed. Matches from the
+	// module cache need special treatment.
+	var matchesMu sync.Mutex
+	var simpleMatches, modCacheMatches []string
+	add := func(root gopathwalk.Root, dir string) {
+		// Walk calls this concurrently; protect the result slices.
+		matchesMu.Lock()
+		defer matchesMu.Unlock()
+
+		path := dir[len(root.Path)+1:]
+		if pathMatchesQueries(path, queries) {
+			switch root.Type {
+			case gopathwalk.RootModuleCache:
+				modCacheMatches = append(modCacheMatches, path)
+			case gopathwalk.RootCurrentModule:
+				// We'd need to read go.mod to find the full
+				// import path. Relative's easier.
+				rel, err := filepath.Rel(cfg.Dir, dir)
+				if err != nil {
+					// This ought to be impossible, since
+					// we found dir in the current module.
+					panic(err)
+				}
+				simpleMatches = append(simpleMatches, "./"+rel)
+			case gopathwalk.RootGOPATH, gopathwalk.RootGOROOT:
+				simpleMatches = append(simpleMatches, path)
+			}
+		}
+	}
+
+	startWalk := time.Now()
+	gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: modRoot != "", Debug: debug})
+	if debug {
+		log.Printf("%v for walk", time.Since(startWalk))
+	}
+
+	// Weird special case: the top-level package in a module will be in
+	// whatever directory the user checked the repository out into. It's
+	// more reasonable for that to not match the package name. So, if there
+	// are any Go files in the mod root, query it just to be safe.
+	if modRoot != "" {
+		rel, err := filepath.Rel(cfg.Dir, modRoot)
+		if err != nil {
+			panic(err) // See above.
+		}
+
+		files, _ := ioutil.ReadDir(modRoot) // error deliberately ignored: unreadable modRoot just yields no match
+		for _, f := range files {
+			if strings.HasSuffix(f.Name(), ".go") {
+				simpleMatches = append(simpleMatches, rel)
+				break
+			}
+		}
+	}
+
+	addResponse := func(r *driverResponse) {
+		for _, pkg := range r.Packages {
+			response.addPackage(pkg)
+			for _, name := range queries {
+				if pkg.Name == name {
+					response.addRoot(pkg.ID)
+					break
+				}
+			}
+		}
+	}
+
+	if len(simpleMatches) != 0 {
+		resp, err := driver(cfg, simpleMatches...)
+		if err != nil {
+			return err
+		}
+		addResponse(resp)
+	}
+
+	// Module cache matches are tricky. We want to avoid downloading new
+	// versions of things, so we need to use the ones present in the cache.
+	// go list doesn't accept version specifiers, so we have to write out a
+	// temporary module, and do the list in that module.
+	if len(modCacheMatches) != 0 {
+		// Collect all the matches, deduplicating by major version
+		// and preferring the newest.
+		type modInfo struct {
+			mod   string
+			major string
+		}
+		mods := make(map[modInfo]string)
+		var imports []string
+		for _, modPath := range modCacheMatches {
+			matches := modCacheRegexp.FindStringSubmatch(modPath)
+			mod, ver := filepath.ToSlash(matches[1]), matches[2]
+			importPath := filepath.ToSlash(filepath.Join(matches[1], matches[3]))
+
+			major := semver.Major(ver)
+			if prevVer, ok := mods[modInfo{mod, major}]; !ok || semver.Compare(ver, prevVer) > 0 {
+				mods[modInfo{mod, major}] = ver
+			}
+
+			imports = append(imports, importPath)
+		}
+
+		// Build the temporary module.
+		var gomod bytes.Buffer
+		gomod.WriteString("module modquery\nrequire (\n")
+		for mod, version := range mods {
+			gomod.WriteString("\t" + mod.mod + " " + version + "\n")
+		}
+		gomod.WriteString(")\n")
+
+		tmpCfg := *cfg
+
+		// We're only trying to look at stuff in the module cache, so
+		// disable the network. This should speed things up, and has
+		// prevented errors in at least one case, #28518.
+		tmpCfg.Env = append([]string{"GOPROXY=off"}, cfg.Env...)
+
+		var err error
+		tmpCfg.Dir, err = ioutil.TempDir("", "gopackages-modquery")
+		if err != nil {
+			return err
+		}
+		defer os.RemoveAll(tmpCfg.Dir)
+
+		if err := ioutil.WriteFile(filepath.Join(tmpCfg.Dir, "go.mod"), gomod.Bytes(), 0777); err != nil {
+			return fmt.Errorf("writing go.mod for module cache query: %v", err)
+		}
+
+		// Run the query, using the import paths calculated from the matches above.
+		resp, err := driver(&tmpCfg, imports...)
+		if err != nil {
+			return fmt.Errorf("querying module cache matches: %v", err)
+		}
+		addResponse(resp)
+	}
+
+	return nil
+}
+
+func getSizes(cfg *Config) (types.Sizes, error) {
+ return packagesdriver.GetSizesGolist(cfg.Context, cfg.BuildFlags, cfg.Env, cfg.Dir, usesExportData(cfg))
+}
+
+// roots selects the appropriate paths to walk based on the passed-in configuration,
+// particularly the environment and the presence of a go.mod in cfg.Dir's parents.
+func roots(cfg *Config) ([]gopathwalk.Root, string, error) {
+ stdout, err := invokeGo(cfg, "env", "GOROOT", "GOPATH", "GOMOD")
+ if err != nil {
+ return nil, "", err
+ }
+
+ fields := strings.Split(stdout.String(), "\n")
+ if len(fields) != 4 || len(fields[3]) != 0 {
+ return nil, "", fmt.Errorf("go env returned unexpected output: %q", stdout.String())
+ }
+ goroot, gopath, gomod := fields[0], filepath.SplitList(fields[1]), fields[2]
+ var modDir string
+ if gomod != "" {
+ modDir = filepath.Dir(gomod)
+ }
+
+ var roots []gopathwalk.Root
+ // Always add GOROOT.
+ roots = append(roots, gopathwalk.Root{filepath.Join(goroot, "/src"), gopathwalk.RootGOROOT})
+ // If modules are enabled, scan the module dir.
+ if modDir != "" {
+ roots = append(roots, gopathwalk.Root{modDir, gopathwalk.RootCurrentModule})
+ }
+ // Add either GOPATH/src or GOPATH/pkg/mod, depending on module mode.
+ for _, p := range gopath {
+ if modDir != "" {
+ roots = append(roots, gopathwalk.Root{filepath.Join(p, "/pkg/mod"), gopathwalk.RootModuleCache})
+ } else {
+ roots = append(roots, gopathwalk.Root{filepath.Join(p, "/src"), gopathwalk.RootGOPATH})
+ }
+ }
+
+ return roots, modDir, nil
+}
+
+// These functions were copied from goimports. See further documentation there.
+
+// pathMatchesQueries is adapted from pkgIsCandidate.
+// TODO: is it reasonable to do Contains here, rather than an exact match on a path component?
+func pathMatchesQueries(path string, queries []string) bool {
+ lastTwo := lastTwoComponents(path)
+ for _, query := range queries {
+ if strings.Contains(lastTwo, query) {
+ return true
+ }
+ if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(query) {
+ lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
+ if strings.Contains(lastTwo, query) {
+ return true
+ }
+ }
+ }
+ return false
+}
+
+// lastTwoComponents returns at most the last two path components
+// of v, using either / or \ as the path separator.
+func lastTwoComponents(v string) string {
+ nslash := 0
+ for i := len(v) - 1; i >= 0; i-- {
+ if v[i] == '/' || v[i] == '\\' {
+ nslash++
+ if nslash == 2 {
+ return v[i:]
+ }
+ }
+ }
+ return v
+}
+
+func hasHyphenOrUpperASCII(s string) bool {
+ for i := 0; i < len(s); i++ {
+ b := s[i]
+ if b == '-' || ('A' <= b && b <= 'Z') {
+ return true
+ }
+ }
+ return false
+}
+
+func lowerASCIIAndRemoveHyphen(s string) (ret string) {
+ buf := make([]byte, 0, len(s))
+ for i := 0; i < len(s); i++ {
+ b := s[i]
+ switch {
+ case b == '-':
+ continue
+ case 'A' <= b && b <= 'Z':
+ buf = append(buf, b+('a'-'A'))
+ default:
+ buf = append(buf, b)
+ }
+ }
+ return string(buf)
+}
+
+// Fields must match go list;
+// see $GOROOT/src/cmd/go/internal/load/pkg.go.
+type jsonPackage struct {
+ ImportPath string
+ Dir string
+ Name string
+ Export string
+ GoFiles []string
+ CompiledGoFiles []string
+ CFiles []string
+ CgoFiles []string
+ CXXFiles []string
+ MFiles []string
+ HFiles []string
+ FFiles []string
+ SFiles []string
+ SwigFiles []string
+ SwigCXXFiles []string
+ SysoFiles []string
+ Imports []string
+ ImportMap map[string]string
+ Deps []string
+ TestGoFiles []string
+ TestImports []string
+ XTestGoFiles []string
+ XTestImports []string
+ ForTest string // q in a "p [q.test]" package, else ""
+ DepOnly bool
+
+ Error *jsonPackageError
+}
+
+type jsonPackageError struct {
+ ImportStack []string
+ Pos string
+ Err string
+}
+
+func otherFiles(p *jsonPackage) [][]string {
+ return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles}
+}
+
+// golistDriverCurrent uses the "go list" command to expand the
+// pattern words and return metadata for the specified packages.
+// dir may be "" and env may be nil, as per os/exec.Command.
+func golistDriverCurrent(cfg *Config, words ...string) (*driverResponse, error) {
+ // go list uses the following identifiers in ImportPath and Imports:
+ //
+ // "p" -- importable package or main (command)
+ // "q.test" -- q's test executable
+ // "p [q.test]" -- variant of p as built for q's test executable
+ // "q_test [q.test]" -- q's external test package
+ //
+ // The packages p that are built differently for a test q.test
+ // are q itself, plus any helpers used by the external test q_test,
+ // typically including "testing" and all its dependencies.
+
+ // Run "go list" for complete
+ // information on the specified packages.
+ buf, err := invokeGo(cfg, golistargs(cfg, words)...)
+ if err != nil {
+ return nil, err
+ }
+ seen := make(map[string]*jsonPackage)
+ // Decode the JSON and convert it to Package form.
+ var response driverResponse
+ for dec := json.NewDecoder(buf); dec.More(); {
+ p := new(jsonPackage)
+ if err := dec.Decode(p); err != nil {
+ return nil, fmt.Errorf("JSON decoding failed: %v", err)
+ }
+
+ if p.ImportPath == "" {
+ // The documentation for go list says that “[e]rroneous packages will have
+ // a non-empty ImportPath”. If for some reason it comes back empty, we
+ // prefer to error out rather than silently discarding data or handing
+ // back a package without any way to refer to it.
+ if p.Error != nil {
+ return nil, Error{
+ Pos: p.Error.Pos,
+ Msg: p.Error.Err,
+ }
+ }
+ return nil, fmt.Errorf("package missing import path: %+v", p)
+ }
+
+ if old, found := seen[p.ImportPath]; found {
+ if !reflect.DeepEqual(p, old) {
+ return nil, fmt.Errorf("go list repeated package %v with different values", p.ImportPath)
+ }
+ // skip the duplicate
+ continue
+ }
+ seen[p.ImportPath] = p
+
+ pkg := &Package{
+ Name: p.Name,
+ ID: p.ImportPath,
+ GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
+ CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
+ OtherFiles: absJoin(p.Dir, otherFiles(p)...),
+ }
+
+ // Workaround for https://golang.org/issue/28749.
+ // TODO(adonovan): delete before go1.12 release.
+ out := pkg.CompiledGoFiles[:0]
+ for _, f := range pkg.CompiledGoFiles {
+ if strings.HasSuffix(f, ".s") {
+ continue
+ }
+ out = append(out, f)
+ }
+ pkg.CompiledGoFiles = out
+
+ // Extract the PkgPath from the package's ID.
+ if i := strings.IndexByte(pkg.ID, ' '); i >= 0 {
+ pkg.PkgPath = pkg.ID[:i]
+ } else {
+ pkg.PkgPath = pkg.ID
+ }
+
+ if pkg.PkgPath == "unsafe" {
+ pkg.GoFiles = nil // ignore fake unsafe.go file
+ }
+
+ // Assume go list emits only absolute paths for Dir.
+ if p.Dir != "" && !filepath.IsAbs(p.Dir) {
+ log.Fatalf("internal error: go list returned non-absolute Package.Dir: %s", p.Dir)
+ }
+
+ if p.Export != "" && !filepath.IsAbs(p.Export) {
+ pkg.ExportFile = filepath.Join(p.Dir, p.Export)
+ } else {
+ pkg.ExportFile = p.Export
+ }
+
+ // imports
+ //
+ // Imports contains the IDs of all imported packages.
+ // ImportsMap records (path, ID) only where they differ.
+ ids := make(map[string]bool)
+ for _, id := range p.Imports {
+ ids[id] = true
+ }
+ pkg.Imports = make(map[string]*Package)
+ for path, id := range p.ImportMap {
+ pkg.Imports[path] = &Package{ID: id} // non-identity import
+ delete(ids, id)
+ }
+ for id := range ids {
+ if id == "C" {
+ continue
+ }
+
+ pkg.Imports[id] = &Package{ID: id} // identity import
+ }
+ if !p.DepOnly {
+ response.Roots = append(response.Roots, pkg.ID)
+ }
+
+ // Work around for pre-go.1.11 versions of go list.
+ // TODO(matloob): they should be handled by the fallback.
+ // Can we delete this?
+ if len(pkg.CompiledGoFiles) == 0 {
+ pkg.CompiledGoFiles = pkg.GoFiles
+ }
+
+ if p.Error != nil {
+ pkg.Errors = append(pkg.Errors, Error{
+ Pos: p.Error.Pos,
+ Msg: p.Error.Err,
+ })
+ }
+
+ response.Packages = append(response.Packages, pkg)
+ }
+
+ return &response, nil
+}
+
// absJoin flattens the given lists of file names into a single slice,
// making each relative name absolute by joining it with dir. Names that
// are already absolute are passed through unchanged.
func absJoin(dir string, fileses ...[]string) []string {
	var joined []string
	for _, list := range fileses {
		for _, name := range list {
			if filepath.IsAbs(name) {
				joined = append(joined, name)
			} else {
				joined = append(joined, filepath.Join(dir, name))
			}
		}
	}
	return joined
}
+
+func golistargs(cfg *Config, words []string) []string {
+ fullargs := []string{
+ "list", "-e", "-json", "-compiled",
+ fmt.Sprintf("-test=%t", cfg.Tests),
+ fmt.Sprintf("-export=%t", usesExportData(cfg)),
+ fmt.Sprintf("-deps=%t", cfg.Mode >= LoadImports),
+ // go list doesn't let you pass -test and -find together,
+ // probably because you'd just get the TestMain.
+ fmt.Sprintf("-find=%t", cfg.Mode < LoadImports && !cfg.Tests),
+ }
+ fullargs = append(fullargs, cfg.BuildFlags...)
+ fullargs = append(fullargs, "--")
+ fullargs = append(fullargs, words...)
+ return fullargs
+}
+
// invokeGo returns the stdout of a go command invocation.
// The command runs in cfg.Dir with cfg.Env; stderr is captured separately
// and is only surfaced in error messages or optional debug output.
func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
	if debug {
		defer func(start time.Time) { log.Printf("%s for %v", time.Since(start), cmdDebugStr(cfg, args...)) }(time.Now())
	}
	stdout := new(bytes.Buffer)
	stderr := new(bytes.Buffer)
	cmd := exec.CommandContext(cfg.Context, "go", args...)
	// On darwin the cwd gets resolved to the real path, which breaks anything that
	// expects the working directory to keep the original path, including the
	// go command when dealing with modules.
	// The Go stdlib has a special feature where if the cwd and the PWD are the
	// same node then it trusts the PWD, so by setting it in the env for the child
	// process we fix up all the paths returned by the go command.
	cmd.Env = append(append([]string{}, cfg.Env...), "PWD="+cfg.Dir)
	cmd.Dir = cfg.Dir
	cmd.Stdout = stdout
	cmd.Stderr = stderr
	if err := cmd.Run(); err != nil {
		exitErr, ok := err.(*exec.ExitError)
		if !ok {
			// Catastrophic error:
			// - executable not found
			// - context cancellation
			return nil, fmt.Errorf("couldn't exec 'go %v': %s %T", args, err, err)
		}

		// Old go version?
		if strings.Contains(stderr.String(), "flag provided but not defined") {
			return nil, goTooOldError{fmt.Errorf("unsupported version of go: %s: %s", exitErr, stderr)}
		}

		// Export mode entails a build.
		// If that build fails, errors appear on stderr
		// (despite the -e flag) and the Export field is blank.
		// Do not fail in that case.
		// The same is true if an ad-hoc package given to go list doesn't exist.
		// TODO(matloob): Remove these once we can depend on go list to exit with a zero status with -e even when
		// packages don't exist or a build fails.
		if !usesExportData(cfg) && !containsGoFile(args) {
			return nil, fmt.Errorf("go %v: %s: %s", args, exitErr, stderr)
		}
		// Otherwise fall through: return whatever go list managed to
		// print on stdout before it failed.
	}

	// As of writing, go list -export prints some non-fatal compilation
	// errors to stderr, even with -e set. We would prefer that it put
	// them in the Package.Error JSON (see https://golang.org/issue/26319).
	// In the meantime, there's nowhere good to put them, but they can
	// be useful for debugging. Print them if $GOPACKAGESPRINTGOLISTERRORS
	// is set.
	if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTGOLISTERRORS") != "" {
		fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(cfg, args...), stderr)
	}

	// debugging
	if false {
		fmt.Fprintf(os.Stderr, "%s stdout: <<%s>>\n", cmdDebugStr(cfg, args...), stdout)
	}

	return stdout, nil
}
+
// containsGoFile reports whether any element of s names a Go source
// file, i.e. ends in ".go".
func containsGoFile(s []string) bool {
	for i := range s {
		if strings.HasSuffix(s[i], ".go") {
			return true
		}
	}
	return false
}
+
+func cmdDebugStr(cfg *Config, args ...string) string {
+ env := make(map[string]string)
+ for _, kv := range cfg.Env {
+ split := strings.Split(kv, "=")
+ k, v := split[0], split[1]
+ env[k] = v
+ }
+
+ return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v PWD=%v go %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["PWD"], args)
+}
diff --git a/vendor/golang.org/x/tools/go/packages/golist_fallback.go b/vendor/golang.org/x/tools/go/packages/golist_fallback.go
new file mode 100644
index 000000000..141fa19ac
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/golist_fallback.go
@@ -0,0 +1,450 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+import (
+ "encoding/json"
+ "fmt"
+ "go/build"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "golang.org/x/tools/go/internal/cgo"
+)
+
+// TODO(matloob): Delete this file once Go 1.12 is released.
+
+// This file provides backwards compatibility support for
+// loading for versions of Go earlier than 1.11. This support is meant to
+// assist with migration to the Package API until there's
+// widespread adoption of these newer Go versions.
+// This support will be removed once Go 1.12 is released
+// in Q1 2019.
+
// golistDriverFallback loads packages with pre-Go 1.11 versions of the
// go command, which lack -compiled/-deps/-test support. It reconstructs
// that information itself: running cgo by hand, synthesizing test, xtest
// and testmain packages, and creating test variants of shared
// dependencies after all packages have been loaded.
func golistDriverFallback(cfg *Config, words ...string) (*driverResponse, error) {
	// Turn absolute paths into GOROOT and GOPATH-relative paths to provide to go list.
	// This will have surprising behavior if GOROOT or GOPATH contain multiple packages with the same
	// path and a user provides an absolute path to a directory that's shadowed by an earlier
	// directory in GOROOT or GOPATH with the same package path.
	words = cleanAbsPaths(cfg, words)

	original, deps, err := getDeps(cfg, words...)
	if err != nil {
		return nil, err
	}

	var tmpdir string // used for generated cgo files
	// Pairs of (package under test, external test package) whose import
	// graphs must be rewritten to test variants once loading is complete.
	var needsTestVariant []struct {
		pkg, xtestPkg *Package
	}

	var response driverResponse
	allPkgs := make(map[string]bool) // IDs already added, to dedupe
	// addPackage converts one decoded go list record into a Package (plus
	// test/xtest/testmain packages when cfg.Tests and isRoot).
	addPackage := func(p *jsonPackage, isRoot bool) {
		id := p.ImportPath

		if allPkgs[id] {
			return
		}
		allPkgs[id] = true

		pkgpath := id

		if pkgpath == "unsafe" {
			p.GoFiles = nil // ignore fake unsafe.go file
		}

		importMap := func(importlist []string) map[string]*Package {
			importMap := make(map[string]*Package)
			for _, id := range importlist {

				if id == "C" {
					// cgo implies these pseudo-imports.
					for _, path := range []string{"unsafe", "syscall", "runtime/cgo"} {
						if pkgpath != path && importMap[path] == nil {
							importMap[path] = &Package{ID: path}
						}
					}
					continue
				}
				importMap[vendorlessPath(id)] = &Package{ID: id}
			}
			return importMap
		}
		compiledGoFiles := absJoin(p.Dir, p.GoFiles)
		// Use a function to simplify control flow. It's just a bunch of gotos.
		var cgoErrors []error
		var outdir string
		// getOutdir lazily creates a per-package directory under tmpdir
		// for generated (cgo/testmain) files.
		getOutdir := func() (string, error) {
			if outdir != "" {
				return outdir, nil
			}
			if tmpdir == "" {
				if tmpdir, err = ioutil.TempDir("", "gopackages"); err != nil {
					return "", err
				}
			}
			outdir = filepath.Join(tmpdir, strings.Replace(p.ImportPath, "/", "_", -1))
			if err := os.MkdirAll(outdir, 0755); err != nil {
				outdir = ""
				return "", err
			}
			return outdir, nil
		}
		processCgo := func() bool {
			// Suppress any cgo errors. Any relevant errors will show up in typechecking.
			// TODO(matloob): Skip running cgo if Mode < LoadTypes.
			outdir, err := getOutdir()
			if err != nil {
				cgoErrors = append(cgoErrors, err)
				return false
			}
			files, _, err := runCgo(p.Dir, outdir, cfg.Env)
			if err != nil {
				cgoErrors = append(cgoErrors, err)
				return false
			}
			compiledGoFiles = append(compiledGoFiles, files...)
			return true
		}
		if len(p.CgoFiles) == 0 || !processCgo() {
			compiledGoFiles = append(compiledGoFiles, absJoin(p.Dir, p.CgoFiles)...) // Punt to typechecker.
		}
		if isRoot {
			response.Roots = append(response.Roots, id)
		}
		pkg := &Package{
			ID:              id,
			Name:            p.Name,
			GoFiles:         absJoin(p.Dir, p.GoFiles, p.CgoFiles),
			CompiledGoFiles: compiledGoFiles,
			OtherFiles:      absJoin(p.Dir, otherFiles(p)...),
			PkgPath:         pkgpath,
			Imports:         importMap(p.Imports),
			// TODO(matloob): set errors on the Package to cgoErrors
		}
		if p.Error != nil {
			pkg.Errors = append(pkg.Errors, Error{
				Pos: p.Error.Pos,
				Msg: p.Error.Err,
			})
		}
		response.Packages = append(response.Packages, pkg)
		if cfg.Tests && isRoot {
			// Synthesize the test, external-test and testmain packages
			// that Go 1.11's go list -test would have reported.
			testID := fmt.Sprintf("%s [%s.test]", id, id)
			if len(p.TestGoFiles) > 0 || len(p.XTestGoFiles) > 0 {
				response.Roots = append(response.Roots, testID)
				testPkg := &Package{
					ID:              testID,
					Name:            p.Name,
					GoFiles:         absJoin(p.Dir, p.GoFiles, p.CgoFiles, p.TestGoFiles),
					CompiledGoFiles: append(compiledGoFiles, absJoin(p.Dir, p.TestGoFiles)...),
					OtherFiles:      absJoin(p.Dir, otherFiles(p)...),
					PkgPath:         pkgpath,
					Imports:         importMap(append(p.Imports, p.TestImports...)),
					// TODO(matloob): set errors on the Package to cgoErrors
				}
				response.Packages = append(response.Packages, testPkg)
				var xtestPkg *Package
				if len(p.XTestGoFiles) > 0 {
					xtestID := fmt.Sprintf("%s_test [%s.test]", id, id)
					response.Roots = append(response.Roots, xtestID)
					// Generate test variants for all packages q where a path exists
					// such that xtestPkg -> ... -> q -> ... -> p (where p is the package under test)
					// and rewrite all import map entries of p to point to testPkg (the test variant of
					// p), and of each q to point to the test variant of that q.
					xtestPkg = &Package{
						ID:              xtestID,
						Name:            p.Name + "_test",
						GoFiles:         absJoin(p.Dir, p.XTestGoFiles),
						CompiledGoFiles: absJoin(p.Dir, p.XTestGoFiles),
						PkgPath:         pkgpath + "_test",
						Imports:         importMap(p.XTestImports),
					}
					// Add to list of packages we need to rewrite imports for to refer to test variants.
					// We may need to create a test variant of a package that hasn't been loaded yet, so
					// the test variants need to be created later.
					needsTestVariant = append(needsTestVariant, struct{ pkg, xtestPkg *Package }{pkg, xtestPkg})
					response.Packages = append(response.Packages, xtestPkg)
				}
				// testmain package
				testmainID := id + ".test"
				response.Roots = append(response.Roots, testmainID)
				imports := map[string]*Package{}
				imports[testPkg.PkgPath] = &Package{ID: testPkg.ID}
				if xtestPkg != nil {
					imports[xtestPkg.PkgPath] = &Package{ID: xtestPkg.ID}
				}
				testmainPkg := &Package{
					ID:      testmainID,
					Name:    "main",
					PkgPath: testmainID,
					Imports: imports,
				}
				response.Packages = append(response.Packages, testmainPkg)
				outdir, err := getOutdir()
				if err != nil {
					testmainPkg.Errors = append(testmainPkg.Errors, Error{
						Pos:  "-",
						Msg:  fmt.Sprintf("failed to generate testmain: %v", err),
						Kind: ListError,
					})
					return
				}
				// Don't use a .go extension on the file, so that the tests think the file is inside GOCACHE.
				// This allows the same test to test the pre- and post-Go 1.11 go list logic because the Go 1.11
				// go list generates test mains in the cache, and the test code knows not to rely on paths in the
				// cache to stay stable.
				testmain := filepath.Join(outdir, "testmain-go")
				extraimports, extradeps, err := generateTestmain(testmain, testPkg, xtestPkg)
				if err != nil {
					testmainPkg.Errors = append(testmainPkg.Errors, Error{
						Pos:  "-",
						Msg:  fmt.Sprintf("failed to generate testmain: %v", err),
						Kind: ListError,
					})
				}
				deps = append(deps, extradeps...)
				for _, imp := range extraimports { // testing, testing/internal/testdeps, and maybe os
					imports[imp] = &Package{ID: imp}
				}
				testmainPkg.GoFiles = []string{testmain}
				testmainPkg.CompiledGoFiles = []string{testmain}
			}
		}
	}

	for _, pkg := range original {
		addPackage(pkg, true)
	}
	if cfg.Mode < LoadImports || len(deps) == 0 {
		return &response, nil
	}

	// Second go list run: load all transitive dependencies (including
	// extra testmain deps accumulated above).
	buf, err := invokeGo(cfg, golistArgsFallback(cfg, deps)...)
	if err != nil {
		return nil, err
	}

	// Decode the JSON and convert it to Package form.
	for dec := json.NewDecoder(buf); dec.More(); {
		p := new(jsonPackage)
		if err := dec.Decode(p); err != nil {
			return nil, fmt.Errorf("JSON decoding failed: %v", err)
		}

		addPackage(p, false)
	}

	for _, v := range needsTestVariant {
		createTestVariants(&response, v.pkg, v.xtestPkg)
	}

	return &response, nil
}
+
+func createTestVariants(response *driverResponse, pkgUnderTest, xtestPkg *Package) {
+ allPkgs := make(map[string]*Package)
+ for _, pkg := range response.Packages {
+ allPkgs[pkg.ID] = pkg
+ }
+ needsTestVariant := make(map[string]bool)
+ needsTestVariant[pkgUnderTest.ID] = true
+ var needsVariantRec func(p *Package) bool
+ needsVariantRec = func(p *Package) bool {
+ if needsTestVariant[p.ID] {
+ return true
+ }
+ for _, imp := range p.Imports {
+ if needsVariantRec(allPkgs[imp.ID]) {
+ // Don't break because we want to make sure all dependencies
+ // have been processed, and all required test variants of our dependencies
+ // exist.
+ needsTestVariant[p.ID] = true
+ }
+ }
+ if !needsTestVariant[p.ID] {
+ return false
+ }
+ // Create a clone of the package. It will share the same strings and lists of source files,
+ // but that's okay. It's only necessary for the Imports map to have a separate identity.
+ testVariant := *p
+ testVariant.ID = fmt.Sprintf("%s [%s.test]", p.ID, pkgUnderTest.ID)
+ testVariant.Imports = make(map[string]*Package)
+ for imp, pkg := range p.Imports {
+ testVariant.Imports[imp] = pkg
+ if needsTestVariant[pkg.ID] {
+ testVariant.Imports[imp] = &Package{ID: fmt.Sprintf("%s [%s.test]", pkg.ID, pkgUnderTest.ID)}
+ }
+ }
+ response.Packages = append(response.Packages, &testVariant)
+ return needsTestVariant[p.ID]
+ }
+ // finally, update the xtest package's imports
+ for imp, pkg := range xtestPkg.Imports {
+ if allPkgs[pkg.ID] == nil {
+ fmt.Printf("for %s: package %s doesn't exist\n", xtestPkg.ID, pkg.ID)
+ }
+ if needsVariantRec(allPkgs[pkg.ID]) {
+ xtestPkg.Imports[imp] = &Package{ID: fmt.Sprintf("%s [%s.test]", pkg.ID, pkgUnderTest.ID)}
+ }
+ }
+}
+
// cleanAbsPaths replaces all absolute paths with GOPATH- and GOROOT-relative
// paths. If an absolute path is not GOPATH- or GOROOT- relative, it is left as an
// absolute path so an error can be returned later.
func cleanAbsPaths(cfg *Config, words []string) []string {
	// searchpaths is populated lazily, on the first absolute directory
	// argument: GOPATH/src entries first, then GOROOT/src.
	var searchpaths []string
	var cleaned = make([]string, len(words))
	for i := range cleaned {
		cleaned[i] = words[i]
		// Ignore relative directory paths (they must already be goroot-relative) and Go source files
		// (absolute source files are already allowed for ad-hoc packages).
		// TODO(matloob): Can there be non-.go files in ad-hoc packages.
		if !filepath.IsAbs(cleaned[i]) || strings.HasSuffix(cleaned[i], ".go") {
			continue
		}
		// otherwise, it's an absolute path. Search GOPATH and GOROOT to find it.
		if searchpaths == nil {
			cmd := exec.Command("go", "env", "GOPATH", "GOROOT")
			cmd.Env = cfg.Env
			out, err := cmd.Output()
			if err != nil {
				// Non-nil empty slice marks the lookup as done so it isn't retried.
				searchpaths = []string{}
				continue // suppress the error, it will show up again when running go list
			}
			lines := strings.Split(string(out), "\n")
			// Expect exactly two values plus a trailing newline.
			if len(lines) != 3 || lines[0] == "" || lines[1] == "" || lines[2] != "" {
				continue // suppress error
			}
			// first line is GOPATH
			for _, path := range filepath.SplitList(lines[0]) {
				searchpaths = append(searchpaths, filepath.Join(path, "src"))
			}
			// second line is GOROOT
			searchpaths = append(searchpaths, filepath.Join(lines[1], "src"))
		}
		for _, sp := range searchpaths {
			// NOTE(review): plain prefix matching can also trim a sibling
			// directory that merely shares the prefix (e.g. ".../srcx") —
			// confirm whether that case can occur here.
			if strings.HasPrefix(cleaned[i], sp) {
				cleaned[i] = strings.TrimPrefix(cleaned[i], sp)
				cleaned[i] = strings.TrimLeft(cleaned[i], string(filepath.Separator))
			}
		}
	}
	return cleaned
}
+
// vendorlessPath returns the devendorized version of the import path ipath.
// For example, vendorlessPath("foo/bar/vendor/a/b") returns "a/b".
// Copied from golang.org/x/tools/imports/fix.go.
func vendorlessPath(ipath string) string {
	const sep = "/vendor/"
	// An interior vendor directory: keep everything after the last one.
	if i := strings.LastIndex(ipath, sep); i >= 0 {
		return ipath[i+len(sep):]
	}
	// A vendor directory at the root of the tree; TrimPrefix returns
	// ipath unchanged when the prefix is absent.
	return strings.TrimPrefix(ipath, "vendor/")
}
+
// getDeps runs an initial go list to determine all the dependency packages.
// It returns the decoded records for the packages named by words (initial)
// plus a sorted, deduplicated list of the import paths of their transitive
// dependencies (deps), excluding the initial packages themselves.
func getDeps(cfg *Config, words ...string) (initial []*jsonPackage, deps []string, err error) {
	buf, err := invokeGo(cfg, golistArgsFallback(cfg, words)...)
	if err != nil {
		return nil, nil, err
	}

	depsSet := make(map[string]bool)
	var testImports []string

	// Extract deps from the JSON.
	for dec := json.NewDecoder(buf); dec.More(); {
		p := new(jsonPackage)
		if err := dec.Decode(p); err != nil {
			return nil, nil, fmt.Errorf("JSON decoding failed: %v", err)
		}

		initial = append(initial, p)
		for _, dep := range p.Deps {
			depsSet[dep] = true
		}
		if cfg.Tests {
			// collect the additional imports of the test packages.
			pkgTestImports := append(p.TestImports, p.XTestImports...)
			for _, imp := range pkgTestImports {
				if depsSet[imp] {
					continue
				}
				depsSet[imp] = true
				testImports = append(testImports, imp)
			}
		}
	}
	// Get the deps of the packages imported by tests.
	// (Deps above doesn't cover packages reached only via test imports,
	// so a second go list invocation closes the transitive set.)
	if len(testImports) > 0 {
		buf, err = invokeGo(cfg, golistArgsFallback(cfg, testImports)...)
		if err != nil {
			return nil, nil, err
		}
		// Extract deps from the JSON.
		for dec := json.NewDecoder(buf); dec.More(); {
			p := new(jsonPackage)
			if err := dec.Decode(p); err != nil {
				return nil, nil, fmt.Errorf("JSON decoding failed: %v", err)
			}
			for _, dep := range p.Deps {
				depsSet[dep] = true
			}
		}
	}

	// The named packages are roots, not dependencies of themselves.
	for _, orig := range initial {
		delete(depsSet, orig.ImportPath)
	}

	deps = make([]string, 0, len(depsSet))
	for dep := range depsSet {
		deps = append(deps, dep)
	}
	sort.Strings(deps) // ensure output is deterministic
	return initial, deps, nil
}
+
+func golistArgsFallback(cfg *Config, words []string) []string {
+ fullargs := []string{"list", "-e", "-json"}
+ fullargs = append(fullargs, cfg.BuildFlags...)
+ fullargs = append(fullargs, "--")
+ fullargs = append(fullargs, words...)
+ return fullargs
+}
+
+func runCgo(pkgdir, tmpdir string, env []string) (files, displayfiles []string, err error) {
+ // Use go/build to open cgo files and determine the cgo flags, etc, from them.
+ // This is tricky so it's best to avoid reimplementing as much as we can, and
+ // we plan to delete this support once Go 1.12 is released anyways.
+ // TODO(matloob): This isn't completely correct because we're using the Default
+ // context. Perhaps we should more accurately fill in the context.
+ bp, err := build.ImportDir(pkgdir, build.ImportMode(0))
+ if err != nil {
+ return nil, nil, err
+ }
+ for _, ev := range env {
+ if v := strings.TrimPrefix(ev, "CGO_CPPFLAGS"); v != ev {
+ bp.CgoCPPFLAGS = append(bp.CgoCPPFLAGS, strings.Fields(v)...)
+ } else if v := strings.TrimPrefix(ev, "CGO_CFLAGS"); v != ev {
+ bp.CgoCFLAGS = append(bp.CgoCFLAGS, strings.Fields(v)...)
+ } else if v := strings.TrimPrefix(ev, "CGO_CXXFLAGS"); v != ev {
+ bp.CgoCXXFLAGS = append(bp.CgoCXXFLAGS, strings.Fields(v)...)
+ } else if v := strings.TrimPrefix(ev, "CGO_LDFLAGS"); v != ev {
+ bp.CgoLDFLAGS = append(bp.CgoLDFLAGS, strings.Fields(v)...)
+ }
+ }
+ return cgo.Run(bp, pkgdir, tmpdir, true)
+}
diff --git a/vendor/golang.org/x/tools/go/packages/golist_fallback_testmain.go b/vendor/golang.org/x/tools/go/packages/golist_fallback_testmain.go
new file mode 100644
index 000000000..128e00e25
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/golist_fallback_testmain.go
@@ -0,0 +1,318 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is largely based on the Go 1.10-era cmd/go/internal/test/test.go
+// testmain generation code.
+
+package packages
+
+import (
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/doc"
+ "go/parser"
+ "go/token"
+ "os"
+ "sort"
+ "strings"
+ "text/template"
+ "unicode"
+ "unicode/utf8"
+)
+
+// TODO(matloob): Delete this file once Go 1.12 is released.
+
+// This file complements golist_fallback.go by providing
+// support for generating testmains.
+
// generateTestmain writes a synthesized testmain source file to out for
// the given test packages (xtestPkg may be nil). It returns the packages
// the generated file imports directly (extraimports) and a hard-coded
// approximation of their transitive dependencies (extradeps).
func generateTestmain(out string, testPkg, xtestPkg *Package) (extraimports, extradeps []string, err error) {
	testFuncs, err := loadTestFuncs(testPkg, xtestPkg)
	if err != nil {
		return nil, nil, err
	}
	extraimports = []string{"testing", "testing/internal/testdeps"}
	if testFuncs.TestMain == nil {
		// Without a user TestMain, the generated main calls os.Exit itself.
		extraimports = append(extraimports, "os")
	}
	// Transitive dependencies of ("testing", "testing/internal/testdeps").
	// os is part of the transitive closure so it and its transitive dependencies are
	// included regardless of whether it's imported in the template below.
	extradeps = []string{
		"errors",
		"internal/cpu",
		"unsafe",
		"internal/bytealg",
		"internal/race",
		"runtime/internal/atomic",
		"runtime/internal/sys",
		"runtime",
		"sync/atomic",
		"sync",
		"io",
		"unicode",
		"unicode/utf8",
		"bytes",
		"math",
		"syscall",
		"time",
		"internal/poll",
		"internal/syscall/unix",
		"internal/testlog",
		"os",
		"math/bits",
		"strconv",
		"reflect",
		"fmt",
		"sort",
		"strings",
		"flag",
		"runtime/debug",
		"context",
		"runtime/trace",
		"testing",
		"bufio",
		"regexp/syntax",
		"regexp",
		"compress/flate",
		"encoding/binary",
		"hash",
		"hash/crc32",
		"compress/gzip",
		"path/filepath",
		"io/ioutil",
		"text/tabwriter",
		"runtime/pprof",
		"testing/internal/testdeps",
	}
	return extraimports, extradeps, writeTestmain(out, testFuncs)
}
+
+// The following is adapted from the cmd/go testmain generation code.
+
// isTestFunc tells whether fn has the type of a testing function. arg
// specifies the parameter type we look for: B, M or T.
func isTestFunc(fn *ast.FuncDecl, arg string) bool {
	// A testing function has no results and exactly one (single-name) parameter.
	if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 {
		return false
	}
	params := fn.Type.Params.List
	if params == nil || len(params) != 1 || len(params[0].Names) > 1 {
		return false
	}
	// That parameter must be a pointer type.
	ptr, ok := params[0].Type.(*ast.StarExpr)
	if !ok {
		return false
	}
	// We can't easily check that the type is *testing.M
	// because we don't know how testing has been imported,
	// but at least check that it's *M or *something.M.
	// Same applies for B and T.
	switch x := ptr.X.(type) {
	case *ast.Ident:
		return x.Name == arg
	case *ast.SelectorExpr:
		return x.Sel.Name == arg
	}
	return false
}
+
// isTest tells whether name looks like a test (or benchmark, according to prefix).
// It is a Test (say) if there is a character after Test that is not a lower-case letter.
// We don't want TesticularCancer.
func isTest(name, prefix string) bool {
	if !strings.HasPrefix(name, prefix) {
		return false
	}
	if len(name) == len(prefix) { // "Test" is ok
		return true
	}
	// Use a conventional short name rather than shadowing the
	// predeclared identifier "rune".
	r, _ := utf8.DecodeRuneInString(name[len(prefix):])
	return !unicode.IsLower(r)
}
+
+// loadTestFuncs returns the testFuncs describing the tests that will be run.
+func loadTestFuncs(ptest, pxtest *Package) (*testFuncs, error) {
+ t := &testFuncs{
+ TestPackage: ptest,
+ XTestPackage: pxtest,
+ }
+ for _, file := range ptest.GoFiles {
+ if !strings.HasSuffix(file, "_test.go") {
+ continue
+ }
+ if err := t.load(file, "_test", &t.ImportTest, &t.NeedTest); err != nil {
+ return nil, err
+ }
+ }
+ if pxtest != nil {
+ for _, file := range pxtest.GoFiles {
+ if err := t.load(file, "_xtest", &t.ImportXtest, &t.NeedXtest); err != nil {
+ return nil, err
+ }
+ }
+ }
+ return t, nil
+}
+
+// writeTestmain writes the _testmain.go file for t to the file named out.
+func writeTestmain(out string, t *testFuncs) error {
+ f, err := os.Create(out)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ if err := testmainTmpl.Execute(f, t); err != nil {
+ return err
+ }
+
+ return nil
+}
+
// testFuncs collects the test functions found in a package's test files,
// for rendering into testmainTmpl.
type testFuncs struct {
	Tests        []testFunc // Test* functions
	Benchmarks   []testFunc // Benchmark* functions
	Examples     []testFunc // Example* functions with expected output
	TestMain     *testFunc  // user-defined TestMain, if any
	TestPackage  *Package   // the package under test
	XTestPackage *Package   // the external (package foo_test) test package, if any
	ImportTest   bool       // the generated main must import the test package
	NeedTest     bool       // the test package contributes runnable functions
	ImportXtest  bool       // the generated main must import the xtest package
	NeedXtest    bool       // the xtest package contributes runnable functions
}

// Tested returns the name of the package being tested.
func (t *testFuncs) Tested() string {
	return t.TestPackage.Name
}

// testFunc describes one test, benchmark, or example to be run.
type testFunc struct {
	Package   string // imported package name (_test or _xtest)
	Name      string // function name
	Output    string // output, for examples
	Unordered bool   // output is allowed to be unordered.
}
+
// load parses the test file filename and records the TestMain, Test*,
// Benchmark* and Example* functions it declares. pkg is the package alias
// used in the generated testmain ("_test" or "_xtest"). *doImport is set
// when the file's package must be imported by the generated main, and
// *seen when the file contributes at least one runnable function.
func (t *testFuncs) load(filename, pkg string, doImport, seen *bool) error {
	var fset = token.NewFileSet()

	f, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
	if err != nil {
		return errors.New("failed to parse test file " + filename)
	}
	for _, d := range f.Decls {
		n, ok := d.(*ast.FuncDecl)
		if !ok {
			continue
		}
		if n.Recv != nil {
			continue // methods cannot be test functions
		}
		name := n.Name.String()
		switch {
		case name == "TestMain":
			if isTestFunc(n, "T") {
				// TestMain(t *testing.T) is an ordinary test, not a main.
				t.Tests = append(t.Tests, testFunc{pkg, name, "", false})
				*doImport, *seen = true, true
				continue
			}
			err := checkTestFunc(fset, n, "M")
			if err != nil {
				return err
			}
			if t.TestMain != nil {
				return errors.New("multiple definitions of TestMain")
			}
			t.TestMain = &testFunc{pkg, name, "", false}
			*doImport, *seen = true, true
		case isTest(name, "Test"):
			err := checkTestFunc(fset, n, "T")
			if err != nil {
				return err
			}
			t.Tests = append(t.Tests, testFunc{pkg, name, "", false})
			*doImport, *seen = true, true
		case isTest(name, "Benchmark"):
			err := checkTestFunc(fset, n, "B")
			if err != nil {
				return err
			}
			t.Benchmarks = append(t.Benchmarks, testFunc{pkg, name, "", false})
			*doImport, *seen = true, true
		}
	}
	ex := doc.Examples(f)
	sort.Slice(ex, func(i, j int) bool { return ex[i].Order < ex[j].Order })
	for _, e := range ex {
		*doImport = true // import test file whether executed or not
		if e.Output == "" && !e.EmptyOutput {
			// Don't run examples with no output.
			continue
		}
		t.Examples = append(t.Examples, testFunc{pkg, "Example" + e.Name, e.Output, e.Unordered})
		*seen = true
	}
	return nil
}
+
+func checkTestFunc(fset *token.FileSet, fn *ast.FuncDecl, arg string) error {
+ if !isTestFunc(fn, arg) {
+ name := fn.Name.String()
+ pos := fset.Position(fn.Pos())
+ return fmt.Errorf("%s: wrong signature for %s, must be: func %s(%s *testing.%s)", pos, name, name, strings.ToLower(arg), arg)
+ }
+ return nil
+}
+
// testmainTmpl is the source template for the synthesized test main
// package, mirroring the file cmd/go generates when building a test
// binary. Its body is emitted verbatim into the generated file, so it
// must not be edited cosmetically.
var testmainTmpl = template.Must(template.New("main").Parse(`
package main

import (
{{if not .TestMain}}
	"os"
{{end}}
	"testing"
	"testing/internal/testdeps"

{{if .ImportTest}}
	{{if .NeedTest}}_test{{else}}_{{end}} {{.TestPackage.PkgPath | printf "%q"}}
{{end}}
{{if .ImportXtest}}
	{{if .NeedXtest}}_xtest{{else}}_{{end}} {{.XTestPackage.PkgPath | printf "%q"}}
{{end}}
)

var tests = []testing.InternalTest{
{{range .Tests}}
	{"{{.Name}}", {{.Package}}.{{.Name}}},
{{end}}
}

var benchmarks = []testing.InternalBenchmark{
{{range .Benchmarks}}
	{"{{.Name}}", {{.Package}}.{{.Name}}},
{{end}}
}

var examples = []testing.InternalExample{
{{range .Examples}}
	{"{{.Name}}", {{.Package}}.{{.Name}}, {{.Output | printf "%q"}}, {{.Unordered}}},
{{end}}
}

func init() {
	testdeps.ImportPath = {{.TestPackage.PkgPath | printf "%q"}}
}

func main() {
	m := testing.MainStart(testdeps.TestDeps{}, tests, benchmarks, examples)
{{with .TestMain}}
	{{.Package}}.{{.Name}}(m)
{{else}}
	os.Exit(m.Run())
{{end}}
}

`))
diff --git a/vendor/golang.org/x/tools/go/packages/golist_overlay.go b/vendor/golang.org/x/tools/go/packages/golist_overlay.go
new file mode 100644
index 000000000..71ffcd9d5
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/golist_overlay.go
@@ -0,0 +1,104 @@
+package packages
+
+import (
+ "go/parser"
+ "go/token"
+ "path/filepath"
+ "strconv"
+ "strings"
+)
+
// processGolistOverlay provides rudimentary support for adding
// files that don't exist on disk to an overlay. The results can be
// sometimes incorrect.
// TODO(matloob): Handle unsupported cases, including the following:
// - test files
// - adding test and non-test files to test variants of packages
// - determining the correct package to add given a new import path
// - creating packages that don't exist
func processGolistOverlay(cfg *Config, response *driverResponse) (modifiedPkgs, needPkgs []string, err error) {
	havePkgs := make(map[string]string) // importPath -> non-test package ID
	needPkgsSet := make(map[string]bool)
	modifiedPkgsSet := make(map[string]bool)

	for _, pkg := range response.Packages {
		// This is an approximation of import path to id. This can be
		// wrong for tests, vendored packages, and a number of other cases.
		havePkgs[pkg.PkgPath] = pkg.ID
	}

outer:
	for path, contents := range cfg.Overlay {
		base := filepath.Base(path)
		if strings.HasSuffix(path, "_test.go") {
			// Overlays don't support adding new test files yet.
			// TODO(matloob): support adding new test files.
			continue
		}
		dir := filepath.Dir(path)
		// The first package whose GoFiles live in the overlay file's
		// directory claims the file (see the continue outer below).
		for _, pkg := range response.Packages {
			var dirContains, fileExists bool
			for _, f := range pkg.GoFiles {
				if sameFile(filepath.Dir(f), dir) {
					dirContains = true
				}
				if filepath.Base(f) == base {
					fileExists = true
				}
			}
			if dirContains {
				if !fileExists {
					pkg.GoFiles = append(pkg.GoFiles, path) // TODO(matloob): should the file just be added to GoFiles?
					pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, path)
					modifiedPkgsSet[pkg.ID] = true
				}
				imports, err := extractImports(path, contents)
				if err != nil {
					// Let the parser or type checker report errors later.
					continue outer
				}
				for _, imp := range imports {
					_, found := pkg.Imports[imp]
					if !found {
						needPkgsSet[imp] = true
						// TODO(matloob): Handle cases when the following block isn't correct.
						// These include imports of test variants, imports of vendored packages, etc.
						id, ok := havePkgs[imp]
						if !ok {
							id = imp
						}
						pkg.Imports[imp] = &Package{ID: id}
					}
				}
				continue outer
			}
		}
	}

	needPkgs = make([]string, 0, len(needPkgsSet))
	for pkg := range needPkgsSet {
		needPkgs = append(needPkgs, pkg)
	}
	modifiedPkgs = make([]string, 0, len(modifiedPkgsSet))
	for pkg := range modifiedPkgsSet {
		modifiedPkgs = append(modifiedPkgs, pkg)
	}
	// err is never assigned above; returned for interface symmetry.
	return modifiedPkgs, needPkgs, err
}
+
// extractImports parses contents (attributed to filename) just far enough
// to read its import declarations, and returns the unquoted import paths.
func extractImports(filename string, contents []byte) ([]string, error) {
	fset := token.NewFileSet() // TODO(matloob): reuse fileset?
	file, err := parser.ParseFile(fset, filename, contents, parser.ImportsOnly)
	if err != nil {
		return nil, err
	}
	var paths []string
	for _, spec := range file.Imports {
		p, err := strconv.Unquote(spec.Path.Value)
		if err != nil {
			return nil, err
		}
		paths = append(paths, p)
	}
	return paths, nil
}
diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go
new file mode 100644
index 000000000..e69ddfcb4
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/packages.go
@@ -0,0 +1,945 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+// See doc.go for package documentation and implementation notes.
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/scanner"
+ "go/token"
+ "go/types"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "sync"
+
+ "golang.org/x/tools/go/gcexportdata"
+)
+
+// A LoadMode specifies the amount of detail to return when loading.
+// Higher-numbered modes cause Load to return more information,
+// but may be slower. Load may return more information than requested.
+type LoadMode int
+
+const (
+ // LoadFiles finds the packages and computes their source file lists.
+ // Package fields: ID, Name, Errors, GoFiles, and OtherFiles.
+ LoadFiles LoadMode = iota
+
+ // LoadImports adds import information for each package
+ // and its dependencies.
+ // Package fields added: Imports.
+ LoadImports
+
+ // LoadTypes adds type information for package-level
+ // declarations in the packages matching the patterns.
+ // Package fields added: Types, Fset, and IllTyped.
+ // This mode uses type information provided by the build system when
+ // possible, and may fill in the ExportFile field.
+ LoadTypes
+
+ // LoadSyntax adds typed syntax trees for the packages matching the patterns.
+ // Package fields added: Syntax, and TypesInfo, for direct pattern matches only.
+ LoadSyntax
+
+ // LoadAllSyntax adds typed syntax trees for the packages matching the patterns
+ // and all dependencies.
+ // Package fields added: Types, Fset, IllTyped, Syntax, and TypesInfo,
+ // for all packages in the import graph.
+ LoadAllSyntax
+)
+
+// A Config specifies details about how packages should be loaded.
+// The zero value is a valid configuration.
+// Calls to Load do not modify this struct.
+type Config struct {
+ // Mode controls the level of information returned for each package.
+ Mode LoadMode
+
+ // Context specifies the context for the load operation.
+ // If the context is cancelled, the loader may stop early
+ // and return an ErrCancelled error.
+ // If Context is nil, the load cannot be cancelled.
+ Context context.Context
+
+ // Dir is the directory in which to run the build system's query tool
+ // that provides information about the packages.
+ // If Dir is empty, the tool is run in the current directory.
+ Dir string
+
+ // Env is the environment to use when invoking the build system's query tool.
+ // If Env is nil, the current environment is used.
+ // As in os/exec's Cmd, only the last value in the slice for
+ // each environment key is used. To specify the setting of only
+ // a few variables, append to the current environment, as in:
+ //
+ // opt.Env = append(os.Environ(), "GOOS=plan9", "GOARCH=386")
+ //
+ Env []string
+
+ // BuildFlags is a list of command-line flags to be passed through to
+ // the build system's query tool.
+ BuildFlags []string
+
+ // Fset provides source position information for syntax trees and types.
+ // If Fset is nil, the loader will create a new FileSet.
+ Fset *token.FileSet
+
+ // ParseFile is called to read and parse each file
+ // when preparing a package's type-checked syntax tree.
+ // It must be safe to call ParseFile simultaneously from multiple goroutines.
+	// If ParseFile is nil, the loader will use parser.ParseFile.
+ //
+ // ParseFile should parse the source from src and use filename only for
+ // recording position information.
+ //
+ // An application may supply a custom implementation of ParseFile
+ // to change the effective file contents or the behavior of the parser,
+ // or to modify the syntax tree. For example, selectively eliminating
+ // unwanted function bodies can significantly accelerate type checking.
+ ParseFile func(fset *token.FileSet, filename string, src []byte) (*ast.File, error)
+
+ // If Tests is set, the loader includes not just the packages
+ // matching a particular pattern but also any related test packages,
+ // including test-only variants of the package and the test executable.
+ //
+ // For example, when using the go command, loading "fmt" with Tests=true
+ // returns four packages, with IDs "fmt" (the standard package),
+ // "fmt [fmt.test]" (the package as compiled for the test),
+ // "fmt_test" (the test functions from source files in package fmt_test),
+ // and "fmt.test" (the test binary).
+ //
+ // In build systems with explicit names for tests,
+ // setting Tests may have no effect.
+ Tests bool
+
+ // Overlay provides a mapping of absolute file paths to file contents.
+ // If the file with the given path already exists, the parser will use the
+ // alternative file contents provided by the map.
+ //
+ // Overlays provide incomplete support for when a given file doesn't
+ // already exist on disk. See the package doc above for more details.
+ Overlay map[string][]byte
+}
+
+// driver is the type for functions that query the build system for the
+// packages named by the patterns.
+type driver func(cfg *Config, patterns ...string) (*driverResponse, error)
+
+// driverResponse contains the results for a driver query.
+type driverResponse struct {
+ // Sizes, if not nil, is the types.Sizes to use when type checking.
+ Sizes *types.StdSizes
+
+ // Roots is the set of package IDs that make up the root packages.
+ // We have to encode this separately because when we encode a single package
+ // we cannot know if it is one of the roots as that requires knowledge of the
+ // graph it is part of.
+ Roots []string `json:",omitempty"`
+
+ // Packages is the full set of packages in the graph.
+ // The packages are not connected into a graph.
+ // The Imports if populated will be stubs that only have their ID set.
+ // Imports will be connected and then type and syntax information added in a
+ // later pass (see refine).
+ Packages []*Package
+}
+
+// Load loads and returns the Go packages named by the given patterns.
+//
+// Config specifies loading options;
+// nil behaves the same as an empty Config.
+//
+// Load returns an error if any of the patterns was invalid
+// as defined by the underlying build system.
+// It may return an empty list of packages without an error,
+// for instance for an empty expansion of a valid wildcard.
+// Errors associated with a particular package are recorded in the
+// corresponding Package's Errors list, and do not cause Load to
+// return an error. Clients may need to handle such errors before
+// proceeding with further analysis. The PrintErrors function is
+// provided for convenient display of all errors.
+func Load(cfg *Config, patterns ...string) ([]*Package, error) {
+ l := newLoader(cfg)
+ response, err := defaultDriver(&l.Config, patterns...)
+ if err != nil {
+ return nil, err
+ }
+ l.sizes = response.Sizes
+ return l.refine(response.Roots, response.Packages...)
+}
+
+// defaultDriver is a driver that looks for an external driver binary, and if
+// it does not find it falls back to the built in go list driver.
+func defaultDriver(cfg *Config, patterns ...string) (*driverResponse, error) {
+ driver := findExternalDriver(cfg)
+ if driver == nil {
+ driver = goListDriver
+ }
+ return driver(cfg, patterns...)
+}
+
+// A Package describes a loaded Go package.
+type Package struct {
+ // ID is a unique identifier for a package,
+ // in a syntax provided by the underlying build system.
+ //
+ // Because the syntax varies based on the build system,
+ // clients should treat IDs as opaque and not attempt to
+ // interpret them.
+ ID string
+
+ // Name is the package name as it appears in the package source code.
+ Name string
+
+ // PkgPath is the package path as used by the go/types package.
+ PkgPath string
+
+ // Errors contains any errors encountered querying the metadata
+ // of the package, or while parsing or type-checking its files.
+ Errors []Error
+
+ // GoFiles lists the absolute file paths of the package's Go source files.
+ GoFiles []string
+
+ // CompiledGoFiles lists the absolute file paths of the package's source
+ // files that were presented to the compiler.
+ // This may differ from GoFiles if files are processed before compilation.
+ CompiledGoFiles []string
+
+ // OtherFiles lists the absolute file paths of the package's non-Go source files,
+ // including assembly, C, C++, Fortran, Objective-C, SWIG, and so on.
+ OtherFiles []string
+
+ // ExportFile is the absolute path to a file containing type
+ // information for the package as provided by the build system.
+ ExportFile string
+
+ // Imports maps import paths appearing in the package's Go source files
+ // to corresponding loaded Packages.
+ Imports map[string]*Package
+
+ // Types provides type information for the package.
+ // Modes LoadTypes and above set this field for packages matching the
+ // patterns; type information for dependencies may be missing or incomplete.
+ // Mode LoadAllSyntax sets this field for all packages, including dependencies.
+ Types *types.Package
+
+ // Fset provides position information for Types, TypesInfo, and Syntax.
+ // It is set only when Types is set.
+ Fset *token.FileSet
+
+ // IllTyped indicates whether the package or any dependency contains errors.
+ // It is set only when Types is set.
+ IllTyped bool
+
+ // Syntax is the package's syntax trees, for the files listed in CompiledGoFiles.
+ //
+ // Mode LoadSyntax sets this field for packages matching the patterns.
+ // Mode LoadAllSyntax sets this field for all packages, including dependencies.
+ Syntax []*ast.File
+
+ // TypesInfo provides type information about the package's syntax trees.
+ // It is set only when Syntax is set.
+ TypesInfo *types.Info
+}
+
+// An Error describes a problem with a package's metadata, syntax, or types.
+type Error struct {
+ Pos string // "file:line:col" or "file:line" or "" or "-"
+ Msg string
+ Kind ErrorKind
+}
+
+// ErrorKind describes the source of the error, allowing the user to
+// differentiate between errors generated by the driver, the parser, or the
+// type-checker.
+type ErrorKind int
+
+const (
+ UnknownError ErrorKind = iota
+ ListError
+ ParseError
+ TypeError
+)
+
+func (err Error) Error() string {
+ pos := err.Pos
+ if pos == "" {
+ pos = "-" // like token.Position{}.String()
+ }
+ return pos + ": " + err.Msg
+}
+
+// flatPackage is the JSON form of Package
+// It drops all the type and syntax fields, and transforms the Imports
+//
+// TODO(adonovan): identify this struct with Package, effectively
+// publishing the JSON protocol.
+type flatPackage struct {
+ ID string
+ Name string `json:",omitempty"`
+ PkgPath string `json:",omitempty"`
+ Errors []Error `json:",omitempty"`
+ GoFiles []string `json:",omitempty"`
+ CompiledGoFiles []string `json:",omitempty"`
+ OtherFiles []string `json:",omitempty"`
+ ExportFile string `json:",omitempty"`
+ Imports map[string]string `json:",omitempty"`
+}
+
+// MarshalJSON returns the Package in its JSON form.
+// For the most part, the structure fields are written out unmodified, and
+// the type and syntax fields are skipped.
+// The imports are written out as just a map of path to package id.
+// The errors are written using a custom type that tries to preserve the
+// structure of error types we know about.
+//
+// This method exists to enable support for additional build systems. It is
+// not intended for use by clients of the API and we may change the format.
+func (p *Package) MarshalJSON() ([]byte, error) {
+ flat := &flatPackage{
+ ID: p.ID,
+ Name: p.Name,
+ PkgPath: p.PkgPath,
+ Errors: p.Errors,
+ GoFiles: p.GoFiles,
+ CompiledGoFiles: p.CompiledGoFiles,
+ OtherFiles: p.OtherFiles,
+ ExportFile: p.ExportFile,
+ }
+ if len(p.Imports) > 0 {
+ flat.Imports = make(map[string]string, len(p.Imports))
+ for path, ipkg := range p.Imports {
+ flat.Imports[path] = ipkg.ID
+ }
+ }
+ return json.Marshal(flat)
+}
+
+// UnmarshalJSON reads in a Package from its JSON format.
+// See MarshalJSON for details about the format accepted.
+func (p *Package) UnmarshalJSON(b []byte) error {
+ flat := &flatPackage{}
+ if err := json.Unmarshal(b, &flat); err != nil {
+ return err
+ }
+ *p = Package{
+ ID: flat.ID,
+ Name: flat.Name,
+ PkgPath: flat.PkgPath,
+ Errors: flat.Errors,
+ GoFiles: flat.GoFiles,
+ CompiledGoFiles: flat.CompiledGoFiles,
+ OtherFiles: flat.OtherFiles,
+ ExportFile: flat.ExportFile,
+ }
+ if len(flat.Imports) > 0 {
+ p.Imports = make(map[string]*Package, len(flat.Imports))
+ for path, id := range flat.Imports {
+ p.Imports[path] = &Package{ID: id}
+ }
+ }
+ return nil
+}
+
+func (p *Package) String() string { return p.ID }
+
+// loaderPackage augments Package with state used during the loading phase
+type loaderPackage struct {
+ *Package
+ importErrors map[string]error // maps each bad import to its error
+ loadOnce sync.Once
+ color uint8 // for cycle detection
+ needsrc bool // load from source (Mode >= LoadTypes)
+ needtypes bool // type information is either requested or depended on
+ initial bool // package was matched by a pattern
+}
+
+// loader holds the working state of a single call to load.
+type loader struct {
+ pkgs map[string]*loaderPackage
+ Config
+ sizes types.Sizes
+ exportMu sync.Mutex // enforces mutual exclusion of exportdata operations
+}
+
+func newLoader(cfg *Config) *loader {
+ ld := &loader{}
+ if cfg != nil {
+ ld.Config = *cfg
+ }
+ if ld.Config.Env == nil {
+ ld.Config.Env = os.Environ()
+ }
+ if ld.Context == nil {
+ ld.Context = context.Background()
+ }
+ if ld.Dir == "" {
+ if dir, err := os.Getwd(); err == nil {
+ ld.Dir = dir
+ }
+ }
+
+ if ld.Mode >= LoadTypes {
+ if ld.Fset == nil {
+ ld.Fset = token.NewFileSet()
+ }
+
+ // ParseFile is required even in LoadTypes mode
+ // because we load source if export data is missing.
+ if ld.ParseFile == nil {
+ ld.ParseFile = func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) {
+ var isrc interface{}
+ if src != nil {
+ isrc = src
+ }
+ const mode = parser.AllErrors | parser.ParseComments
+ return parser.ParseFile(fset, filename, isrc, mode)
+ }
+ }
+ }
+ return ld
+}
+
+// refine connects the supplied packages into a graph and then adds type
+// and syntax information as requested by the LoadMode.
+func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
+ rootMap := make(map[string]int, len(roots))
+ for i, root := range roots {
+ rootMap[root] = i
+ }
+ ld.pkgs = make(map[string]*loaderPackage)
+ // first pass, fixup and build the map and roots
+ var initial = make([]*loaderPackage, len(roots))
+ for _, pkg := range list {
+ rootIndex := -1
+ if i, found := rootMap[pkg.ID]; found {
+ rootIndex = i
+ }
+ lpkg := &loaderPackage{
+ Package: pkg,
+ needtypes: ld.Mode >= LoadAllSyntax ||
+ ld.Mode >= LoadTypes && rootIndex >= 0,
+ needsrc: ld.Mode >= LoadAllSyntax ||
+ ld.Mode >= LoadSyntax && rootIndex >= 0 ||
+ len(ld.Overlay) > 0 || // Overlays can invalidate export data. TODO(matloob): make this check fine-grained based on dependencies on overlaid files
+ pkg.ExportFile == "" && pkg.PkgPath != "unsafe",
+ }
+ ld.pkgs[lpkg.ID] = lpkg
+ if rootIndex >= 0 {
+ initial[rootIndex] = lpkg
+ lpkg.initial = true
+ }
+ }
+ for i, root := range roots {
+ if initial[i] == nil {
+ return nil, fmt.Errorf("root package %v is missing", root)
+ }
+ }
+
+ // Materialize the import graph.
+
+ const (
+ white = 0 // new
+ grey = 1 // in progress
+ black = 2 // complete
+ )
+
+ // visit traverses the import graph, depth-first,
+ // and materializes the graph as Packages.Imports.
+ //
+ // Valid imports are saved in the Packages.Import map.
+ // Invalid imports (cycles and missing nodes) are saved in the importErrors map.
+ // Thus, even in the presence of both kinds of errors, the Import graph remains a DAG.
+ //
+ // visit returns whether the package needs src or has a transitive
+ // dependency on a package that does. These are the only packages
+ // for which we load source code.
+ var stack []*loaderPackage
+ var visit func(lpkg *loaderPackage) bool
+ var srcPkgs []*loaderPackage
+ visit = func(lpkg *loaderPackage) bool {
+ switch lpkg.color {
+ case black:
+ return lpkg.needsrc
+ case grey:
+ panic("internal error: grey node")
+ }
+ lpkg.color = grey
+ stack = append(stack, lpkg) // push
+ stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports
+ lpkg.Imports = make(map[string]*Package, len(stubs))
+ for importPath, ipkg := range stubs {
+ var importErr error
+ imp := ld.pkgs[ipkg.ID]
+ if imp == nil {
+ // (includes package "C" when DisableCgo)
+ importErr = fmt.Errorf("missing package: %q", ipkg.ID)
+ } else if imp.color == grey {
+ importErr = fmt.Errorf("import cycle: %s", stack)
+ }
+ if importErr != nil {
+ if lpkg.importErrors == nil {
+ lpkg.importErrors = make(map[string]error)
+ }
+ lpkg.importErrors[importPath] = importErr
+ continue
+ }
+
+ if visit(imp) {
+ lpkg.needsrc = true
+ }
+ lpkg.Imports[importPath] = imp.Package
+ }
+ if lpkg.needsrc {
+ srcPkgs = append(srcPkgs, lpkg)
+ }
+ stack = stack[:len(stack)-1] // pop
+ lpkg.color = black
+
+ return lpkg.needsrc
+ }
+
+ if ld.Mode < LoadImports {
+		// we do this to drop the stub import packages that we are not even going to try to resolve
+ for _, lpkg := range initial {
+ lpkg.Imports = nil
+ }
+ } else {
+ // For each initial package, create its import DAG.
+ for _, lpkg := range initial {
+ visit(lpkg)
+ }
+ }
+ for _, lpkg := range srcPkgs {
+ // Complete type information is required for the
+ // immediate dependencies of each source package.
+ for _, ipkg := range lpkg.Imports {
+ imp := ld.pkgs[ipkg.ID]
+ imp.needtypes = true
+ }
+ }
+ // Load type data if needed, starting at
+ // the initial packages (roots of the import DAG).
+ if ld.Mode >= LoadTypes {
+ var wg sync.WaitGroup
+ for _, lpkg := range initial {
+ wg.Add(1)
+ go func(lpkg *loaderPackage) {
+ ld.loadRecursive(lpkg)
+ wg.Done()
+ }(lpkg)
+ }
+ wg.Wait()
+ }
+
+ result := make([]*Package, len(initial))
+ for i, lpkg := range initial {
+ result[i] = lpkg.Package
+ }
+ return result, nil
+}
+
+// loadRecursive loads the specified package and its dependencies,
+// recursively, in parallel, in topological order.
+// It is atomic and idempotent.
+// Precondition: ld.Mode >= LoadTypes.
+func (ld *loader) loadRecursive(lpkg *loaderPackage) {
+ lpkg.loadOnce.Do(func() {
+ // Load the direct dependencies, in parallel.
+ var wg sync.WaitGroup
+ for _, ipkg := range lpkg.Imports {
+ imp := ld.pkgs[ipkg.ID]
+ wg.Add(1)
+ go func(imp *loaderPackage) {
+ ld.loadRecursive(imp)
+ wg.Done()
+ }(imp)
+ }
+ wg.Wait()
+
+ ld.loadPackage(lpkg)
+ })
+}
+
+// loadPackage loads the specified package.
+// It must be called only once per Package,
+// after immediate dependencies are loaded.
+// Precondition: ld.Mode >= LoadTypes.
+func (ld *loader) loadPackage(lpkg *loaderPackage) {
+ if lpkg.PkgPath == "unsafe" {
+ // Fill in the blanks to avoid surprises.
+ lpkg.Types = types.Unsafe
+ lpkg.Fset = ld.Fset
+ lpkg.Syntax = []*ast.File{}
+ lpkg.TypesInfo = new(types.Info)
+ return
+ }
+
+ // Call NewPackage directly with explicit name.
+ // This avoids skew between golist and go/types when the files'
+ // package declarations are inconsistent.
+ lpkg.Types = types.NewPackage(lpkg.PkgPath, lpkg.Name)
+ lpkg.Fset = ld.Fset
+
+ // Subtle: we populate all Types fields with an empty Package
+ // before loading export data so that export data processing
+ // never has to create a types.Package for an indirect dependency,
+ // which would then require that such created packages be explicitly
+ // inserted back into the Import graph as a final step after export data loading.
+ // The Diamond test exercises this case.
+ if !lpkg.needtypes {
+ return
+ }
+ if !lpkg.needsrc {
+ ld.loadFromExportData(lpkg)
+ return // not a source package, don't get syntax trees
+ }
+
+ appendError := func(err error) {
+ // Convert various error types into the one true Error.
+ var errs []Error
+ switch err := err.(type) {
+ case Error:
+ // from driver
+ errs = append(errs, err)
+
+ case *os.PathError:
+ // from parser
+ errs = append(errs, Error{
+ Pos: err.Path + ":1",
+ Msg: err.Err.Error(),
+ Kind: ParseError,
+ })
+
+ case scanner.ErrorList:
+ // from parser
+ for _, err := range err {
+ errs = append(errs, Error{
+ Pos: err.Pos.String(),
+ Msg: err.Msg,
+ Kind: ParseError,
+ })
+ }
+
+ case types.Error:
+ // from type checker
+ errs = append(errs, Error{
+ Pos: err.Fset.Position(err.Pos).String(),
+ Msg: err.Msg,
+ Kind: TypeError,
+ })
+
+ default:
+ // unexpected impoverished error from parser?
+ errs = append(errs, Error{
+ Pos: "-",
+ Msg: err.Error(),
+ Kind: UnknownError,
+ })
+
+ // If you see this error message, please file a bug.
+ log.Printf("internal error: error %q (%T) without position", err, err)
+ }
+
+ lpkg.Errors = append(lpkg.Errors, errs...)
+ }
+
+ files, errs := ld.parseFiles(lpkg.CompiledGoFiles)
+ for _, err := range errs {
+ appendError(err)
+ }
+
+ lpkg.Syntax = files
+
+ lpkg.TypesInfo = &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ }
+
+ importer := importerFunc(func(path string) (*types.Package, error) {
+ if path == "unsafe" {
+ return types.Unsafe, nil
+ }
+
+ // The imports map is keyed by import path.
+ ipkg := lpkg.Imports[path]
+ if ipkg == nil {
+ if err := lpkg.importErrors[path]; err != nil {
+ return nil, err
+ }
+ // There was skew between the metadata and the
+ // import declarations, likely due to an edit
+ // race, or because the ParseFile feature was
+ // used to supply alternative file contents.
+ return nil, fmt.Errorf("no metadata for %s", path)
+ }
+
+ if ipkg.Types != nil && ipkg.Types.Complete() {
+ return ipkg.Types, nil
+ }
+ log.Fatalf("internal error: nil Pkg importing %q from %q", path, lpkg)
+ panic("unreachable")
+ })
+
+ // type-check
+ tc := &types.Config{
+ Importer: importer,
+
+ // Type-check bodies of functions only in non-initial packages.
+ // Example: for import graph A->B->C and initial packages {A,C},
+ // we can ignore function bodies in B.
+ IgnoreFuncBodies: ld.Mode < LoadAllSyntax && !lpkg.initial,
+
+ Error: appendError,
+ Sizes: ld.sizes,
+ }
+ types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax)
+
+ lpkg.importErrors = nil // no longer needed
+
+ // If !Cgo, the type-checker uses FakeImportC mode, so
+ // it doesn't invoke the importer for import "C",
+ // nor report an error for the import,
+ // or for any undefined C.f reference.
+ // We must detect this explicitly and correctly
+ // mark the package as IllTyped (by reporting an error).
+ // TODO(adonovan): if these errors are annoying,
+ // we could just set IllTyped quietly.
+ if tc.FakeImportC {
+ outer:
+ for _, f := range lpkg.Syntax {
+ for _, imp := range f.Imports {
+ if imp.Path.Value == `"C"` {
+ err := types.Error{Fset: ld.Fset, Pos: imp.Pos(), Msg: `import "C" ignored`}
+ appendError(err)
+ break outer
+ }
+ }
+ }
+ }
+
+ // Record accumulated errors.
+ illTyped := len(lpkg.Errors) > 0
+ if !illTyped {
+ for _, imp := range lpkg.Imports {
+ if imp.IllTyped {
+ illTyped = true
+ break
+ }
+ }
+ }
+ lpkg.IllTyped = illTyped
+}
+
+// An importFunc is an implementation of the single-method
+// types.Importer interface based on a function value.
+type importerFunc func(path string) (*types.Package, error)
+
+func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) }
+
+// We use a counting semaphore to limit
+// the number of parallel I/O calls per process.
+var ioLimit = make(chan bool, 20)
+
+// parseFiles reads and parses the Go source files and returns the ASTs
+// of the ones that could be at least partially parsed, along with a
+// list of I/O and parse errors encountered.
+//
+// Because files are scanned in parallel, the token.Pos
+// positions of the resulting ast.Files are not ordered.
+//
+func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
+ var wg sync.WaitGroup
+ n := len(filenames)
+ parsed := make([]*ast.File, n)
+ errors := make([]error, n)
+ for i, file := range filenames {
+ wg.Add(1)
+ go func(i int, filename string) {
+ ioLimit <- true // wait
+ // ParseFile may return both an AST and an error.
+ var src []byte
+ for f, contents := range ld.Config.Overlay {
+ if sameFile(f, filename) {
+ src = contents
+ }
+ }
+ var err error
+ if src == nil {
+ src, err = ioutil.ReadFile(filename)
+ }
+ if err != nil {
+ parsed[i], errors[i] = nil, err
+ } else {
+ parsed[i], errors[i] = ld.ParseFile(ld.Fset, filename, src)
+ }
+ <-ioLimit // signal
+ wg.Done()
+ }(i, file)
+ }
+ wg.Wait()
+
+ // Eliminate nils, preserving order.
+ var o int
+ for _, f := range parsed {
+ if f != nil {
+ parsed[o] = f
+ o++
+ }
+ }
+ parsed = parsed[:o]
+
+ o = 0
+ for _, err := range errors {
+ if err != nil {
+ errors[o] = err
+ o++
+ }
+ }
+ errors = errors[:o]
+
+ return parsed, errors
+}
+
+// sameFile returns true if x and y have the same basename and denote
+// the same file.
+//
+func sameFile(x, y string) bool {
+ if x == y {
+ // It could be the case that y doesn't exist.
+ // For instance, it may be an overlay file that
+ // hasn't been written to disk. To handle that case
+ // let x == y through. (We added the exact absolute path
+ // string to the CompiledGoFiles list, so the unwritten
+ // overlay case implies x==y.)
+ return true
+ }
+ if filepath.Base(x) == filepath.Base(y) { // (optimisation)
+ if xi, err := os.Stat(x); err == nil {
+ if yi, err := os.Stat(y); err == nil {
+ return os.SameFile(xi, yi)
+ }
+ }
+ }
+ return false
+}
+
+// loadFromExportData returns type information for the specified
+// package, loading it from an export data file on the first request.
+func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error) {
+ if lpkg.PkgPath == "" {
+ log.Fatalf("internal error: Package %s has no PkgPath", lpkg)
+ }
+
+ // Because gcexportdata.Read has the potential to create or
+ // modify the types.Package for each node in the transitive
+ // closure of dependencies of lpkg, all exportdata operations
+ // must be sequential. (Finer-grained locking would require
+ // changes to the gcexportdata API.)
+ //
+ // The exportMu lock guards the Package.Pkg field and the
+ // types.Package it points to, for each Package in the graph.
+ //
+ // Not all accesses to Package.Pkg need to be protected by exportMu:
+ // graph ordering ensures that direct dependencies of source
+ // packages are fully loaded before the importer reads their Pkg field.
+ ld.exportMu.Lock()
+ defer ld.exportMu.Unlock()
+
+ if tpkg := lpkg.Types; tpkg != nil && tpkg.Complete() {
+ return tpkg, nil // cache hit
+ }
+
+ lpkg.IllTyped = true // fail safe
+
+ if lpkg.ExportFile == "" {
+ // Errors while building export data will have been printed to stderr.
+ return nil, fmt.Errorf("no export data file")
+ }
+ f, err := os.Open(lpkg.ExportFile)
+ if err != nil {
+ return nil, err
+ }
+ defer f.Close()
+
+ // Read gc export data.
+ //
+ // We don't currently support gccgo export data because all
+ // underlying workspaces use the gc toolchain. (Even build
+ // systems that support gccgo don't use it for workspace
+ // queries.)
+ r, err := gcexportdata.NewReader(f)
+ if err != nil {
+ return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+ }
+
+ // Build the view.
+ //
+ // The gcexportdata machinery has no concept of package ID.
+ // It identifies packages by their PkgPath, which although not
+ // globally unique is unique within the scope of one invocation
+ // of the linker, type-checker, or gcexportdata.
+ //
+ // So, we must build a PkgPath-keyed view of the global
+ // (conceptually ID-keyed) cache of packages and pass it to
+ // gcexportdata. The view must contain every existing
+ // package that might possibly be mentioned by the
+ // current package---its transitive closure.
+ //
+ // In loadPackage, we unconditionally create a types.Package for
+ // each dependency so that export data loading does not
+ // create new ones.
+ //
+ // TODO(adonovan): it would be simpler and more efficient
+ // if the export data machinery invoked a callback to
+ // get-or-create a package instead of a map.
+ //
+ view := make(map[string]*types.Package) // view seen by gcexportdata
+ seen := make(map[*loaderPackage]bool) // all visited packages
+ var visit func(pkgs map[string]*Package)
+ visit = func(pkgs map[string]*Package) {
+ for _, p := range pkgs {
+ lpkg := ld.pkgs[p.ID]
+ if !seen[lpkg] {
+ seen[lpkg] = true
+ view[lpkg.PkgPath] = lpkg.Types
+ visit(lpkg.Imports)
+ }
+ }
+ }
+ visit(lpkg.Imports)
+
+ viewLen := len(view) + 1 // adding the self package
+ // Parse the export data.
+ // (May modify incomplete packages in view but not create new ones.)
+ tpkg, err := gcexportdata.Read(r, ld.Fset, view, lpkg.PkgPath)
+ if err != nil {
+ return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+ }
+ if viewLen != len(view) {
+ log.Fatalf("Unexpected package creation during export data loading")
+ }
+
+ lpkg.Types = tpkg
+ lpkg.IllTyped = false
+
+ return tpkg, nil
+}
+
+func usesExportData(cfg *Config) bool {
+ return LoadTypes <= cfg.Mode && cfg.Mode < LoadAllSyntax
+}
diff --git a/vendor/golang.org/x/tools/go/packages/visit.go b/vendor/golang.org/x/tools/go/packages/visit.go
new file mode 100644
index 000000000..b13cb081f
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/visit.go
@@ -0,0 +1,55 @@
+package packages
+
+import (
+ "fmt"
+ "os"
+ "sort"
+)
+
+// Visit visits all the packages in the import graph whose roots are
+// pkgs, calling the optional pre function the first time each package
+// is encountered (preorder), and the optional post function after a
+// package's dependencies have been visited (postorder).
+// The boolean result of pre(pkg) determines whether
+// the imports of package pkg are visited.
+func Visit(pkgs []*Package, pre func(*Package) bool, post func(*Package)) {
+ seen := make(map[*Package]bool)
+ var visit func(*Package)
+ visit = func(pkg *Package) {
+ if !seen[pkg] {
+ seen[pkg] = true
+
+ if pre == nil || pre(pkg) {
+ paths := make([]string, 0, len(pkg.Imports))
+ for path := range pkg.Imports {
+ paths = append(paths, path)
+ }
+ sort.Strings(paths) // Imports is a map, this makes visit stable
+ for _, path := range paths {
+ visit(pkg.Imports[path])
+ }
+ }
+
+ if post != nil {
+ post(pkg)
+ }
+ }
+ }
+ for _, pkg := range pkgs {
+ visit(pkg)
+ }
+}
+
+// PrintErrors prints to os.Stderr the accumulated errors of all
+// packages in the import graph rooted at pkgs, dependencies first.
+// PrintErrors returns the number of errors printed.
+func PrintErrors(pkgs []*Package) int {
+ var n int
+ Visit(pkgs, nil, func(pkg *Package) {
+ for _, err := range pkg.Errors {
+ fmt.Fprintln(os.Stderr, err)
+ n++
+ }
+ })
+ return n
+}
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/callee.go b/vendor/golang.org/x/tools/go/types/typeutil/callee.go
new file mode 100644
index 000000000..38f596daf
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/callee.go
@@ -0,0 +1,46 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+import (
+ "go/ast"
+ "go/types"
+
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+// Callee returns the named target of a function call, if any:
+// a function, method, builtin, or variable.
+func Callee(info *types.Info, call *ast.CallExpr) types.Object {
+ var obj types.Object
+ switch fun := astutil.Unparen(call.Fun).(type) {
+ case *ast.Ident:
+ obj = info.Uses[fun] // type, var, builtin, or declared func
+ case *ast.SelectorExpr:
+ if sel, ok := info.Selections[fun]; ok {
+ obj = sel.Obj() // method or field
+ } else {
+ obj = info.Uses[fun.Sel] // qualified identifier?
+ }
+ }
+ if _, ok := obj.(*types.TypeName); ok {
+ return nil // T(x) is a conversion, not a call
+ }
+ return obj
+}
+
+// StaticCallee returns the target (function or method) of a static
+// function call, if any. It returns nil for calls to builtins.
+func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func {
+ if f, ok := Callee(info, call).(*types.Func); ok && !interfaceMethod(f) {
+ return f
+ }
+ return nil
+}
+
+func interfaceMethod(f *types.Func) bool {
+ recv := f.Type().(*types.Signature).Recv()
+ return recv != nil && types.IsInterface(recv.Type())
+}
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/imports.go b/vendor/golang.org/x/tools/go/types/typeutil/imports.go
new file mode 100644
index 000000000..9c441dba9
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/imports.go
@@ -0,0 +1,31 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+import "go/types"
+
+// Dependencies returns all dependencies of the specified packages.
+//
+// Dependent packages appear in topological order: if package P imports
+// package Q, Q appears earlier than P in the result.
+// The algorithm follows import statements in the order they
+// appear in the source code, so the result is a total order.
+//
+func Dependencies(pkgs ...*types.Package) []*types.Package {
+ var result []*types.Package
+ seen := make(map[*types.Package]bool)
+ var visit func(pkgs []*types.Package)
+ visit = func(pkgs []*types.Package) {
+ for _, p := range pkgs {
+ if !seen[p] {
+ seen[p] = true
+ visit(p.Imports())
+ result = append(result, p)
+ }
+ }
+ }
+ visit(pkgs)
+ return result
+}
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/map.go b/vendor/golang.org/x/tools/go/types/typeutil/map.go
new file mode 100644
index 000000000..c7f754500
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/map.go
@@ -0,0 +1,313 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package typeutil defines various utilities for types, such as Map,
+// a mapping from types.Type to interface{} values.
+package typeutil // import "golang.org/x/tools/go/types/typeutil"
+
+import (
+ "bytes"
+ "fmt"
+ "go/types"
+ "reflect"
+)
+
+// Map is a hash-table-based mapping from types (types.Type) to
+// arbitrary interface{} values. The concrete types that implement
+// the Type interface are pointers. Since they are not canonicalized,
+// == cannot be used to check for equivalence, and thus we cannot
+// simply use a Go map.
+//
+// Just as with map[K]V, a nil *Map is a valid empty map.
+//
+// Not thread-safe.
+//
+type Map struct {
+ hasher Hasher // shared by many Maps
+ table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused
+ length int // number of map entries
+}
+
+// entry is an entry (key/value association) in a hash bucket.
+type entry struct {
+ key types.Type
+ value interface{}
+}
+
+// SetHasher sets the hasher used by Map.
+//
+// All Hashers are functionally equivalent but contain internal state
+// used to cache the results of hashing previously seen types.
+//
+// A single Hasher created by MakeHasher() may be shared among many
+// Maps. This is recommended if the instances have many keys in
+// common, as it will amortize the cost of hash computation.
+//
+// A Hasher may grow without bound as new types are seen. Even when a
+// type is deleted from the map, the Hasher never shrinks, since other
+// types in the map may reference the deleted type indirectly.
+//
+// Hashers are not thread-safe, and read-only operations such as
+// Map.Lookup require updates to the hasher, so a full Mutex lock (not a
+// read-lock) is required around all Map operations if a shared
+// hasher is accessed from multiple threads.
+//
+// If SetHasher is not called, the Map will create a private hasher at
+// the first call to Insert.
+//
+func (m *Map) SetHasher(hasher Hasher) {
+ m.hasher = hasher
+}
+
+// Delete removes the entry with the given key, if any.
+// It returns true if the entry was found.
+//
+func (m *Map) Delete(key types.Type) bool {
+ if m != nil && m.table != nil {
+ hash := m.hasher.Hash(key)
+ bucket := m.table[hash]
+ for i, e := range bucket {
+ if e.key != nil && types.Identical(key, e.key) {
+ // We can't compact the bucket as it
+ // would disturb iterators.
+ bucket[i] = entry{}
+ m.length--
+ return true
+ }
+ }
+ }
+ return false
+}
+
+// At returns the map entry for the given key.
+// The result is nil if the entry is not present.
+//
+func (m *Map) At(key types.Type) interface{} {
+ if m != nil && m.table != nil {
+ for _, e := range m.table[m.hasher.Hash(key)] {
+ if e.key != nil && types.Identical(key, e.key) {
+ return e.value
+ }
+ }
+ }
+ return nil
+}
+
+// Set sets the map entry for key to val,
+// and returns the previous entry, if any.
+func (m *Map) Set(key types.Type, value interface{}) (prev interface{}) {
+ if m.table != nil {
+ hash := m.hasher.Hash(key)
+ bucket := m.table[hash]
+ var hole *entry
+ for i, e := range bucket {
+ if e.key == nil {
+ hole = &bucket[i]
+ } else if types.Identical(key, e.key) {
+ prev = e.value
+ bucket[i].value = value
+ return
+ }
+ }
+
+ if hole != nil {
+ *hole = entry{key, value} // overwrite deleted entry
+ } else {
+ m.table[hash] = append(bucket, entry{key, value})
+ }
+ } else {
+ if m.hasher.memo == nil {
+ m.hasher = MakeHasher()
+ }
+ hash := m.hasher.Hash(key)
+ m.table = map[uint32][]entry{hash: {entry{key, value}}}
+ }
+
+ m.length++
+ return
+}
+
+// Len returns the number of map entries.
+func (m *Map) Len() int {
+ if m != nil {
+ return m.length
+ }
+ return 0
+}
+
+// Iterate calls function f on each entry in the map in unspecified order.
+//
+// If f should mutate the map, Iterate provides the same guarantees as
+// Go maps: if f deletes a map entry that Iterate has not yet reached,
+// f will not be invoked for it, but if f inserts a map entry that
+// Iterate has not yet reached, whether or not f will be invoked for
+// it is unspecified.
+//
+func (m *Map) Iterate(f func(key types.Type, value interface{})) {
+ if m != nil {
+ for _, bucket := range m.table {
+ for _, e := range bucket {
+ if e.key != nil {
+ f(e.key, e.value)
+ }
+ }
+ }
+ }
+}
+
+// Keys returns a new slice containing the set of map keys.
+// The order is unspecified.
+func (m *Map) Keys() []types.Type {
+ keys := make([]types.Type, 0, m.Len())
+ m.Iterate(func(key types.Type, _ interface{}) {
+ keys = append(keys, key)
+ })
+ return keys
+}
+
+func (m *Map) toString(values bool) string {
+ if m == nil {
+ return "{}"
+ }
+ var buf bytes.Buffer
+ fmt.Fprint(&buf, "{")
+ sep := ""
+ m.Iterate(func(key types.Type, value interface{}) {
+ fmt.Fprint(&buf, sep)
+ sep = ", "
+ fmt.Fprint(&buf, key)
+ if values {
+ fmt.Fprintf(&buf, ": %q", value)
+ }
+ })
+ fmt.Fprint(&buf, "}")
+ return buf.String()
+}
+
+// String returns a string representation of the map's entries.
+// Values are printed using fmt.Sprintf("%v", v).
+// Order is unspecified.
+//
+func (m *Map) String() string {
+ return m.toString(true)
+}
+
+// KeysString returns a string representation of the map's key set.
+// Order is unspecified.
+//
+func (m *Map) KeysString() string {
+ return m.toString(false)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Hasher
+
+// A Hasher maps each type to its hash value.
+// For efficiency, a hasher uses memoization; thus its memory
+// footprint grows monotonically over time.
+// Hashers are not thread-safe.
+// Hashers have reference semantics.
+// Call MakeHasher to create a Hasher.
+type Hasher struct {
+ memo map[types.Type]uint32
+}
+
+// MakeHasher returns a new Hasher instance.
+func MakeHasher() Hasher {
+ return Hasher{make(map[types.Type]uint32)}
+}
+
+// Hash computes a hash value for the given type t such that
+// Identical(t, t') => Hash(t) == Hash(t').
+func (h Hasher) Hash(t types.Type) uint32 {
+ hash, ok := h.memo[t]
+ if !ok {
+ hash = h.hashFor(t)
+ h.memo[t] = hash
+ }
+ return hash
+}
+
+// hashString computes the Fowler–Noll–Vo hash of s.
+func hashString(s string) uint32 {
+ var h uint32
+ for i := 0; i < len(s); i++ {
+ h ^= uint32(s[i])
+ h *= 16777619
+ }
+ return h
+}
+
+// hashFor computes the hash of t.
+func (h Hasher) hashFor(t types.Type) uint32 {
+ // See Identical for rationale.
+ switch t := t.(type) {
+ case *types.Basic:
+ return uint32(t.Kind())
+
+ case *types.Array:
+ return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem())
+
+ case *types.Slice:
+ return 9049 + 2*h.Hash(t.Elem())
+
+ case *types.Struct:
+ var hash uint32 = 9059
+ for i, n := 0, t.NumFields(); i < n; i++ {
+ f := t.Field(i)
+ if f.Anonymous() {
+ hash += 8861
+ }
+ hash += hashString(t.Tag(i))
+ hash += hashString(f.Name()) // (ignore f.Pkg)
+ hash += h.Hash(f.Type())
+ }
+ return hash
+
+ case *types.Pointer:
+ return 9067 + 2*h.Hash(t.Elem())
+
+ case *types.Signature:
+ var hash uint32 = 9091
+ if t.Variadic() {
+ hash *= 8863
+ }
+ return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results())
+
+ case *types.Interface:
+ var hash uint32 = 9103
+ for i, n := 0, t.NumMethods(); i < n; i++ {
+ // See go/types.identicalMethods for rationale.
+ // Method order is not significant.
+ // Ignore m.Pkg().
+ m := t.Method(i)
+ hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type())
+ }
+ return hash
+
+ case *types.Map:
+ return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem())
+
+ case *types.Chan:
+ return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem())
+
+ case *types.Named:
+ // Not safe with a copying GC; objects may move.
+ return uint32(reflect.ValueOf(t.Obj()).Pointer())
+
+ case *types.Tuple:
+ return h.hashTuple(t)
+ }
+ panic(t)
+}
+
+func (h Hasher) hashTuple(tuple *types.Tuple) uint32 {
+ // See go/types.identicalTypes for rationale.
+ n := tuple.Len()
+ var hash uint32 = 9137 + 2*uint32(n)
+ for i := 0; i < n; i++ {
+ hash += 3 * h.Hash(tuple.At(i).Type())
+ }
+ return hash
+}
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go
new file mode 100644
index 000000000..32084610f
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go
@@ -0,0 +1,72 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file implements a cache of method sets.
+
+package typeutil
+
+import (
+ "go/types"
+ "sync"
+)
+
+// A MethodSetCache records the method set of each type T for which
+// MethodSet(T) is called so that repeat queries are fast.
+// The zero value is a ready-to-use cache instance.
+type MethodSetCache struct {
+ mu sync.Mutex
+ named map[*types.Named]struct{ value, pointer *types.MethodSet } // method sets for named N and *N
+ others map[types.Type]*types.MethodSet // all other types
+}
+
+// MethodSet returns the method set of type T. It is thread-safe.
+//
+// If cache is nil, this function is equivalent to types.NewMethodSet(T).
+// Utility functions can thus expose an optional *MethodSetCache
+// parameter to clients that care about performance.
+//
+func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet {
+ if cache == nil {
+ return types.NewMethodSet(T)
+ }
+ cache.mu.Lock()
+ defer cache.mu.Unlock()
+
+ switch T := T.(type) {
+ case *types.Named:
+ return cache.lookupNamed(T).value
+
+ case *types.Pointer:
+ if N, ok := T.Elem().(*types.Named); ok {
+ return cache.lookupNamed(N).pointer
+ }
+ }
+
+ // all other types
+ // (The map uses pointer equivalence, not type identity.)
+ mset := cache.others[T]
+ if mset == nil {
+ mset = types.NewMethodSet(T)
+ if cache.others == nil {
+ cache.others = make(map[types.Type]*types.MethodSet)
+ }
+ cache.others[T] = mset
+ }
+ return mset
+}
+
+func (cache *MethodSetCache) lookupNamed(named *types.Named) struct{ value, pointer *types.MethodSet } {
+ if cache.named == nil {
+ cache.named = make(map[*types.Named]struct{ value, pointer *types.MethodSet })
+ }
+ // Avoid recomputing mset(*T) for each distinct Pointer
+ // instance whose underlying type is a named type.
+ msets, ok := cache.named[named]
+ if !ok {
+ msets.value = types.NewMethodSet(named)
+ msets.pointer = types.NewMethodSet(types.NewPointer(named))
+ cache.named[named] = msets
+ }
+ return msets
+}
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/ui.go b/vendor/golang.org/x/tools/go/types/typeutil/ui.go
new file mode 100644
index 000000000..9849c24ce
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/ui.go
@@ -0,0 +1,52 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+// This file defines utilities for user interfaces that display types.
+
+import "go/types"
+
+// IntuitiveMethodSet returns the intuitive method set of a type T,
+// which is the set of methods you can call on an addressable value of
+// that type.
+//
+// The result always contains MethodSet(T), and is exactly MethodSet(T)
+// for interface types and for pointer-to-concrete types.
+// For all other concrete types T, the result additionally
+// contains each method belonging to *T if there is no identically
+// named method on T itself.
+//
+// This corresponds to user intuition about method sets;
+// this function is intended only for user interfaces.
+//
+// The order of the result is as for types.MethodSet(T).
+//
+func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection {
+ isPointerToConcrete := func(T types.Type) bool {
+ ptr, ok := T.(*types.Pointer)
+ return ok && !types.IsInterface(ptr.Elem())
+ }
+
+ var result []*types.Selection
+ mset := msets.MethodSet(T)
+ if types.IsInterface(T) || isPointerToConcrete(T) {
+ for i, n := 0, mset.Len(); i < n; i++ {
+ result = append(result, mset.At(i))
+ }
+ } else {
+ // T is some other concrete type.
+ // Report methods of T and *T, preferring those of T.
+ pmset := msets.MethodSet(types.NewPointer(T))
+ for i, n := 0, pmset.Len(); i < n; i++ {
+ meth := pmset.At(i)
+ if m := mset.Lookup(meth.Obj().Pkg(), meth.Obj().Name()); m != nil {
+ meth = m
+ }
+ result = append(result, meth)
+ }
+
+ }
+ return result
+}
diff --git a/vendor/golang.org/x/tools/imports/fix.go b/vendor/golang.org/x/tools/imports/fix.go
new file mode 100644
index 000000000..1565f9294
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/fix.go
@@ -0,0 +1,1160 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package imports
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "io/ioutil"
+ "log"
+ "os"
+ "os/exec"
+ "path"
+ "path/filepath"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/go/packages"
+ "golang.org/x/tools/internal/gopathwalk"
+)
+
+// Debug controls verbose logging.
+var Debug = false
+
+// LocalPrefix is a comma-separated string of import path prefixes, which, if
+// set, instructs Process to sort the import paths with the given prefixes
+// into another group after 3rd-party packages.
+var LocalPrefix string
+
+func localPrefixes() []string {
+ if LocalPrefix != "" {
+ return strings.Split(LocalPrefix, ",")
+ }
+ return nil
+}
+
+// importToGroup is a list of functions which map from an import path to
+// a group number.
+var importToGroup = []func(importPath string) (num int, ok bool){
+ func(importPath string) (num int, ok bool) {
+ for _, p := range localPrefixes() {
+ if strings.HasPrefix(importPath, p) || strings.TrimSuffix(p, "/") == importPath {
+ return 3, true
+ }
+ }
+ return
+ },
+ func(importPath string) (num int, ok bool) {
+ if strings.HasPrefix(importPath, "appengine") {
+ return 2, true
+ }
+ return
+ },
+ func(importPath string) (num int, ok bool) {
+ if strings.Contains(importPath, ".") {
+ return 1, true
+ }
+ return
+ },
+}
+
+func importGroup(importPath string) int {
+ for _, fn := range importToGroup {
+ if n, ok := fn(importPath); ok {
+ return n
+ }
+ }
+ return 0
+}
+
+// An importInfo represents a single import statement.
+type importInfo struct {
+ importPath string // import path, e.g. "crypto/rand".
+ name string // import name, e.g. "crand", or "" if none.
+}
+
+// A packageInfo represents what's known about a package.
+type packageInfo struct {
+ name string // discovered package name.
+ exports map[string]bool // known exports.
+}
+
+// parseOtherFiles parses all the Go files in srcDir except filename, including
+// test files if filename looks like a test.
+func parseOtherFiles(fset *token.FileSet, srcDir, filename string) []*ast.File {
+ // This could use go/packages but it doesn't buy much, and it fails
+ // with https://golang.org/issue/26296 in LoadFiles mode in some cases.
+ considerTests := strings.HasSuffix(filename, "_test.go")
+
+ fileBase := filepath.Base(filename)
+ packageFileInfos, err := ioutil.ReadDir(srcDir)
+ if err != nil {
+ return nil
+ }
+
+ var files []*ast.File
+ for _, fi := range packageFileInfos {
+ if fi.Name() == fileBase || !strings.HasSuffix(fi.Name(), ".go") {
+ continue
+ }
+ if !considerTests && strings.HasSuffix(fi.Name(), "_test.go") {
+ continue
+ }
+
+ f, err := parser.ParseFile(fset, filepath.Join(srcDir, fi.Name()), nil, 0)
+ if err != nil {
+ continue
+ }
+
+ files = append(files, f)
+ }
+
+ return files
+}
+
+// addGlobals puts the names of package vars into the provided map.
+func addGlobals(f *ast.File, globals map[string]bool) {
+ for _, decl := range f.Decls {
+ genDecl, ok := decl.(*ast.GenDecl)
+ if !ok {
+ continue
+ }
+
+ for _, spec := range genDecl.Specs {
+ valueSpec, ok := spec.(*ast.ValueSpec)
+ if !ok {
+ continue
+ }
+ globals[valueSpec.Names[0].Name] = true
+ }
+ }
+}
+
+// collectReferences builds a map of selector expressions, from
+// left hand side (X) to a set of right hand sides (Sel).
+func collectReferences(f *ast.File) map[string]map[string]bool {
+ refs := map[string]map[string]bool{}
+
+ var visitor visitFn
+ visitor = func(node ast.Node) ast.Visitor {
+ if node == nil {
+ return visitor
+ }
+ switch v := node.(type) {
+ case *ast.SelectorExpr:
+ xident, ok := v.X.(*ast.Ident)
+ if !ok {
+ break
+ }
+ if xident.Obj != nil {
+ // If the parser can resolve it, it's not a package ref.
+ break
+ }
+ if !ast.IsExported(v.Sel.Name) {
+ // Whatever this is, it's not exported from a package.
+ break
+ }
+ pkgName := xident.Name
+ r := refs[pkgName]
+ if r == nil {
+ r = make(map[string]bool)
+ refs[pkgName] = r
+ }
+ r[v.Sel.Name] = true
+ }
+ return visitor
+ }
+ ast.Walk(visitor, f)
+ return refs
+}
+
+// collectImports returns all the imports in f, keyed by their package name as
+// determined by pathToName. Unnamed imports (., _) and "C" are ignored.
+func collectImports(f *ast.File) []*importInfo {
+ var imports []*importInfo
+ for _, imp := range f.Imports {
+ var name string
+ if imp.Name != nil {
+ name = imp.Name.Name
+ }
+ if imp.Path.Value == `"C"` || name == "_" || name == "." {
+ continue
+ }
+ path := strings.Trim(imp.Path.Value, `"`)
+ imports = append(imports, &importInfo{
+ name: name,
+ importPath: path,
+ })
+ }
+ return imports
+}
+
+// findMissingImport searches pass's candidates for an import that provides
+// pkg, containing all of syms.
+func (p *pass) findMissingImport(pkg string, syms map[string]bool) *importInfo {
+ for _, candidate := range p.candidates {
+ pkgInfo, ok := p.knownPackages[candidate.importPath]
+ if !ok {
+ continue
+ }
+ // If the candidate import has a name, it must match pkg.
+ if candidate.name != "" && candidate.name != pkg {
+ continue
+ }
+ // Otherwise, the real name of the package must match.
+ if candidate.name == "" && pkgInfo.name != pkg {
+ continue
+ }
+
+ allFound := true
+ for right := range syms {
+ if !pkgInfo.exports[right] {
+ allFound = false
+ break
+ }
+ }
+
+ if allFound {
+ return candidate
+ }
+ }
+ return nil
+}
+
+// A pass contains all the inputs and state necessary to fix a file's imports.
+// It can be modified in some ways during use; see comments below.
+type pass struct {
+ // Inputs. These must be set before a call to load, and not modified after.
+ fset *token.FileSet // fset used to parse f and its siblings.
+ f *ast.File // the file being fixed.
+ srcDir string // the directory containing f.
+ useGoPackages bool // use go/packages to load package information.
+ loadRealPackageNames bool // if true, load package names from disk rather than guessing them.
+ otherFiles []*ast.File // sibling files.
+
+ // Intermediate state, generated by load.
+ existingImports map[string]*importInfo
+ allRefs map[string]map[string]bool
+ missingRefs map[string]map[string]bool
+
+ // Inputs to fix. These can be augmented between successive fix calls.
+ lastTry bool // indicates that this is the last call and fix should clean up as best it can.
+ candidates []*importInfo // candidate imports in priority order.
+ knownPackages map[string]*packageInfo // information about all known packages.
+}
+
+// loadPackageNames saves the package names for everything referenced by imports.
+func (p *pass) loadPackageNames(imports []*importInfo) error {
+ var unknown []string
+ for _, imp := range imports {
+ if _, ok := p.knownPackages[imp.importPath]; ok {
+ continue
+ }
+ unknown = append(unknown, imp.importPath)
+ }
+
+ if !p.useGoPackages || !p.loadRealPackageNames {
+ pathToName := importPathToNameBasic
+ if p.loadRealPackageNames {
+ pathToName = importPathToName
+ }
+ for _, path := range unknown {
+ p.knownPackages[path] = &packageInfo{
+ name: pathToName(path, p.srcDir),
+ exports: map[string]bool{},
+ }
+ }
+ return nil
+ }
+
+ cfg := newPackagesConfig(packages.LoadFiles)
+ pkgs, err := packages.Load(cfg, unknown...)
+ if err != nil {
+ return err
+ }
+ for _, pkg := range pkgs {
+ p.knownPackages[VendorlessPath(pkg.PkgPath)] = &packageInfo{
+ name: pkg.Name,
+ exports: map[string]bool{},
+ }
+ }
+ // We may not have found all the packages. Guess the rest.
+ for _, path := range unknown {
+ if _, ok := p.knownPackages[path]; ok {
+ continue
+ }
+ p.knownPackages[path] = &packageInfo{
+ name: importPathToNameBasic(path, p.srcDir),
+ exports: map[string]bool{},
+ }
+ }
+ return nil
+}
+
+// importIdentifier returns the identifier that imp will introduce.
+func (p *pass) importIdentifier(imp *importInfo) string {
+ if imp.name != "" {
+ return imp.name
+ }
+ return p.knownPackages[imp.importPath].name
+}
+
+// load reads in everything necessary to run a pass, and reports whether the
+// file already has all the imports it needs. It fills in p.missingRefs with the
+// file's missing symbols, if any, or removes unused imports if not.
+func (p *pass) load() bool {
+ p.knownPackages = map[string]*packageInfo{}
+ p.missingRefs = map[string]map[string]bool{}
+ p.existingImports = map[string]*importInfo{}
+
+ // Load basic information about the file in question.
+ p.allRefs = collectReferences(p.f)
+
+ // Load stuff from other files in the same package:
+ // global variables so we know they don't need resolving, and imports
+ // that we might want to mimic.
+ globals := map[string]bool{}
+ for _, otherFile := range p.otherFiles {
+ // Don't load globals from files that are in the same directory
+ // but a different package. Using them to suggest imports is OK.
+ if p.f.Name.Name == otherFile.Name.Name {
+ addGlobals(otherFile, globals)
+ }
+ p.candidates = append(p.candidates, collectImports(otherFile)...)
+ }
+
+ // Resolve all the import paths we've seen to package names, and store
+ // f's imports by the identifier they introduce.
+ imports := collectImports(p.f)
+ p.loadPackageNames(append(imports, p.candidates...))
+ for _, imp := range imports {
+ p.existingImports[p.importIdentifier(imp)] = imp
+ }
+
+ // Find missing references.
+ for left, rights := range p.allRefs {
+ if globals[left] {
+ continue
+ }
+ _, ok := p.existingImports[left]
+ if !ok {
+ p.missingRefs[left] = rights
+ continue
+ }
+ }
+ if len(p.missingRefs) != 0 {
+ return false
+ }
+
+ return p.fix()
+}
+
+// fix attempts to satisfy missing imports using p.candidates. If it finds
+// everything, or if p.lastTry is true, it adds the imports it found,
+// removes anything unused, and returns true.
+func (p *pass) fix() bool {
+ // Find missing imports.
+ var selected []*importInfo
+ for left, rights := range p.missingRefs {
+ if imp := p.findMissingImport(left, rights); imp != nil {
+ selected = append(selected, imp)
+ }
+ }
+
+ if !p.lastTry && len(selected) != len(p.missingRefs) {
+ return false
+ }
+
+ // Found everything, or giving up. Add the new imports and remove any unused.
+ for _, imp := range p.existingImports {
+ // We deliberately ignore globals here, because we can't be sure
+ // they're in the same package. People do things like put multiple
+ // main packages in the same directory, and we don't want to
+ // remove imports if they happen to have the same name as a var in
+ // a different package.
+ if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok {
+ astutil.DeleteNamedImport(p.fset, p.f, imp.name, imp.importPath)
+ }
+ }
+
+ for _, imp := range selected {
+ astutil.AddNamedImport(p.fset, p.f, imp.name, imp.importPath)
+ }
+
+ if p.loadRealPackageNames {
+ for _, imp := range p.f.Imports {
+ if imp.Name != nil {
+ continue
+ }
+ path := strings.Trim(imp.Path.Value, `""`)
+ pkg, ok := p.knownPackages[path]
+ if !ok {
+ continue
+ }
+ if pkg.name != importPathToNameBasic(path, p.srcDir) {
+ imp.Name = &ast.Ident{Name: pkg.name, NamePos: imp.Pos()}
+ }
+ }
+ }
+
+ return true
+}
+
+// assumeSiblingImportsValid assumes that siblings' use of packages is valid,
+// adding the exports they use.
+func (p *pass) assumeSiblingImportsValid() {
+ for _, f := range p.otherFiles {
+ refs := collectReferences(f)
+ imports := collectImports(f)
+ importsByName := map[string]*importInfo{}
+ for _, imp := range imports {
+ importsByName[p.importIdentifier(imp)] = imp
+ }
+ for left, rights := range refs {
+ if imp, ok := importsByName[left]; ok {
+ if _, ok := stdlib[imp.importPath]; ok {
+ // We have the stdlib in memory; no need to guess.
+ rights = stdlib[imp.importPath]
+ }
+ p.addCandidate(imp, &packageInfo{
+ // no name; we already know it.
+ exports: rights,
+ })
+ }
+ }
+ }
+}
+
+// addCandidate adds a candidate import to p, and merges in the information
+// in pkg.
+func (p *pass) addCandidate(imp *importInfo, pkg *packageInfo) {
+ p.candidates = append(p.candidates, imp)
+ if existing, ok := p.knownPackages[imp.importPath]; ok {
+ if existing.name == "" {
+ existing.name = pkg.name
+ }
+ for export := range pkg.exports {
+ existing.exports[export] = true
+ }
+ } else {
+ p.knownPackages[imp.importPath] = pkg
+ }
+}
+
+// fixImports adds and removes imports from f so that all its references are
+// satisfied and there are no unused imports.
+//
+// This is declared as a variable rather than a function so goimports can
+// easily be extended by adding a file with an init function.
+var fixImports = fixImportsDefault
+
+func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string) error {
+ abs, err := filepath.Abs(filename)
+ if err != nil {
+ return err
+ }
+ srcDir := filepath.Dir(abs)
+ if Debug {
+ log.Printf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir)
+ }
+
+ // First pass: looking only at f, and using the naive algorithm to
+ // derive package names from import paths, see if the file is already
+ // complete. We can't add any imports yet, because we don't know
+ // if missing references are actually package vars.
+ p := &pass{fset: fset, f: f, srcDir: srcDir}
+ if p.load() {
+ return nil
+ }
+
+ otherFiles := parseOtherFiles(fset, srcDir, filename)
+
+ // Second pass: add information from other files in the same package,
+ // like their package vars and imports.
+ p = &pass{fset: fset, f: f, srcDir: srcDir}
+ p.otherFiles = otherFiles
+ if p.load() {
+ return nil
+ }
+
+ // Now we can try adding imports from the stdlib.
+ p.assumeSiblingImportsValid()
+ addStdlibCandidates(p, p.missingRefs)
+ if p.fix() {
+ return nil
+ }
+
+ // The only things that use go/packages happen in the third pass,
+ // so we can delay calling go env until this point.
+ useGoPackages := shouldUseGoPackages()
+
+ // Third pass: get real package names where we had previously used
+ // the naive algorithm.
+ p = &pass{fset: fset, f: f, srcDir: srcDir, useGoPackages: useGoPackages}
+ p.loadRealPackageNames = true
+ p.otherFiles = otherFiles
+ if p.load() {
+ return nil
+ }
+
+ addStdlibCandidates(p, p.missingRefs)
+ p.assumeSiblingImportsValid()
+ if p.fix() {
+ return nil
+ }
+
+ // Go look for candidates in $GOPATH, etc. We don't necessarily load
+ // the real exports of sibling imports, so keep assuming their contents.
+ if err := addExternalCandidates(p, p.missingRefs, filename); err != nil {
+ return err
+ }
+
+ p.lastTry = true
+ p.fix()
+ return nil
+}
+
+// Values controlling the use of go/packages, for testing only.
+var forceGoPackages, _ = strconv.ParseBool(os.Getenv("GOIMPORTSFORCEGOPACKAGES"))
+var goPackagesDir string
+var go111ModuleEnv string
+
+func shouldUseGoPackages() bool {
+ if forceGoPackages {
+ return true
+ }
+
+ cmd := exec.Command("go", "env", "GOMOD")
+ cmd.Dir = goPackagesDir
+ out, err := cmd.Output()
+ if err != nil {
+ return false
+ }
+ return len(bytes.TrimSpace(out)) > 0
+}
+
+func newPackagesConfig(mode packages.LoadMode) *packages.Config {
+ cfg := &packages.Config{
+ Mode: mode,
+ Dir: goPackagesDir,
+ Env: append(os.Environ(), "GOROOT="+build.Default.GOROOT, "GOPATH="+build.Default.GOPATH),
+ }
+ if go111ModuleEnv != "" {
+ cfg.Env = append(cfg.Env, "GO111MODULE="+go111ModuleEnv)
+ }
+ return cfg
+}
+
+func addStdlibCandidates(pass *pass, refs map[string]map[string]bool) {
+ add := func(pkg string) {
+ pass.addCandidate(
+ &importInfo{importPath: pkg},
+ &packageInfo{name: path.Base(pkg), exports: stdlib[pkg]})
+ }
+ for left := range refs {
+ if left == "rand" {
+ // Make sure we try crypto/rand before math/rand.
+ add("crypto/rand")
+ add("math/rand")
+ continue
+ }
+ for importPath := range stdlib {
+ if path.Base(importPath) == left {
+ add(importPath)
+ }
+ }
+ }
+}
+
+func scanGoPackages(refs map[string]map[string]bool) ([]*pkg, error) {
+ var loadQueries []string
+ for pkgName := range refs {
+ loadQueries = append(loadQueries, "name="+pkgName)
+ }
+ sort.Strings(loadQueries)
+ cfg := newPackagesConfig(packages.LoadFiles)
+ goPackages, err := packages.Load(cfg, loadQueries...)
+ if err != nil {
+ return nil, err
+ }
+
+ var scan []*pkg
+ for _, goPackage := range goPackages {
+ scan = append(scan, &pkg{
+ dir: filepath.Dir(goPackage.CompiledGoFiles[0]),
+ importPathShort: VendorlessPath(goPackage.PkgPath),
+ goPackage: goPackage,
+ })
+ }
+ return scan, nil
+}
+
+// addExternalCandidates is a hook point so tests can substitute lookup logic.
+var addExternalCandidates = addExternalCandidatesDefault
+
+// addExternalCandidatesDefault finds candidate packages outside the standard
+// library for every unresolved reference in refs — via go/packages when
+// pass.useGoPackages is set, otherwise via a GOPATH/GOROOT directory walk —
+// and records each match on pass. The per-package lookups run concurrently;
+// the first lookup error cancels the rest and is returned.
+func addExternalCandidatesDefault(pass *pass, refs map[string]map[string]bool, filename string) error {
+ var dirScan []*pkg
+ if pass.useGoPackages {
+ var err error
+ dirScan, err = scanGoPackages(refs)
+ if err != nil {
+ return err
+ }
+ } else {
+ dirScan = scanGoDirs()
+ }
+
+ // Search for imports matching potential package references.
+ type result struct {
+ imp *importInfo
+ pkg *packageInfo
+ }
+ // Buffered to len(refs) so every worker can deliver without blocking,
+ // even if the consumer below has stopped early.
+ results := make(chan result, len(refs))
+
+ ctx, cancel := context.WithCancel(context.TODO())
+ var wg sync.WaitGroup
+ defer func() {
+ cancel()
+ wg.Wait()
+ }()
+ var (
+ firstErr error
+ firstErrOnce sync.Once
+ )
+ for pkgName, symbols := range refs {
+ wg.Add(1)
+ go func(pkgName string, symbols map[string]bool) {
+ defer wg.Done()
+
+ found, err := findImport(ctx, dirScan, pkgName, symbols, filename)
+
+ if err != nil {
+ // Record only the first error and cancel the siblings.
+ firstErrOnce.Do(func() {
+ firstErr = err
+ cancel()
+ })
+ return
+ }
+
+ if found == nil {
+ return // No matching package.
+ }
+
+ imp := &importInfo{
+ importPath: found.importPathShort,
+ }
+
+ pkg := &packageInfo{
+ name: pkgName,
+ exports: symbols,
+ }
+ results <- result{imp, pkg}
+ }(pkgName, symbols)
+ }
+ // Close results once all workers are done so the range below terminates.
+ go func() {
+ wg.Wait()
+ close(results)
+ }()
+
+ for result := range results {
+ pass.addCandidate(result.imp, result.pkg)
+ }
+ // Safe to read without the Once: the range above only ends after all
+ // workers (and hence any firstErrOnce.Do) have finished.
+ return firstErr
+}
+
// importPathToNameBasic assumes the package name is the base of import path,
// except that if the path ends in foo/vN, it assumes the package name is foo.
// srcDir is accepted for signature compatibility and is not consulted.
func importPathToNameBasic(importPath, srcDir string) (packageName string) {
	base := path.Base(importPath)
	// A trailing major-version element ("v2", "v10", ...) is not a package
	// name; fall back to the path element before it when one exists.
	if len(base) > 1 && base[0] == 'v' {
		if _, err := strconv.Atoi(base[1:]); err == nil {
			if dir := path.Dir(importPath); dir != "." {
				return path.Base(dir)
			}
		}
	}
	return base
}
+
+// importPathToName finds out the actual package name, as declared in its .go
+// files, via importPathToNameGoPathParse. If there's a problem, it falls back
+// to using importPathToNameBasic.
+func importPathToName(importPath, srcDir string) (packageName string) {
+ // Fast path for standard library without going to disk.
+ if _, ok := stdlib[importPath]; ok {
+ return path.Base(importPath) // stdlib packages always match their paths.
+ }
+
+ pkgName, err := importPathToNameGoPathParse(importPath, srcDir)
+ if Debug {
+ log.Printf("importPathToNameGoPathParse(%q, srcDir=%q) = %q, %v", importPath, srcDir, pkgName, err)
+ }
+ if err == nil {
+ return pkgName
+ }
+ return importPathToNameBasic(importPath, srcDir)
+}
+
+// importPathToNameGoPathParse is a faster version of build.Import if
+// the only thing desired is the package name. It uses build.FindOnly
+// to find the directory and then only parses one file in the package,
+// trusting that the files in the directory are consistent.
+// It returns an error when no importable (non-main, non-"documentation")
+// package clause can be parsed from any .go file in the directory.
+func importPathToNameGoPathParse(importPath, srcDir string) (packageName string, err error) {
+ buildPkg, err := build.Import(importPath, srcDir, build.FindOnly)
+ if err != nil {
+ return "", err
+ }
+ d, err := os.Open(buildPkg.Dir)
+ if err != nil {
+ return "", err
+ }
+ names, err := d.Readdirnames(-1)
+ d.Close()
+ if err != nil {
+ return "", err
+ }
+ sort.Strings(names) // to have predictable behavior
+ var lastErr error
+ var nfile int
+ for _, name := range names {
+ if !strings.HasSuffix(name, ".go") {
+ continue
+ }
+ if strings.HasSuffix(name, "_test.go") {
+ continue
+ }
+ nfile++
+ fullFile := filepath.Join(buildPkg.Dir, name)
+
+ // Parse only the package clause; the rest of the file is irrelevant.
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, fullFile, nil, parser.PackageClauseOnly)
+ if err != nil {
+ lastErr = err
+ continue
+ }
+ pkgName := f.Name.Name
+ if pkgName == "documentation" {
+ // Special case from go/build.ImportDir, not
+ // handled by ctx.MatchFile.
+ continue
+ }
+ if pkgName == "main" {
+ // Also skip package main, assuming it's a +build ignore generator or example.
+ // Since you can't import a package main anyway, there's no harm here.
+ continue
+ }
+ return pkgName, nil
+ }
+ if lastErr != nil {
+ return "", lastErr
+ }
+ return "", fmt.Errorf("no importable package found in %d Go files", nfile)
+}
+
+// pkg is a candidate package, discovered either by a directory walk or via
+// go/packages (in which case goPackage is non-nil).
+type pkg struct {
+ goPackage *packages.Package
+ dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http")
+ importPathShort string // vendorless import path ("net/http", "a/b")
+}
+
+// pkgDistance pairs a candidate package with its directory distance from the
+// file being processed; smaller distances sort earlier (see the sort below).
+type pkgDistance struct {
+ pkg *pkg
+ distance int // relative distance to target
+}
+
+// byDistanceOrImportPathShortLength sorts by relative distance breaking ties
+// on the short import path length and then the import string itself.
+// A distance of -1 means "unknown" and sorts after every known distance.
+type byDistanceOrImportPathShortLength []pkgDistance
+
+func (s byDistanceOrImportPathShortLength) Len() int { return len(s) }
+func (s byDistanceOrImportPathShortLength) Less(i, j int) bool {
+ di, dj := s[i].distance, s[j].distance
+ // -1 (unknown) always loses to a known distance.
+ if di == -1 {
+ return false
+ }
+ if dj == -1 {
+ return true
+ }
+ if di != dj {
+ return di < dj
+ }
+
+ vi, vj := s[i].pkg.importPathShort, s[j].pkg.importPathShort
+ if len(vi) != len(vj) {
+ return len(vi) < len(vj)
+ }
+ return vi < vj
+}
+func (s byDistanceOrImportPathShortLength) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
+
// distance returns the number of path hops between basepath and targetpath
// (0 when they are the same directory), or -1 when no relative path between
// them can be computed.
func distance(basepath, targetpath string) int {
	rel, err := filepath.Rel(basepath, targetpath)
	if err != nil {
		return -1
	}
	if rel == "." {
		return 0
	}
	return 1 + strings.Count(rel, string(filepath.Separator))
}
+
+// scanGoDirs walks GOPATH and GOROOT source directories and returns one pkg
+// candidate per unique package directory found, with its vendorless import
+// path. Duplicate directories reported by the walker are collapsed.
+func scanGoDirs() []*pkg {
+ dupCheck := make(map[string]bool)
+ var result []*pkg
+
+ // gopathwalk may invoke the callback concurrently; guard shared state.
+ var mu sync.Mutex
+
+ add := func(root gopathwalk.Root, dir string) {
+ mu.Lock()
+ defer mu.Unlock()
+
+ if _, dup := dupCheck[dir]; dup {
+ return
+ }
+ dupCheck[dir] = true
+ // Import path is the directory path relative to the root, slash-separated.
+ importpath := filepath.ToSlash(dir[len(root.Path)+len("/"):])
+ result = append(result, &pkg{
+ importPathShort: VendorlessPath(importpath),
+ dir: dir,
+ })
+ }
+ gopathwalk.Walk(gopathwalk.SrcDirsRoots(), add, gopathwalk.Options{Debug: Debug, ModulesEnabled: false})
+ return result
+}
+
// VendorlessPath returns the devendorized version of the import path ipath.
// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b".
func VendorlessPath(ipath string) string {
	const sep = "/vendor/"
	// Keep only what follows the last interior vendor directory, if any.
	if i := strings.LastIndex(ipath, sep); i >= 0 {
		return ipath[i+len(sep):]
	}
	// A leading "vendor/" (no preceding slash) is stripped the same way.
	return strings.TrimPrefix(ipath, "vendor/")
}
+
+// loadExports returns the set of exported symbols in the package at pkg.dir.
+// When pkg was loaded via go/packages, exports are parsed from its
+// CompiledGoFiles; otherwise the directory is scanned for buildable,
+// non-test .go files whose package clause matches expectPackage.
+// It returns an error (and a nil map) on parse failure or name mismatch.
+func loadExports(ctx context.Context, expectPackage string, pkg *pkg) (map[string]bool, error) {
+ if Debug {
+ log.Printf("loading exports in dir %s (seeking package %s)", pkg.dir, expectPackage)
+ }
+ // Fast path: the go/packages loader already decided which files belong
+ // to the package, so no MatchFile/package-name filtering is needed.
+ if pkg.goPackage != nil {
+ exports := map[string]bool{}
+ fset := token.NewFileSet()
+ for _, fname := range pkg.goPackage.CompiledGoFiles {
+ f, err := parser.ParseFile(fset, fname, nil, 0)
+ if err != nil {
+ return nil, fmt.Errorf("parsing %s: %v", fname, err)
+ }
+ for name := range f.Scope.Objects {
+ if ast.IsExported(name) {
+ exports[name] = true
+ }
+ }
+ }
+ return exports, nil
+ }
+
+ exports := make(map[string]bool)
+
+ // Look for non-test, buildable .go files which could provide exports.
+ all, err := ioutil.ReadDir(pkg.dir)
+ if err != nil {
+ return nil, err
+ }
+ var files []os.FileInfo
+ for _, fi := range all {
+ name := fi.Name()
+ if !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") {
+ continue
+ }
+ match, err := build.Default.MatchFile(pkg.dir, fi.Name())
+ if err != nil || !match {
+ continue
+ }
+ files = append(files, fi)
+ }
+
+ if len(files) == 0 {
+ return nil, fmt.Errorf("dir %v contains no buildable, non-test .go files", pkg.dir)
+ }
+
+ fset := token.NewFileSet()
+ for _, fi := range files {
+ // Honor cancellation between files; parsing can be slow on big dirs.
+ select {
+ case <-ctx.Done():
+ return nil, ctx.Err()
+ default:
+ }
+
+ fullFile := filepath.Join(pkg.dir, fi.Name())
+ f, err := parser.ParseFile(fset, fullFile, nil, 0)
+ if err != nil {
+ return nil, fmt.Errorf("parsing %s: %v", fullFile, err)
+ }
+ pkgName := f.Name.Name
+ if pkgName == "documentation" {
+ // Special case from go/build.ImportDir, not
+ // handled by MatchFile above.
+ continue
+ }
+ if pkgName != expectPackage {
+ return nil, fmt.Errorf("scan of dir %v is not expected package %v (actually %v)", pkg.dir, expectPackage, pkgName)
+ }
+ for name := range f.Scope.Objects {
+ if ast.IsExported(name) {
+ exports[name] = true
+ }
+ }
+ }
+
+ if Debug {
+ exportList := make([]string, 0, len(exports))
+ for k := range exports {
+ exportList = append(exportList, k)
+ }
+ sort.Strings(exportList)
+ log.Printf("loaded exports in dir %v (package %v): %v", pkg.dir, expectPackage, strings.Join(exportList, ", "))
+ }
+ return exports, nil
+}
+
+// findImport searches dirScan for a package named pkgName whose exports
+// include every symbol in symbols. Candidates are ranked by directory
+// distance from filename and short-import-path length, their exports are
+// loaded concurrently (at most maxConcurrentPackageImport at a time), and
+// the best-ranked match is returned. It returns (nil, nil) when no package
+// matches.
+func findImport(ctx context.Context, dirScan []*pkg, pkgName string, symbols map[string]bool, filename string) (*pkg, error) {
+ pkgDir, err := filepath.Abs(filename)
+ if err != nil {
+ return nil, err
+ }
+ pkgDir = filepath.Dir(pkgDir)
+
+ // Find candidate packages, looking only at their directory names first.
+ var candidates []pkgDistance
+ for _, pkg := range dirScan {
+ if pkgIsCandidate(filename, pkgName, pkg) {
+ candidates = append(candidates, pkgDistance{
+ pkg: pkg,
+ distance: distance(pkgDir, pkg.dir),
+ })
+ }
+ }
+
+ // Sort the candidates by their import package length,
+ // assuming that shorter package names are better than long
+ // ones. Note that this sorts by the de-vendored name, so
+ // there's no "penalty" for vendoring.
+ sort.Sort(byDistanceOrImportPathShortLength(candidates))
+ if Debug {
+ for i, c := range candidates {
+ log.Printf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir)
+ }
+ }
+
+ // Collect exports for packages with matching names.
+
+ // One single-slot channel per candidate, so results can be consumed
+ // strictly in ranked order below.
+ rescv := make([]chan *pkg, len(candidates))
+ for i := range candidates {
+ rescv[i] = make(chan *pkg, 1)
+ }
+ const maxConcurrentPackageImport = 4
+ loadExportsSem := make(chan struct{}, maxConcurrentPackageImport)
+
+ ctx, cancel := context.WithCancel(ctx)
+ var wg sync.WaitGroup
+ defer func() {
+ cancel()
+ wg.Wait()
+ }()
+
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ for i, c := range candidates {
+ // Acquire the semaphore or bail out on cancellation.
+ select {
+ case loadExportsSem <- struct{}{}:
+ case <-ctx.Done():
+ return
+ }
+
+ wg.Add(1)
+ go func(c pkgDistance, resc chan<- *pkg) {
+ defer func() {
+ <-loadExportsSem
+ wg.Done()
+ }()
+
+ exports, err := loadExports(ctx, pkgName, c.pkg)
+ if err != nil {
+ if Debug {
+ log.Printf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err)
+ }
+ resc <- nil
+ return
+ }
+
+ // If it doesn't have the right
+ // symbols, send nil to mean no match.
+ for symbol := range symbols {
+ if !exports[symbol] {
+ resc <- nil
+ return
+ }
+ }
+ resc <- c.pkg
+ }(c, rescv[i])
+ }
+ }()
+
+ // Take the first match in ranked order; the deferred cancel stops any
+ // still-running loaders for worse-ranked candidates.
+ for _, resc := range rescv {
+ pkg := <-resc
+ if pkg == nil {
+ continue
+ }
+ return pkg, nil
+ }
+ return nil, nil
+}
+
+// pkgIsCandidate reports whether pkg is a candidate for satisfying the
+// finding which package pkgIdent in the file named by filename is trying
+// to refer to.
+//
+// This check is purely lexical and is meant to be as fast as possible
+// because it's run over all $GOPATH directories to filter out poor
+// candidates in order to limit the CPU and I/O later parsing the
+// exports in candidate packages.
+//
+// filename is the file being formatted.
+// pkgIdent is the package being searched for, like "client" (if
+// searching for "client.New")
+func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
+ // Check "internal" and "vendor" visibility:
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+
+ // Speed optimization to minimize disk I/O:
+ // the last two components on disk must contain the
+ // package name somewhere.
+ //
+ // This permits mismatch naming like directory
+ // "go-foo" being package "foo", or "pkg.v3" being "pkg",
+ // or directory "google.golang.org/api/cloudbilling/v1"
+ // being package "cloudbilling", but doesn't
+ // permit a directory "foo" to be package
+ // "bar", which is strongly discouraged
+ // anyway. There's no reason goimports needs
+ // to be slow just to accommodate that.
+ lastTwo := lastTwoComponents(pkg.importPathShort)
+ if strings.Contains(lastTwo, pkgIdent) {
+ return true
+ }
+ // Retry with hyphens removed and ASCII lowered ("go-foo" vs "foo").
+ if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) {
+ lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
+ if strings.Contains(lastTwo, pkgIdent) {
+ return true
+ }
+ }
+
+ return false
+}
+
// hasHyphenOrUpperASCII reports whether s contains an ASCII hyphen or an
// ASCII uppercase letter. Non-ASCII bytes are never considered matches.
func hasHyphenOrUpperASCII(s string) bool {
	for _, b := range []byte(s) {
		if b == '-' || ('A' <= b && b <= 'Z') {
			return true
		}
	}
	return false
}
+
// lowerASCIIAndRemoveHyphen returns s with ASCII uppercase letters lowered
// and hyphens removed; all other bytes are copied through unchanged.
// (The previous named result "ret" was never used and has been dropped.)
func lowerASCIIAndRemoveHyphen(s string) string {
	buf := make([]byte, 0, len(s))
	for i := 0; i < len(s); i++ {
		switch b := s[i]; {
		case b == '-':
			// Drop hyphens entirely.
		case 'A' <= b && b <= 'Z':
			buf = append(buf, b+('a'-'A'))
		default:
			buf = append(buf, b)
		}
	}
	return string(buf)
}
+
+// canUse reports whether the package in dir is usable from filename,
+// respecting the Go "internal" and "vendor" visibility rules.
+func canUse(filename, dir string) bool {
+ // Fast path check, before any allocations. If it doesn't contain vendor
+ // or internal, it's not tricky:
+ // Note that this can false-negative on directories like "notinternal",
+ // but we check it correctly below. This is just a fast path.
+ if !strings.Contains(dir, "vendor") && !strings.Contains(dir, "internal") {
+ return true
+ }
+
+ dirSlash := filepath.ToSlash(dir)
+ if !strings.Contains(dirSlash, "/vendor/") && !strings.Contains(dirSlash, "/internal/") && !strings.HasSuffix(dirSlash, "/internal") {
+ return true
+ }
+ // Vendor or internal directory only visible from children of parent.
+ // That means the path from the current directory to the target directory
+ // can contain ../vendor or ../internal but not ../foo/vendor or ../foo/internal
+ // or bar/vendor or bar/internal.
+ // After stripping all the leading ../, the only okay place to see vendor or internal
+ // is at the very beginning of the path.
+ absfile, err := filepath.Abs(filename)
+ if err != nil {
+ return false
+ }
+ absdir, err := filepath.Abs(dir)
+ if err != nil {
+ return false
+ }
+ // NOTE(review): Rel is taken from the file path itself rather than its
+ // directory, adding one extra "../" hop; the LastIndex stripping below
+ // discards all leading "../" anyway — confirm against upstream goimports.
+ rel, err := filepath.Rel(absfile, absdir)
+ if err != nil {
+ return false
+ }
+ relSlash := filepath.ToSlash(rel)
+ if i := strings.LastIndex(relSlash, "../"); i >= 0 {
+ relSlash = relSlash[i+len("../"):]
+ }
+ return !strings.Contains(relSlash, "/vendor/") && !strings.Contains(relSlash, "/internal/") && !strings.HasSuffix(relSlash, "/internal")
+}
+
// lastTwoComponents returns at most the last two path components
// of v, using either / or \ as the path separator. When two separators
// are found, the result keeps the leading separator (e.g. "/b/c").
func lastTwoComponents(v string) string {
	seen := 0
	for i := len(v) - 1; i >= 0; i-- {
		switch v[i] {
		case '/', '\\':
			seen++
			if seen == 2 {
				return v[i:]
			}
		}
	}
	return v
}
+
+// visitFn adapts a plain function to the ast.Visitor interface.
+type visitFn func(node ast.Node) ast.Visitor
+
+// Visit implements ast.Visitor by delegating to fn itself.
+func (fn visitFn) Visit(node ast.Node) ast.Visitor {
+ return fn(node)
+}
diff --git a/vendor/golang.org/x/tools/imports/imports.go b/vendor/golang.org/x/tools/imports/imports.go
new file mode 100644
index 000000000..717a6f3aa
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/imports.go
@@ -0,0 +1,309 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:generate go run mkstdlib.go
+
+// Package imports implements a Go pretty-printer (like package "go/format")
+// that also adds or removes import statements as necessary.
+package imports // import "golang.org/x/tools/imports"
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/format"
+ "go/parser"
+ "go/printer"
+ "go/token"
+ "io"
+ "io/ioutil"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+// Options specifies options for processing files.
+// The zero value is NOT the default: Process substitutes
+// {Comments: true, TabIndent: true, TabWidth: 8} when passed a nil *Options.
+type Options struct {
+ Fragment bool // Accept fragment of a source file (no package statement)
+ AllErrors bool // Report all errors (not just the first 10 on different lines)
+
+ Comments bool // Print comments (true if nil *Options provided)
+ TabIndent bool // Use tabs for indent (true if nil *Options provided)
+ TabWidth int // Tab width (8 if nil *Options provided)
+
+ FormatOnly bool // Disable the insertion and deletion of imports
+}
+
+// Process formats and adjusts imports for the provided file.
+// If opt is nil the defaults are used.
+//
+// Note that filename's directory influences which imports can be chosen,
+// so it is important that filename be accurate.
+// To process data ``as if'' it were in filename, pass the data as a non-nil src.
+func Process(filename string, src []byte, opt *Options) ([]byte, error) {
+ if opt == nil {
+ opt = &Options{Comments: true, TabIndent: true, TabWidth: 8}
+ }
+ if src == nil {
+ b, err := ioutil.ReadFile(filename)
+ if err != nil {
+ return nil, err
+ }
+ src = b
+ }
+
+ fileSet := token.NewFileSet()
+ file, adjust, err := parse(fileSet, filename, src, opt)
+ if err != nil {
+ return nil, err
+ }
+
+ if !opt.FormatOnly {
+ if err := fixImports(fileSet, file, filename); err != nil {
+ return nil, err
+ }
+ }
+
+ sortImports(fileSet, file)
+ imps := astutil.Imports(fileSet, file)
+ var spacesBefore []string // import paths we need spaces before
+ for _, impSection := range imps {
+ // Within each block of contiguous imports, see if any
+ // import lines are in different group numbers. If so,
+ // we'll need to put a space between them so it's
+ // compatible with gofmt.
+ lastGroup := -1
+ for _, importSpec := range impSection {
+ importPath, _ := strconv.Unquote(importSpec.Path.Value)
+ groupNum := importGroup(importPath)
+ if groupNum != lastGroup && lastGroup != -1 {
+ spacesBefore = append(spacesBefore, importPath)
+ }
+ lastGroup = groupNum
+ }
+
+ }
+
+ printerMode := printer.UseSpaces
+ if opt.TabIndent {
+ printerMode |= printer.TabIndent
+ }
+ printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth}
+
+ var buf bytes.Buffer
+ err = printConfig.Fprint(&buf, fileSet, file)
+ if err != nil {
+ return nil, err
+ }
+ out := buf.Bytes()
+ // For fragment input, strip the synthetic wrapper added by parse and
+ // re-apply the original leading/trailing whitespace.
+ if adjust != nil {
+ out = adjust(src, out)
+ }
+ if len(spacesBefore) > 0 {
+ out, err = addImportSpaces(bytes.NewReader(out), spacesBefore)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ // Final gofmt pass so the textual surgery above cannot leave
+ // non-canonical formatting behind.
+ out, err = format.Source(out)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+// parse parses src, which was read from filename,
+// as a Go source file or statement list.
+// On fragment input it returns a non-nil adjust function that undoes the
+// synthetic wrapping applied here (see the branches below).
+func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) {
+ parserMode := parser.Mode(0)
+ if opt.Comments {
+ parserMode |= parser.ParseComments
+ }
+ if opt.AllErrors {
+ parserMode |= parser.AllErrors
+ }
+
+ // Try as whole source file.
+ file, err := parser.ParseFile(fset, filename, src, parserMode)
+ if err == nil {
+ return file, nil, nil
+ }
+ // If the error is that the source file didn't begin with a
+ // package line and we accept fragmented input, fall through to
+ // try as a source fragment. Stop and return on any other error.
+ if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") {
+ return nil, nil, err
+ }
+
+ // If this is a declaration list, make it a source file
+ // by inserting a package clause.
+ // Insert using a ;, not a newline, so that parse errors are on
+ // the correct line.
+ const prefix = "package main;"
+ psrc := append([]byte(prefix), src...)
+ file, err = parser.ParseFile(fset, filename, psrc, parserMode)
+ if err == nil {
+ // Gofmt will turn the ; into a \n.
+ // Do that ourselves now and update the file contents,
+ // so that positions and line numbers are correct going forward.
+ psrc[len(prefix)-1] = '\n'
+ fset.File(file.Package).SetLinesForContent(psrc)
+
+ // If a main function exists, we will assume this is a main
+ // package and leave the file.
+ if containsMainFunc(file) {
+ return file, nil, nil
+ }
+
+ adjust := func(orig, src []byte) []byte {
+ // Remove the package clause.
+ src = src[len(prefix):]
+ return matchSpace(orig, src)
+ }
+ return file, adjust, nil
+ }
+ // If the error is that the source file didn't begin with a
+ // declaration, fall through to try as a statement list.
+ // Stop and return on any other error.
+ if !strings.Contains(err.Error(), "expected declaration") {
+ return nil, nil, err
+ }
+
+ // If this is a statement list, make it a source file
+ // by inserting a package clause and turning the list
+ // into a function body. This handles expressions too.
+ // Insert using a ;, not a newline, so that the line numbers
+ // in fsrc match the ones in src.
+ fsrc := append(append([]byte("package p; func _() {"), src...), '}')
+ file, err = parser.ParseFile(fset, filename, fsrc, parserMode)
+ if err == nil {
+ adjust := func(orig, src []byte) []byte {
+ // Remove the wrapping.
+ // Gofmt has turned the ; into a \n\n.
+ src = src[len("package p\n\nfunc _() {"):]
+ src = src[:len(src)-len("}\n")]
+ // Gofmt has also indented the function body one level.
+ // Remove that indent.
+ src = bytes.Replace(src, []byte("\n\t"), []byte("\n"), -1)
+ return matchSpace(orig, src)
+ }
+ return file, adjust, nil
+ }
+
+ // Failed, and out of options.
+ return nil, nil, err
+}
+
+// containsMainFunc checks if a file contains a function declaration with the
+// function signature 'func main()'
+func containsMainFunc(file *ast.File) bool {
+ for _, decl := range file.Decls {
+ if f, ok := decl.(*ast.FuncDecl); ok {
+ if f.Name.Name != "main" {
+ continue
+ }
+
+ if len(f.Type.Params.List) != 0 {
+ continue
+ }
+
+ if f.Type.Results != nil && len(f.Type.Results.List) != 0 {
+ continue
+ }
+
+ return true
+ }
+ }
+
+ return false
+}
+
// cutSpace splits b into leading whitespace, the trimmed middle, and
// trailing whitespace, where whitespace is a run of spaces, tabs, or
// newlines. When b is entirely whitespace, before and middle are nil and
// after holds the whole input (preserving the original behavior).
func cutSpace(b []byte) (before, middle, after []byte) {
	isSpace := func(c byte) bool { return c == ' ' || c == '\t' || c == '\n' }
	lo, hi := 0, len(b)
	for lo < hi && isSpace(b[lo]) {
		lo++
	}
	for hi > 0 && isSpace(b[hi-1]) {
		hi--
	}
	if lo > hi {
		return nil, nil, b[hi:]
	}
	return b[:lo], b[lo:hi], b[hi:]
}
+
+// matchSpace reformats src to use the same space context as orig.
+// 1) If orig begins with blank lines, matchSpace inserts them at the beginning of src.
+// 2) matchSpace copies the indentation of the first non-blank line in orig
+// to every non-blank line in src.
+// 3) matchSpace copies the trailing space from orig and uses it in place
+// of src's trailing space.
+func matchSpace(orig []byte, src []byte) []byte {
+ before, _, after := cutSpace(orig)
+ // Split orig's leading whitespace into whole blank lines (before) and
+ // the first line's indentation (indent).
+ i := bytes.LastIndex(before, []byte{'\n'})
+ before, indent := before[:i+1], before[i+1:]
+
+ _, src, _ = cutSpace(src)
+
+ var b bytes.Buffer
+ b.Write(before)
+ for len(src) > 0 {
+ line := src
+ if i := bytes.IndexByte(line, '\n'); i >= 0 {
+ line, src = line[:i+1], line[i+1:]
+ } else {
+ src = nil
+ }
+ if len(line) > 0 && line[0] != '\n' { // not blank
+ b.Write(indent)
+ }
+ b.Write(line)
+ }
+ b.Write(after)
+ return b.Bytes()
+}
+
+// impLine matches one import line inside an import block, optionally with a
+// local name, capturing the quoted import path.
+var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+)"`)
+
+// addImportSpaces copies r to the returned buffer, inserting a blank line
+// before each import whose path appears (in order) in breaks. It relies on
+// breaks being in the same order as the imports in r.
+func addImportSpaces(r io.Reader, breaks []string) ([]byte, error) {
+ var out bytes.Buffer
+ in := bufio.NewReader(r)
+ inImports := false
+ done := false
+ for {
+ s, err := in.ReadString('\n')
+ if err == io.EOF {
+ break
+ } else if err != nil {
+ return nil, err
+ }
+
+ if !inImports && !done && strings.HasPrefix(s, "import") {
+ inImports = true
+ }
+ // A top-level declaration keyword marks the end of the import section.
+ if inImports && (strings.HasPrefix(s, "var") ||
+ strings.HasPrefix(s, "func") ||
+ strings.HasPrefix(s, "const") ||
+ strings.HasPrefix(s, "type")) {
+ done = true
+ inImports = false
+ }
+ if inImports && len(breaks) > 0 {
+ if m := impLine.FindStringSubmatch(s); m != nil {
+ if m[1] == breaks[0] {
+ out.WriteByte('\n')
+ breaks = breaks[1:]
+ }
+ }
+ }
+
+ fmt.Fprint(&out, s)
+ }
+ return out.Bytes(), nil
+}
diff --git a/vendor/golang.org/x/tools/imports/mkindex.go b/vendor/golang.org/x/tools/imports/mkindex.go
new file mode 100644
index 000000000..755e2394f
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/mkindex.go
@@ -0,0 +1,173 @@
+// +build ignore
+
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Command mkindex creates the file "pkgindex.go" containing an index of the Go
+// standard library. The file is intended to be built as part of the imports
+// package, so that the package may be used in environments where a GOROOT is
+// not available (such as App Engine).
+package main
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/format"
+ "go/parser"
+ "go/token"
+ "io/ioutil"
+ "log"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+)
+
+var (
+ pkgIndex = make(map[string][]pkg) // short package name -> candidate packages
+ exports = make(map[string]map[string]bool) // package dir -> exported names
+)
+
+// main scans GOROOT (GOPATH is deliberately cleared), indexes every package
+// and its exports, and writes the result out as Go source in pkgindex.go.
+func main() {
+ // Don't use GOPATH.
+ ctx := build.Default
+ ctx.GOPATH = ""
+
+ // Populate pkgIndex global from GOROOT.
+ for _, path := range ctx.SrcDirs() {
+ f, err := os.Open(path)
+ if err != nil {
+ log.Print(err)
+ continue
+ }
+ children, err := f.Readdir(-1)
+ f.Close()
+ if err != nil {
+ log.Print(err)
+ continue
+ }
+ for _, child := range children {
+ if child.IsDir() {
+ loadPkg(path, child.Name())
+ }
+ }
+ }
+ // Populate exports global.
+ for _, ps := range pkgIndex {
+ for _, p := range ps {
+ e := loadExports(p.dir)
+ if e != nil {
+ exports[p.dir] = e
+ }
+ }
+ }
+
+ // Construct source file.
+ var buf bytes.Buffer
+ fmt.Fprint(&buf, pkgIndexHead)
+ fmt.Fprintf(&buf, "var pkgIndexMaster = %#v\n", pkgIndex)
+ fmt.Fprintf(&buf, "var exportsMaster = %#v\n", exports)
+ src := buf.Bytes()
+
+ // Replace main.pkg type name with pkg.
+ src = bytes.Replace(src, []byte("main.pkg"), []byte("pkg"), -1)
+ // Replace actual GOROOT with "/go".
+ src = bytes.Replace(src, []byte(ctx.GOROOT), []byte("/go"), -1)
+ // Add some line wrapping.
+ src = bytes.Replace(src, []byte("}, "), []byte("},\n"), -1)
+ src = bytes.Replace(src, []byte("true, "), []byte("true,\n"), -1)
+
+ var err error
+ src, err = format.Source(src)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Write out source file.
+ err = ioutil.WriteFile("pkgindex.go", src, 0644)
+ if err != nil {
+ log.Fatal(err)
+ }
+}
+
+// pkgIndexHead is the fixed preamble of the generated pkgindex.go; the
+// generated init wires the baked-in master maps into the imports package.
+const pkgIndexHead = `package imports
+
+func init() {
+ pkgIndexOnce.Do(func() {
+ pkgIndex.m = pkgIndexMaster
+ })
+ loadExports = func(dir string) map[string]bool {
+ return exportsMaster[dir]
+ }
+}
+`
+
+// pkg describes one indexed package.
+type pkg struct {
+ importpath string // full pkg import path, e.g. "net/http"
+ dir string // absolute file path to pkg directory e.g. "/usr/lib/go/src/fmt"
+}
+
+// fset is shared by all parses done by this generator.
+var fset = token.NewFileSet()
+
+// loadPkg records root/importpath in pkgIndex under its base name and
+// recurses into its subdirectories, skipping "testdata" directories and
+// entries that begin with '.' or a digit.
+func loadPkg(root, importpath string) {
+ shortName := path.Base(importpath)
+ if shortName == "testdata" {
+ return
+ }
+
+ dir := filepath.Join(root, importpath)
+ pkgIndex[shortName] = append(pkgIndex[shortName], pkg{
+ importpath: importpath,
+ dir: dir,
+ })
+
+ pkgDir, err := os.Open(dir)
+ if err != nil {
+ return
+ }
+ children, err := pkgDir.Readdir(-1)
+ pkgDir.Close()
+ if err != nil {
+ return
+ }
+ for _, child := range children {
+ name := child.Name()
+ if name == "" {
+ continue
+ }
+ // Skip hidden and version-ish directories (".git", "8g", ...).
+ if c := name[0]; c == '.' || ('0' <= c && c <= '9') {
+ continue
+ }
+ if child.IsDir() {
+ loadPkg(root, filepath.Join(importpath, name))
+ }
+ }
+}
+
+// loadExports returns the exported top-level names of the package in dir,
+// or nil when the directory has no buildable Go files or cannot be imported.
+func loadExports(dir string) map[string]bool {
+ exports := make(map[string]bool)
+ buildPkg, err := build.ImportDir(dir, 0)
+ if err != nil {
+ // Directories without Go sources are expected; stay quiet for those.
+ if strings.Contains(err.Error(), "no buildable Go source files in") {
+ return nil
+ }
+ log.Printf("could not import %q: %v", dir, err)
+ return nil
+ }
+ for _, file := range buildPkg.GoFiles {
+ f, err := parser.ParseFile(fset, filepath.Join(dir, file), nil, 0)
+ if err != nil {
+ log.Printf("could not parse %q: %v", file, err)
+ continue
+ }
+ for name := range f.Scope.Objects {
+ if ast.IsExported(name) {
+ exports[name] = true
+ }
+ }
+ }
+ return exports
+}
diff --git a/vendor/golang.org/x/tools/imports/mkstdlib.go b/vendor/golang.org/x/tools/imports/mkstdlib.go
new file mode 100644
index 000000000..5e53378fc
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/mkstdlib.go
@@ -0,0 +1,111 @@
+// +build ignore
+
+// mkstdlib generates the zstdlib.go file, containing the Go standard
+// library API symbols. It's baked into the binary to avoid scanning
+// GOPATH in the common case.
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/format"
+ "io"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "sort"
+ "strings"
+)
+
+// mustOpen opens name for reading, terminating the generator on failure.
+// It is used only at startup on the $GOROOT/api files.
+func mustOpen(name string) io.Reader {
+ f, err := os.Open(name)
+ if err != nil {
+ log.Fatal(err)
+ }
+ return f
+}
+
+// api returns the path of the named api file under $GOROOT/api.
+func api(base string) string {
+ return filepath.Join(runtime.GOROOT(), "api", base)
+}
+
+// sym matches one exported symbol declaration in a go1.*.txt api line,
+// capturing the package path and the symbol name.
+var sym = regexp.MustCompile(`^pkg (\S+).*?, (?:var|func|type|const) ([A-Z]\w*)`)
+
+// unsafeSyms lists package unsafe's API, which the api files do not cover.
+var unsafeSyms = map[string]bool{"Alignof": true, "ArbitraryType": true, "Offsetof": true, "Pointer": true, "Sizeof": true}
+
+// main reads the go1.*.txt api files, collects every exported package-level
+// symbol, and writes the formatted zstdlib.go table keyed by import path.
+func main() {
+ var buf bytes.Buffer
+ outf := func(format string, args ...interface{}) {
+ fmt.Fprintf(&buf, format, args...)
+ }
+ outf("// Code generated by mkstdlib.go. DO NOT EDIT.\n\n")
+ outf("package imports\n")
+ outf("var stdlib = map[string]map[string]bool{\n")
+ f := io.MultiReader(
+ mustOpen(api("go1.txt")),
+ mustOpen(api("go1.1.txt")),
+ mustOpen(api("go1.2.txt")),
+ mustOpen(api("go1.3.txt")),
+ mustOpen(api("go1.4.txt")),
+ mustOpen(api("go1.5.txt")),
+ mustOpen(api("go1.6.txt")),
+ mustOpen(api("go1.7.txt")),
+ mustOpen(api("go1.8.txt")),
+ mustOpen(api("go1.9.txt")),
+ mustOpen(api("go1.10.txt")),
+ mustOpen(api("go1.11.txt")),
+ )
+ sc := bufio.NewScanner(f)
+
+ // unsafe's symbols are not in the api files; seed them by hand.
+ pkgs := map[string]map[string]bool{
+ "unsafe": unsafeSyms,
+ }
+ paths := []string{"unsafe"}
+
+ for sc.Scan() {
+ l := sc.Text()
+ has := func(v string) bool { return strings.Contains(l, v) }
+ // Skip struct fields, interface methods, and methods: only
+ // package-level symbols belong in the table.
+ if has("struct, ") || has("interface, ") || has(", method (") {
+ continue
+ }
+ if m := sym.FindStringSubmatch(l); m != nil {
+ path, sym := m[1], m[2]
+
+ if _, ok := pkgs[path]; !ok {
+ pkgs[path] = map[string]bool{}
+ paths = append(paths, path)
+ }
+ pkgs[path][sym] = true
+ }
+ }
+ if err := sc.Err(); err != nil {
+ log.Fatal(err)
+ }
+ // Sort packages and symbols so the generated file is stable.
+ sort.Strings(paths)
+ for _, path := range paths {
+ outf("\t%q: map[string]bool{\n", path)
+ pkg := pkgs[path]
+ var syms []string
+ for sym := range pkg {
+ syms = append(syms, sym)
+ }
+ sort.Strings(syms)
+ for _, sym := range syms {
+ outf("\t\t%q: true,\n", sym)
+ }
+ outf("},\n")
+ }
+ outf("}\n")
+ fmtbuf, err := format.Source(buf.Bytes())
+ if err != nil {
+ log.Fatal(err)
+ }
+ err = ioutil.WriteFile("zstdlib.go", fmtbuf, 0666)
+ if err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/vendor/golang.org/x/tools/imports/sortimports.go b/vendor/golang.org/x/tools/imports/sortimports.go
new file mode 100644
index 000000000..f3dd56c7a
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/sortimports.go
@@ -0,0 +1,230 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Hacked up copy of go/ast/import.go
+
+package imports
+
+import (
+ "go/ast"
+ "go/token"
+ "sort"
+ "strconv"
+)
+
+// sortImports sorts runs of consecutive import lines in import blocks in f.
+// It also removes duplicate imports when it is possible to do so without data loss.
+func sortImports(fset *token.FileSet, f *ast.File) {
+ for i, d := range f.Decls {
+ d, ok := d.(*ast.GenDecl)
+ if !ok || d.Tok != token.IMPORT {
+ // Not an import declaration, so we're done.
+ // Imports are always first.
+ break
+ }
+
+ if len(d.Specs) == 0 {
+ // Empty import block, remove it.
+ f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+ }
+
+ if !d.Lparen.IsValid() {
+ // Not a block: sorted by default.
+ continue
+ }
+
+ // Identify and sort runs of specs on successive lines.
+ i := 0
+ specs := d.Specs[:0]
+ for j, s := range d.Specs {
+ if j > i && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[j-1].End()).Line {
+ // j begins a new run. End this one.
+ specs = append(specs, sortSpecs(fset, f, d.Specs[i:j])...)
+ i = j
+ }
+ }
+ specs = append(specs, sortSpecs(fset, f, d.Specs[i:])...)
+ d.Specs = specs
+
+ // Deduping can leave a blank line before the rparen; clean that up.
+ if len(d.Specs) > 0 {
+ lastSpec := d.Specs[len(d.Specs)-1]
+ lastLine := fset.Position(lastSpec.Pos()).Line
+ if rParenLine := fset.Position(d.Rparen).Line; rParenLine > lastLine+1 {
+ fset.File(d.Rparen).MergeLine(rParenLine - 1)
+ }
+ }
+ }
+}
+
+func importPath(s ast.Spec) string {
+ t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value)
+ if err == nil {
+ return t
+ }
+ return ""
+}
+
+func importName(s ast.Spec) string {
+ n := s.(*ast.ImportSpec).Name
+ if n == nil {
+ return ""
+ }
+ return n.Name
+}
+
+func importComment(s ast.Spec) string {
+ c := s.(*ast.ImportSpec).Comment
+ if c == nil {
+ return ""
+ }
+ return c.Text()
+}
+
+// collapse indicates whether prev may be removed, leaving only next.
+func collapse(prev, next ast.Spec) bool {
+ if importPath(next) != importPath(prev) || importName(next) != importName(prev) {
+ return false
+ }
+ return prev.(*ast.ImportSpec).Comment == nil
+}
+
+type posSpan struct {
+ Start token.Pos
+ End token.Pos
+}
+
+func sortSpecs(fset *token.FileSet, f *ast.File, specs []ast.Spec) []ast.Spec {
+ // Can't short-circuit here even if specs are already sorted,
+ // since they might yet need deduplication.
+ // A lone import, however, may be safely ignored.
+ if len(specs) <= 1 {
+ return specs
+ }
+
+ // Record positions for specs.
+ pos := make([]posSpan, len(specs))
+ for i, s := range specs {
+ pos[i] = posSpan{s.Pos(), s.End()}
+ }
+
+ // Identify comments in this range.
+ // Any comment from pos[0].Start to the final line counts.
+ lastLine := fset.Position(pos[len(pos)-1].End).Line
+ cstart := len(f.Comments)
+ cend := len(f.Comments)
+ for i, g := range f.Comments {
+ if g.Pos() < pos[0].Start {
+ continue
+ }
+ if i < cstart {
+ cstart = i
+ }
+ if fset.Position(g.End()).Line > lastLine {
+ cend = i
+ break
+ }
+ }
+ comments := f.Comments[cstart:cend]
+
+ // Assign each comment to the import spec preceding it.
+ importComment := map[*ast.ImportSpec][]*ast.CommentGroup{}
+ specIndex := 0
+ for _, g := range comments {
+ for specIndex+1 < len(specs) && pos[specIndex+1].Start <= g.Pos() {
+ specIndex++
+ }
+ s := specs[specIndex].(*ast.ImportSpec)
+ importComment[s] = append(importComment[s], g)
+ }
+
+ // Sort the import specs by import path.
+ // Remove duplicates, when possible without data loss.
+ // Reassign the import paths to have the same position sequence.
+ // Reassign each comment to abut the end of its spec.
+ // Sort the comments by new position.
+ sort.Sort(byImportSpec(specs))
+
+ // Dedup. Thanks to our sorting, we can just consider
+ // adjacent pairs of imports.
+ deduped := specs[:0]
+ for i, s := range specs {
+ if i == len(specs)-1 || !collapse(s, specs[i+1]) {
+ deduped = append(deduped, s)
+ } else {
+ p := s.Pos()
+ fset.File(p).MergeLine(fset.Position(p).Line)
+ }
+ }
+ specs = deduped
+
+ // Fix up comment positions
+ for i, s := range specs {
+ s := s.(*ast.ImportSpec)
+ if s.Name != nil {
+ s.Name.NamePos = pos[i].Start
+ }
+ s.Path.ValuePos = pos[i].Start
+ s.EndPos = pos[i].End
+ nextSpecPos := pos[i].End
+
+ for _, g := range importComment[s] {
+ for _, c := range g.List {
+ c.Slash = pos[i].End
+ nextSpecPos = c.End()
+ }
+ }
+ if i < len(specs)-1 {
+ pos[i+1].Start = nextSpecPos
+ pos[i+1].End = nextSpecPos
+ }
+ }
+
+ sort.Sort(byCommentPos(comments))
+
+ // Fixup comments can insert blank lines, because import specs are on different lines.
+ // We remove those blank lines here by merging import spec to the first import spec line.
+ firstSpecLine := fset.Position(specs[0].Pos()).Line
+ for _, s := range specs[1:] {
+ p := s.Pos()
+ line := fset.File(p).Line(p)
+ for previousLine := line - 1; previousLine >= firstSpecLine; {
+ fset.File(p).MergeLine(previousLine)
+ previousLine--
+ }
+ }
+ return specs
+}
+
+type byImportSpec []ast.Spec // slice of *ast.ImportSpec
+
+func (x byImportSpec) Len() int { return len(x) }
+func (x byImportSpec) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+func (x byImportSpec) Less(i, j int) bool {
+ ipath := importPath(x[i])
+ jpath := importPath(x[j])
+
+ igroup := importGroup(ipath)
+ jgroup := importGroup(jpath)
+ if igroup != jgroup {
+ return igroup < jgroup
+ }
+
+ if ipath != jpath {
+ return ipath < jpath
+ }
+ iname := importName(x[i])
+ jname := importName(x[j])
+
+ if iname != jname {
+ return iname < jname
+ }
+ return importComment(x[i]) < importComment(x[j])
+}
+
+type byCommentPos []*ast.CommentGroup
+
+func (x byCommentPos) Len() int { return len(x) }
+func (x byCommentPos) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+func (x byCommentPos) Less(i, j int) bool { return x[i].Pos() < x[j].Pos() }
diff --git a/vendor/golang.org/x/tools/imports/zstdlib.go b/vendor/golang.org/x/tools/imports/zstdlib.go
new file mode 100644
index 000000000..ca4f0b2f2
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/zstdlib.go
@@ -0,0 +1,10270 @@
+// Code generated by mkstdlib.go. DO NOT EDIT.
+
+package imports
+
+var stdlib = map[string]map[string]bool{
+ "archive/tar": map[string]bool{
+ "ErrFieldTooLong": true,
+ "ErrHeader": true,
+ "ErrWriteAfterClose": true,
+ "ErrWriteTooLong": true,
+ "FileInfoHeader": true,
+ "Format": true,
+ "FormatGNU": true,
+ "FormatPAX": true,
+ "FormatUSTAR": true,
+ "FormatUnknown": true,
+ "Header": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "Reader": true,
+ "TypeBlock": true,
+ "TypeChar": true,
+ "TypeCont": true,
+ "TypeDir": true,
+ "TypeFifo": true,
+ "TypeGNULongLink": true,
+ "TypeGNULongName": true,
+ "TypeGNUSparse": true,
+ "TypeLink": true,
+ "TypeReg": true,
+ "TypeRegA": true,
+ "TypeSymlink": true,
+ "TypeXGlobalHeader": true,
+ "TypeXHeader": true,
+ "Writer": true,
+ },
+ "archive/zip": map[string]bool{
+ "Compressor": true,
+ "Decompressor": true,
+ "Deflate": true,
+ "ErrAlgorithm": true,
+ "ErrChecksum": true,
+ "ErrFormat": true,
+ "File": true,
+ "FileHeader": true,
+ "FileInfoHeader": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "OpenReader": true,
+ "ReadCloser": true,
+ "Reader": true,
+ "RegisterCompressor": true,
+ "RegisterDecompressor": true,
+ "Store": true,
+ "Writer": true,
+ },
+ "bufio": map[string]bool{
+ "ErrAdvanceTooFar": true,
+ "ErrBufferFull": true,
+ "ErrFinalToken": true,
+ "ErrInvalidUnreadByte": true,
+ "ErrInvalidUnreadRune": true,
+ "ErrNegativeAdvance": true,
+ "ErrNegativeCount": true,
+ "ErrTooLong": true,
+ "MaxScanTokenSize": true,
+ "NewReadWriter": true,
+ "NewReader": true,
+ "NewReaderSize": true,
+ "NewScanner": true,
+ "NewWriter": true,
+ "NewWriterSize": true,
+ "ReadWriter": true,
+ "Reader": true,
+ "ScanBytes": true,
+ "ScanLines": true,
+ "ScanRunes": true,
+ "ScanWords": true,
+ "Scanner": true,
+ "SplitFunc": true,
+ "Writer": true,
+ },
+ "bytes": map[string]bool{
+ "Buffer": true,
+ "Compare": true,
+ "Contains": true,
+ "ContainsAny": true,
+ "ContainsRune": true,
+ "Count": true,
+ "Equal": true,
+ "EqualFold": true,
+ "ErrTooLarge": true,
+ "Fields": true,
+ "FieldsFunc": true,
+ "HasPrefix": true,
+ "HasSuffix": true,
+ "Index": true,
+ "IndexAny": true,
+ "IndexByte": true,
+ "IndexFunc": true,
+ "IndexRune": true,
+ "Join": true,
+ "LastIndex": true,
+ "LastIndexAny": true,
+ "LastIndexByte": true,
+ "LastIndexFunc": true,
+ "Map": true,
+ "MinRead": true,
+ "NewBuffer": true,
+ "NewBufferString": true,
+ "NewReader": true,
+ "Reader": true,
+ "Repeat": true,
+ "Replace": true,
+ "Runes": true,
+ "Split": true,
+ "SplitAfter": true,
+ "SplitAfterN": true,
+ "SplitN": true,
+ "Title": true,
+ "ToLower": true,
+ "ToLowerSpecial": true,
+ "ToTitle": true,
+ "ToTitleSpecial": true,
+ "ToUpper": true,
+ "ToUpperSpecial": true,
+ "Trim": true,
+ "TrimFunc": true,
+ "TrimLeft": true,
+ "TrimLeftFunc": true,
+ "TrimPrefix": true,
+ "TrimRight": true,
+ "TrimRightFunc": true,
+ "TrimSpace": true,
+ "TrimSuffix": true,
+ },
+ "compress/bzip2": map[string]bool{
+ "NewReader": true,
+ "StructuralError": true,
+ },
+ "compress/flate": map[string]bool{
+ "BestCompression": true,
+ "BestSpeed": true,
+ "CorruptInputError": true,
+ "DefaultCompression": true,
+ "HuffmanOnly": true,
+ "InternalError": true,
+ "NewReader": true,
+ "NewReaderDict": true,
+ "NewWriter": true,
+ "NewWriterDict": true,
+ "NoCompression": true,
+ "ReadError": true,
+ "Reader": true,
+ "Resetter": true,
+ "WriteError": true,
+ "Writer": true,
+ },
+ "compress/gzip": map[string]bool{
+ "BestCompression": true,
+ "BestSpeed": true,
+ "DefaultCompression": true,
+ "ErrChecksum": true,
+ "ErrHeader": true,
+ "Header": true,
+ "HuffmanOnly": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "NewWriterLevel": true,
+ "NoCompression": true,
+ "Reader": true,
+ "Writer": true,
+ },
+ "compress/lzw": map[string]bool{
+ "LSB": true,
+ "MSB": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "Order": true,
+ },
+ "compress/zlib": map[string]bool{
+ "BestCompression": true,
+ "BestSpeed": true,
+ "DefaultCompression": true,
+ "ErrChecksum": true,
+ "ErrDictionary": true,
+ "ErrHeader": true,
+ "HuffmanOnly": true,
+ "NewReader": true,
+ "NewReaderDict": true,
+ "NewWriter": true,
+ "NewWriterLevel": true,
+ "NewWriterLevelDict": true,
+ "NoCompression": true,
+ "Resetter": true,
+ "Writer": true,
+ },
+ "container/heap": map[string]bool{
+ "Fix": true,
+ "Init": true,
+ "Interface": true,
+ "Pop": true,
+ "Push": true,
+ "Remove": true,
+ },
+ "container/list": map[string]bool{
+ "Element": true,
+ "List": true,
+ "New": true,
+ },
+ "container/ring": map[string]bool{
+ "New": true,
+ "Ring": true,
+ },
+ "context": map[string]bool{
+ "Background": true,
+ "CancelFunc": true,
+ "Canceled": true,
+ "Context": true,
+ "DeadlineExceeded": true,
+ "TODO": true,
+ "WithCancel": true,
+ "WithDeadline": true,
+ "WithTimeout": true,
+ "WithValue": true,
+ },
+ "crypto": map[string]bool{
+ "BLAKE2b_256": true,
+ "BLAKE2b_384": true,
+ "BLAKE2b_512": true,
+ "BLAKE2s_256": true,
+ "Decrypter": true,
+ "DecrypterOpts": true,
+ "Hash": true,
+ "MD4": true,
+ "MD5": true,
+ "MD5SHA1": true,
+ "PrivateKey": true,
+ "PublicKey": true,
+ "RIPEMD160": true,
+ "RegisterHash": true,
+ "SHA1": true,
+ "SHA224": true,
+ "SHA256": true,
+ "SHA384": true,
+ "SHA3_224": true,
+ "SHA3_256": true,
+ "SHA3_384": true,
+ "SHA3_512": true,
+ "SHA512": true,
+ "SHA512_224": true,
+ "SHA512_256": true,
+ "Signer": true,
+ "SignerOpts": true,
+ },
+ "crypto/aes": map[string]bool{
+ "BlockSize": true,
+ "KeySizeError": true,
+ "NewCipher": true,
+ },
+ "crypto/cipher": map[string]bool{
+ "AEAD": true,
+ "Block": true,
+ "BlockMode": true,
+ "NewCBCDecrypter": true,
+ "NewCBCEncrypter": true,
+ "NewCFBDecrypter": true,
+ "NewCFBEncrypter": true,
+ "NewCTR": true,
+ "NewGCM": true,
+ "NewGCMWithNonceSize": true,
+ "NewGCMWithTagSize": true,
+ "NewOFB": true,
+ "Stream": true,
+ "StreamReader": true,
+ "StreamWriter": true,
+ },
+ "crypto/des": map[string]bool{
+ "BlockSize": true,
+ "KeySizeError": true,
+ "NewCipher": true,
+ "NewTripleDESCipher": true,
+ },
+ "crypto/dsa": map[string]bool{
+ "ErrInvalidPublicKey": true,
+ "GenerateKey": true,
+ "GenerateParameters": true,
+ "L1024N160": true,
+ "L2048N224": true,
+ "L2048N256": true,
+ "L3072N256": true,
+ "ParameterSizes": true,
+ "Parameters": true,
+ "PrivateKey": true,
+ "PublicKey": true,
+ "Sign": true,
+ "Verify": true,
+ },
+ "crypto/ecdsa": map[string]bool{
+ "GenerateKey": true,
+ "PrivateKey": true,
+ "PublicKey": true,
+ "Sign": true,
+ "Verify": true,
+ },
+ "crypto/elliptic": map[string]bool{
+ "Curve": true,
+ "CurveParams": true,
+ "GenerateKey": true,
+ "Marshal": true,
+ "P224": true,
+ "P256": true,
+ "P384": true,
+ "P521": true,
+ "Unmarshal": true,
+ },
+ "crypto/hmac": map[string]bool{
+ "Equal": true,
+ "New": true,
+ },
+ "crypto/md5": map[string]bool{
+ "BlockSize": true,
+ "New": true,
+ "Size": true,
+ "Sum": true,
+ },
+ "crypto/rand": map[string]bool{
+ "Int": true,
+ "Prime": true,
+ "Read": true,
+ "Reader": true,
+ },
+ "crypto/rc4": map[string]bool{
+ "Cipher": true,
+ "KeySizeError": true,
+ "NewCipher": true,
+ },
+ "crypto/rsa": map[string]bool{
+ "CRTValue": true,
+ "DecryptOAEP": true,
+ "DecryptPKCS1v15": true,
+ "DecryptPKCS1v15SessionKey": true,
+ "EncryptOAEP": true,
+ "EncryptPKCS1v15": true,
+ "ErrDecryption": true,
+ "ErrMessageTooLong": true,
+ "ErrVerification": true,
+ "GenerateKey": true,
+ "GenerateMultiPrimeKey": true,
+ "OAEPOptions": true,
+ "PKCS1v15DecryptOptions": true,
+ "PSSOptions": true,
+ "PSSSaltLengthAuto": true,
+ "PSSSaltLengthEqualsHash": true,
+ "PrecomputedValues": true,
+ "PrivateKey": true,
+ "PublicKey": true,
+ "SignPKCS1v15": true,
+ "SignPSS": true,
+ "VerifyPKCS1v15": true,
+ "VerifyPSS": true,
+ },
+ "crypto/sha1": map[string]bool{
+ "BlockSize": true,
+ "New": true,
+ "Size": true,
+ "Sum": true,
+ },
+ "crypto/sha256": map[string]bool{
+ "BlockSize": true,
+ "New": true,
+ "New224": true,
+ "Size": true,
+ "Size224": true,
+ "Sum224": true,
+ "Sum256": true,
+ },
+ "crypto/sha512": map[string]bool{
+ "BlockSize": true,
+ "New": true,
+ "New384": true,
+ "New512_224": true,
+ "New512_256": true,
+ "Size": true,
+ "Size224": true,
+ "Size256": true,
+ "Size384": true,
+ "Sum384": true,
+ "Sum512": true,
+ "Sum512_224": true,
+ "Sum512_256": true,
+ },
+ "crypto/subtle": map[string]bool{
+ "ConstantTimeByteEq": true,
+ "ConstantTimeCompare": true,
+ "ConstantTimeCopy": true,
+ "ConstantTimeEq": true,
+ "ConstantTimeLessOrEq": true,
+ "ConstantTimeSelect": true,
+ },
+ "crypto/tls": map[string]bool{
+ "Certificate": true,
+ "CertificateRequestInfo": true,
+ "Client": true,
+ "ClientAuthType": true,
+ "ClientHelloInfo": true,
+ "ClientSessionCache": true,
+ "ClientSessionState": true,
+ "Config": true,
+ "Conn": true,
+ "ConnectionState": true,
+ "CurveID": true,
+ "CurveP256": true,
+ "CurveP384": true,
+ "CurveP521": true,
+ "Dial": true,
+ "DialWithDialer": true,
+ "ECDSAWithP256AndSHA256": true,
+ "ECDSAWithP384AndSHA384": true,
+ "ECDSAWithP521AndSHA512": true,
+ "ECDSAWithSHA1": true,
+ "Listen": true,
+ "LoadX509KeyPair": true,
+ "NewLRUClientSessionCache": true,
+ "NewListener": true,
+ "NoClientCert": true,
+ "PKCS1WithSHA1": true,
+ "PKCS1WithSHA256": true,
+ "PKCS1WithSHA384": true,
+ "PKCS1WithSHA512": true,
+ "PSSWithSHA256": true,
+ "PSSWithSHA384": true,
+ "PSSWithSHA512": true,
+ "RecordHeaderError": true,
+ "RenegotiateFreelyAsClient": true,
+ "RenegotiateNever": true,
+ "RenegotiateOnceAsClient": true,
+ "RenegotiationSupport": true,
+ "RequestClientCert": true,
+ "RequireAndVerifyClientCert": true,
+ "RequireAnyClientCert": true,
+ "Server": true,
+ "SignatureScheme": true,
+ "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA": true,
+ "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256": true,
+ "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256": true,
+ "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA": true,
+ "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384": true,
+ "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305": true,
+ "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA": true,
+ "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA": true,
+ "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA": true,
+ "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256": true,
+ "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256": true,
+ "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA": true,
+ "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384": true,
+ "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305": true,
+ "TLS_ECDHE_RSA_WITH_RC4_128_SHA": true,
+ "TLS_FALLBACK_SCSV": true,
+ "TLS_RSA_WITH_3DES_EDE_CBC_SHA": true,
+ "TLS_RSA_WITH_AES_128_CBC_SHA": true,
+ "TLS_RSA_WITH_AES_128_CBC_SHA256": true,
+ "TLS_RSA_WITH_AES_128_GCM_SHA256": true,
+ "TLS_RSA_WITH_AES_256_CBC_SHA": true,
+ "TLS_RSA_WITH_AES_256_GCM_SHA384": true,
+ "TLS_RSA_WITH_RC4_128_SHA": true,
+ "VerifyClientCertIfGiven": true,
+ "VersionSSL30": true,
+ "VersionTLS10": true,
+ "VersionTLS11": true,
+ "VersionTLS12": true,
+ "X25519": true,
+ "X509KeyPair": true,
+ },
+ "crypto/x509": map[string]bool{
+ "CANotAuthorizedForExtKeyUsage": true,
+ "CANotAuthorizedForThisName": true,
+ "CertPool": true,
+ "Certificate": true,
+ "CertificateInvalidError": true,
+ "CertificateRequest": true,
+ "ConstraintViolationError": true,
+ "CreateCertificate": true,
+ "CreateCertificateRequest": true,
+ "DSA": true,
+ "DSAWithSHA1": true,
+ "DSAWithSHA256": true,
+ "DecryptPEMBlock": true,
+ "ECDSA": true,
+ "ECDSAWithSHA1": true,
+ "ECDSAWithSHA256": true,
+ "ECDSAWithSHA384": true,
+ "ECDSAWithSHA512": true,
+ "EncryptPEMBlock": true,
+ "ErrUnsupportedAlgorithm": true,
+ "Expired": true,
+ "ExtKeyUsage": true,
+ "ExtKeyUsageAny": true,
+ "ExtKeyUsageClientAuth": true,
+ "ExtKeyUsageCodeSigning": true,
+ "ExtKeyUsageEmailProtection": true,
+ "ExtKeyUsageIPSECEndSystem": true,
+ "ExtKeyUsageIPSECTunnel": true,
+ "ExtKeyUsageIPSECUser": true,
+ "ExtKeyUsageMicrosoftCommercialCodeSigning": true,
+ "ExtKeyUsageMicrosoftKernelCodeSigning": true,
+ "ExtKeyUsageMicrosoftServerGatedCrypto": true,
+ "ExtKeyUsageNetscapeServerGatedCrypto": true,
+ "ExtKeyUsageOCSPSigning": true,
+ "ExtKeyUsageServerAuth": true,
+ "ExtKeyUsageTimeStamping": true,
+ "HostnameError": true,
+ "IncompatibleUsage": true,
+ "IncorrectPasswordError": true,
+ "InsecureAlgorithmError": true,
+ "InvalidReason": true,
+ "IsEncryptedPEMBlock": true,
+ "KeyUsage": true,
+ "KeyUsageCRLSign": true,
+ "KeyUsageCertSign": true,
+ "KeyUsageContentCommitment": true,
+ "KeyUsageDataEncipherment": true,
+ "KeyUsageDecipherOnly": true,
+ "KeyUsageDigitalSignature": true,
+ "KeyUsageEncipherOnly": true,
+ "KeyUsageKeyAgreement": true,
+ "KeyUsageKeyEncipherment": true,
+ "MD2WithRSA": true,
+ "MD5WithRSA": true,
+ "MarshalECPrivateKey": true,
+ "MarshalPKCS1PrivateKey": true,
+ "MarshalPKCS1PublicKey": true,
+ "MarshalPKCS8PrivateKey": true,
+ "MarshalPKIXPublicKey": true,
+ "NameConstraintsWithoutSANs": true,
+ "NameMismatch": true,
+ "NewCertPool": true,
+ "NotAuthorizedToSign": true,
+ "PEMCipher": true,
+ "PEMCipher3DES": true,
+ "PEMCipherAES128": true,
+ "PEMCipherAES192": true,
+ "PEMCipherAES256": true,
+ "PEMCipherDES": true,
+ "ParseCRL": true,
+ "ParseCertificate": true,
+ "ParseCertificateRequest": true,
+ "ParseCertificates": true,
+ "ParseDERCRL": true,
+ "ParseECPrivateKey": true,
+ "ParsePKCS1PrivateKey": true,
+ "ParsePKCS1PublicKey": true,
+ "ParsePKCS8PrivateKey": true,
+ "ParsePKIXPublicKey": true,
+ "PublicKeyAlgorithm": true,
+ "RSA": true,
+ "SHA1WithRSA": true,
+ "SHA256WithRSA": true,
+ "SHA256WithRSAPSS": true,
+ "SHA384WithRSA": true,
+ "SHA384WithRSAPSS": true,
+ "SHA512WithRSA": true,
+ "SHA512WithRSAPSS": true,
+ "SignatureAlgorithm": true,
+ "SystemCertPool": true,
+ "SystemRootsError": true,
+ "TooManyConstraints": true,
+ "TooManyIntermediates": true,
+ "UnconstrainedName": true,
+ "UnhandledCriticalExtension": true,
+ "UnknownAuthorityError": true,
+ "UnknownPublicKeyAlgorithm": true,
+ "UnknownSignatureAlgorithm": true,
+ "VerifyOptions": true,
+ },
+ "crypto/x509/pkix": map[string]bool{
+ "AlgorithmIdentifier": true,
+ "AttributeTypeAndValue": true,
+ "AttributeTypeAndValueSET": true,
+ "CertificateList": true,
+ "Extension": true,
+ "Name": true,
+ "RDNSequence": true,
+ "RelativeDistinguishedNameSET": true,
+ "RevokedCertificate": true,
+ "TBSCertificateList": true,
+ },
+ "database/sql": map[string]bool{
+ "ColumnType": true,
+ "Conn": true,
+ "DB": true,
+ "DBStats": true,
+ "Drivers": true,
+ "ErrConnDone": true,
+ "ErrNoRows": true,
+ "ErrTxDone": true,
+ "IsolationLevel": true,
+ "LevelDefault": true,
+ "LevelLinearizable": true,
+ "LevelReadCommitted": true,
+ "LevelReadUncommitted": true,
+ "LevelRepeatableRead": true,
+ "LevelSerializable": true,
+ "LevelSnapshot": true,
+ "LevelWriteCommitted": true,
+ "Named": true,
+ "NamedArg": true,
+ "NullBool": true,
+ "NullFloat64": true,
+ "NullInt64": true,
+ "NullString": true,
+ "Open": true,
+ "OpenDB": true,
+ "Out": true,
+ "RawBytes": true,
+ "Register": true,
+ "Result": true,
+ "Row": true,
+ "Rows": true,
+ "Scanner": true,
+ "Stmt": true,
+ "Tx": true,
+ "TxOptions": true,
+ },
+ "database/sql/driver": map[string]bool{
+ "Bool": true,
+ "ColumnConverter": true,
+ "Conn": true,
+ "ConnBeginTx": true,
+ "ConnPrepareContext": true,
+ "Connector": true,
+ "DefaultParameterConverter": true,
+ "Driver": true,
+ "DriverContext": true,
+ "ErrBadConn": true,
+ "ErrRemoveArgument": true,
+ "ErrSkip": true,
+ "Execer": true,
+ "ExecerContext": true,
+ "Int32": true,
+ "IsScanValue": true,
+ "IsValue": true,
+ "IsolationLevel": true,
+ "NamedValue": true,
+ "NamedValueChecker": true,
+ "NotNull": true,
+ "Null": true,
+ "Pinger": true,
+ "Queryer": true,
+ "QueryerContext": true,
+ "Result": true,
+ "ResultNoRows": true,
+ "Rows": true,
+ "RowsAffected": true,
+ "RowsColumnTypeDatabaseTypeName": true,
+ "RowsColumnTypeLength": true,
+ "RowsColumnTypeNullable": true,
+ "RowsColumnTypePrecisionScale": true,
+ "RowsColumnTypeScanType": true,
+ "RowsNextResultSet": true,
+ "SessionResetter": true,
+ "Stmt": true,
+ "StmtExecContext": true,
+ "StmtQueryContext": true,
+ "String": true,
+ "Tx": true,
+ "TxOptions": true,
+ "Value": true,
+ "ValueConverter": true,
+ "Valuer": true,
+ },
+ "debug/dwarf": map[string]bool{
+ "AddrType": true,
+ "ArrayType": true,
+ "Attr": true,
+ "AttrAbstractOrigin": true,
+ "AttrAccessibility": true,
+ "AttrAddrClass": true,
+ "AttrAllocated": true,
+ "AttrArtificial": true,
+ "AttrAssociated": true,
+ "AttrBaseTypes": true,
+ "AttrBitOffset": true,
+ "AttrBitSize": true,
+ "AttrByteSize": true,
+ "AttrCallColumn": true,
+ "AttrCallFile": true,
+ "AttrCallLine": true,
+ "AttrCalling": true,
+ "AttrCommonRef": true,
+ "AttrCompDir": true,
+ "AttrConstValue": true,
+ "AttrContainingType": true,
+ "AttrCount": true,
+ "AttrDataLocation": true,
+ "AttrDataMemberLoc": true,
+ "AttrDeclColumn": true,
+ "AttrDeclFile": true,
+ "AttrDeclLine": true,
+ "AttrDeclaration": true,
+ "AttrDefaultValue": true,
+ "AttrDescription": true,
+ "AttrDiscr": true,
+ "AttrDiscrList": true,
+ "AttrDiscrValue": true,
+ "AttrEncoding": true,
+ "AttrEntrypc": true,
+ "AttrExtension": true,
+ "AttrExternal": true,
+ "AttrFrameBase": true,
+ "AttrFriend": true,
+ "AttrHighpc": true,
+ "AttrIdentifierCase": true,
+ "AttrImport": true,
+ "AttrInline": true,
+ "AttrIsOptional": true,
+ "AttrLanguage": true,
+ "AttrLocation": true,
+ "AttrLowerBound": true,
+ "AttrLowpc": true,
+ "AttrMacroInfo": true,
+ "AttrName": true,
+ "AttrNamelistItem": true,
+ "AttrOrdering": true,
+ "AttrPriority": true,
+ "AttrProducer": true,
+ "AttrPrototyped": true,
+ "AttrRanges": true,
+ "AttrReturnAddr": true,
+ "AttrSegment": true,
+ "AttrSibling": true,
+ "AttrSpecification": true,
+ "AttrStartScope": true,
+ "AttrStaticLink": true,
+ "AttrStmtList": true,
+ "AttrStride": true,
+ "AttrStrideSize": true,
+ "AttrStringLength": true,
+ "AttrTrampoline": true,
+ "AttrType": true,
+ "AttrUpperBound": true,
+ "AttrUseLocation": true,
+ "AttrUseUTF8": true,
+ "AttrVarParam": true,
+ "AttrVirtuality": true,
+ "AttrVisibility": true,
+ "AttrVtableElemLoc": true,
+ "BasicType": true,
+ "BoolType": true,
+ "CharType": true,
+ "Class": true,
+ "ClassAddress": true,
+ "ClassBlock": true,
+ "ClassConstant": true,
+ "ClassExprLoc": true,
+ "ClassFlag": true,
+ "ClassLinePtr": true,
+ "ClassLocListPtr": true,
+ "ClassMacPtr": true,
+ "ClassRangeListPtr": true,
+ "ClassReference": true,
+ "ClassReferenceAlt": true,
+ "ClassReferenceSig": true,
+ "ClassString": true,
+ "ClassStringAlt": true,
+ "ClassUnknown": true,
+ "CommonType": true,
+ "ComplexType": true,
+ "Data": true,
+ "DecodeError": true,
+ "DotDotDotType": true,
+ "Entry": true,
+ "EnumType": true,
+ "EnumValue": true,
+ "ErrUnknownPC": true,
+ "Field": true,
+ "FloatType": true,
+ "FuncType": true,
+ "IntType": true,
+ "LineEntry": true,
+ "LineFile": true,
+ "LineReader": true,
+ "LineReaderPos": true,
+ "New": true,
+ "Offset": true,
+ "PtrType": true,
+ "QualType": true,
+ "Reader": true,
+ "StructField": true,
+ "StructType": true,
+ "Tag": true,
+ "TagAccessDeclaration": true,
+ "TagArrayType": true,
+ "TagBaseType": true,
+ "TagCatchDwarfBlock": true,
+ "TagClassType": true,
+ "TagCommonDwarfBlock": true,
+ "TagCommonInclusion": true,
+ "TagCompileUnit": true,
+ "TagCondition": true,
+ "TagConstType": true,
+ "TagConstant": true,
+ "TagDwarfProcedure": true,
+ "TagEntryPoint": true,
+ "TagEnumerationType": true,
+ "TagEnumerator": true,
+ "TagFileType": true,
+ "TagFormalParameter": true,
+ "TagFriend": true,
+ "TagImportedDeclaration": true,
+ "TagImportedModule": true,
+ "TagImportedUnit": true,
+ "TagInheritance": true,
+ "TagInlinedSubroutine": true,
+ "TagInterfaceType": true,
+ "TagLabel": true,
+ "TagLexDwarfBlock": true,
+ "TagMember": true,
+ "TagModule": true,
+ "TagMutableType": true,
+ "TagNamelist": true,
+ "TagNamelistItem": true,
+ "TagNamespace": true,
+ "TagPackedType": true,
+ "TagPartialUnit": true,
+ "TagPointerType": true,
+ "TagPtrToMemberType": true,
+ "TagReferenceType": true,
+ "TagRestrictType": true,
+ "TagRvalueReferenceType": true,
+ "TagSetType": true,
+ "TagSharedType": true,
+ "TagStringType": true,
+ "TagStructType": true,
+ "TagSubprogram": true,
+ "TagSubrangeType": true,
+ "TagSubroutineType": true,
+ "TagTemplateAlias": true,
+ "TagTemplateTypeParameter": true,
+ "TagTemplateValueParameter": true,
+ "TagThrownType": true,
+ "TagTryDwarfBlock": true,
+ "TagTypeUnit": true,
+ "TagTypedef": true,
+ "TagUnionType": true,
+ "TagUnspecifiedParameters": true,
+ "TagUnspecifiedType": true,
+ "TagVariable": true,
+ "TagVariant": true,
+ "TagVariantPart": true,
+ "TagVolatileType": true,
+ "TagWithStmt": true,
+ "Type": true,
+ "TypedefType": true,
+ "UcharType": true,
+ "UintType": true,
+ "UnspecifiedType": true,
+ "VoidType": true,
+ },
+ "debug/elf": map[string]bool{
+ "ARM_MAGIC_TRAMP_NUMBER": true,
+ "COMPRESS_HIOS": true,
+ "COMPRESS_HIPROC": true,
+ "COMPRESS_LOOS": true,
+ "COMPRESS_LOPROC": true,
+ "COMPRESS_ZLIB": true,
+ "Chdr32": true,
+ "Chdr64": true,
+ "Class": true,
+ "CompressionType": true,
+ "DF_BIND_NOW": true,
+ "DF_ORIGIN": true,
+ "DF_STATIC_TLS": true,
+ "DF_SYMBOLIC": true,
+ "DF_TEXTREL": true,
+ "DT_BIND_NOW": true,
+ "DT_DEBUG": true,
+ "DT_ENCODING": true,
+ "DT_FINI": true,
+ "DT_FINI_ARRAY": true,
+ "DT_FINI_ARRAYSZ": true,
+ "DT_FLAGS": true,
+ "DT_HASH": true,
+ "DT_HIOS": true,
+ "DT_HIPROC": true,
+ "DT_INIT": true,
+ "DT_INIT_ARRAY": true,
+ "DT_INIT_ARRAYSZ": true,
+ "DT_JMPREL": true,
+ "DT_LOOS": true,
+ "DT_LOPROC": true,
+ "DT_NEEDED": true,
+ "DT_NULL": true,
+ "DT_PLTGOT": true,
+ "DT_PLTREL": true,
+ "DT_PLTRELSZ": true,
+ "DT_PREINIT_ARRAY": true,
+ "DT_PREINIT_ARRAYSZ": true,
+ "DT_REL": true,
+ "DT_RELA": true,
+ "DT_RELAENT": true,
+ "DT_RELASZ": true,
+ "DT_RELENT": true,
+ "DT_RELSZ": true,
+ "DT_RPATH": true,
+ "DT_RUNPATH": true,
+ "DT_SONAME": true,
+ "DT_STRSZ": true,
+ "DT_STRTAB": true,
+ "DT_SYMBOLIC": true,
+ "DT_SYMENT": true,
+ "DT_SYMTAB": true,
+ "DT_TEXTREL": true,
+ "DT_VERNEED": true,
+ "DT_VERNEEDNUM": true,
+ "DT_VERSYM": true,
+ "Data": true,
+ "Dyn32": true,
+ "Dyn64": true,
+ "DynFlag": true,
+ "DynTag": true,
+ "EI_ABIVERSION": true,
+ "EI_CLASS": true,
+ "EI_DATA": true,
+ "EI_NIDENT": true,
+ "EI_OSABI": true,
+ "EI_PAD": true,
+ "EI_VERSION": true,
+ "ELFCLASS32": true,
+ "ELFCLASS64": true,
+ "ELFCLASSNONE": true,
+ "ELFDATA2LSB": true,
+ "ELFDATA2MSB": true,
+ "ELFDATANONE": true,
+ "ELFMAG": true,
+ "ELFOSABI_86OPEN": true,
+ "ELFOSABI_AIX": true,
+ "ELFOSABI_ARM": true,
+ "ELFOSABI_AROS": true,
+ "ELFOSABI_CLOUDABI": true,
+ "ELFOSABI_FENIXOS": true,
+ "ELFOSABI_FREEBSD": true,
+ "ELFOSABI_HPUX": true,
+ "ELFOSABI_HURD": true,
+ "ELFOSABI_IRIX": true,
+ "ELFOSABI_LINUX": true,
+ "ELFOSABI_MODESTO": true,
+ "ELFOSABI_NETBSD": true,
+ "ELFOSABI_NONE": true,
+ "ELFOSABI_NSK": true,
+ "ELFOSABI_OPENBSD": true,
+ "ELFOSABI_OPENVMS": true,
+ "ELFOSABI_SOLARIS": true,
+ "ELFOSABI_STANDALONE": true,
+ "ELFOSABI_TRU64": true,
+ "EM_386": true,
+ "EM_486": true,
+ "EM_56800EX": true,
+ "EM_68HC05": true,
+ "EM_68HC08": true,
+ "EM_68HC11": true,
+ "EM_68HC12": true,
+ "EM_68HC16": true,
+ "EM_68K": true,
+ "EM_78KOR": true,
+ "EM_8051": true,
+ "EM_860": true,
+ "EM_88K": true,
+ "EM_960": true,
+ "EM_AARCH64": true,
+ "EM_ALPHA": true,
+ "EM_ALPHA_STD": true,
+ "EM_ALTERA_NIOS2": true,
+ "EM_AMDGPU": true,
+ "EM_ARC": true,
+ "EM_ARCA": true,
+ "EM_ARC_COMPACT": true,
+ "EM_ARC_COMPACT2": true,
+ "EM_ARM": true,
+ "EM_AVR": true,
+ "EM_AVR32": true,
+ "EM_BA1": true,
+ "EM_BA2": true,
+ "EM_BLACKFIN": true,
+ "EM_BPF": true,
+ "EM_C166": true,
+ "EM_CDP": true,
+ "EM_CE": true,
+ "EM_CLOUDSHIELD": true,
+ "EM_COGE": true,
+ "EM_COLDFIRE": true,
+ "EM_COOL": true,
+ "EM_COREA_1ST": true,
+ "EM_COREA_2ND": true,
+ "EM_CR": true,
+ "EM_CR16": true,
+ "EM_CRAYNV2": true,
+ "EM_CRIS": true,
+ "EM_CRX": true,
+ "EM_CSR_KALIMBA": true,
+ "EM_CUDA": true,
+ "EM_CYPRESS_M8C": true,
+ "EM_D10V": true,
+ "EM_D30V": true,
+ "EM_DSP24": true,
+ "EM_DSPIC30F": true,
+ "EM_DXP": true,
+ "EM_ECOG1": true,
+ "EM_ECOG16": true,
+ "EM_ECOG1X": true,
+ "EM_ECOG2": true,
+ "EM_ETPU": true,
+ "EM_EXCESS": true,
+ "EM_F2MC16": true,
+ "EM_FIREPATH": true,
+ "EM_FR20": true,
+ "EM_FR30": true,
+ "EM_FT32": true,
+ "EM_FX66": true,
+ "EM_H8S": true,
+ "EM_H8_300": true,
+ "EM_H8_300H": true,
+ "EM_H8_500": true,
+ "EM_HUANY": true,
+ "EM_IA_64": true,
+ "EM_INTEL205": true,
+ "EM_INTEL206": true,
+ "EM_INTEL207": true,
+ "EM_INTEL208": true,
+ "EM_INTEL209": true,
+ "EM_IP2K": true,
+ "EM_JAVELIN": true,
+ "EM_K10M": true,
+ "EM_KM32": true,
+ "EM_KMX16": true,
+ "EM_KMX32": true,
+ "EM_KMX8": true,
+ "EM_KVARC": true,
+ "EM_L10M": true,
+ "EM_LANAI": true,
+ "EM_LATTICEMICO32": true,
+ "EM_M16C": true,
+ "EM_M32": true,
+ "EM_M32C": true,
+ "EM_M32R": true,
+ "EM_MANIK": true,
+ "EM_MAX": true,
+ "EM_MAXQ30": true,
+ "EM_MCHP_PIC": true,
+ "EM_MCST_ELBRUS": true,
+ "EM_ME16": true,
+ "EM_METAG": true,
+ "EM_MICROBLAZE": true,
+ "EM_MIPS": true,
+ "EM_MIPS_RS3_LE": true,
+ "EM_MIPS_RS4_BE": true,
+ "EM_MIPS_X": true,
+ "EM_MMA": true,
+ "EM_MMDSP_PLUS": true,
+ "EM_MMIX": true,
+ "EM_MN10200": true,
+ "EM_MN10300": true,
+ "EM_MOXIE": true,
+ "EM_MSP430": true,
+ "EM_NCPU": true,
+ "EM_NDR1": true,
+ "EM_NDS32": true,
+ "EM_NONE": true,
+ "EM_NORC": true,
+ "EM_NS32K": true,
+ "EM_OPEN8": true,
+ "EM_OPENRISC": true,
+ "EM_PARISC": true,
+ "EM_PCP": true,
+ "EM_PDP10": true,
+ "EM_PDP11": true,
+ "EM_PDSP": true,
+ "EM_PJ": true,
+ "EM_PPC": true,
+ "EM_PPC64": true,
+ "EM_PRISM": true,
+ "EM_QDSP6": true,
+ "EM_R32C": true,
+ "EM_RCE": true,
+ "EM_RH32": true,
+ "EM_RISCV": true,
+ "EM_RL78": true,
+ "EM_RS08": true,
+ "EM_RX": true,
+ "EM_S370": true,
+ "EM_S390": true,
+ "EM_SCORE7": true,
+ "EM_SEP": true,
+ "EM_SE_C17": true,
+ "EM_SE_C33": true,
+ "EM_SH": true,
+ "EM_SHARC": true,
+ "EM_SLE9X": true,
+ "EM_SNP1K": true,
+ "EM_SPARC": true,
+ "EM_SPARC32PLUS": true,
+ "EM_SPARCV9": true,
+ "EM_ST100": true,
+ "EM_ST19": true,
+ "EM_ST200": true,
+ "EM_ST7": true,
+ "EM_ST9PLUS": true,
+ "EM_STARCORE": true,
+ "EM_STM8": true,
+ "EM_STXP7X": true,
+ "EM_SVX": true,
+ "EM_TILE64": true,
+ "EM_TILEGX": true,
+ "EM_TILEPRO": true,
+ "EM_TINYJ": true,
+ "EM_TI_ARP32": true,
+ "EM_TI_C2000": true,
+ "EM_TI_C5500": true,
+ "EM_TI_C6000": true,
+ "EM_TI_PRU": true,
+ "EM_TMM_GPP": true,
+ "EM_TPC": true,
+ "EM_TRICORE": true,
+ "EM_TRIMEDIA": true,
+ "EM_TSK3000": true,
+ "EM_UNICORE": true,
+ "EM_V800": true,
+ "EM_V850": true,
+ "EM_VAX": true,
+ "EM_VIDEOCORE": true,
+ "EM_VIDEOCORE3": true,
+ "EM_VIDEOCORE5": true,
+ "EM_VISIUM": true,
+ "EM_VPP500": true,
+ "EM_X86_64": true,
+ "EM_XCORE": true,
+ "EM_XGATE": true,
+ "EM_XIMO16": true,
+ "EM_XTENSA": true,
+ "EM_Z80": true,
+ "EM_ZSP": true,
+ "ET_CORE": true,
+ "ET_DYN": true,
+ "ET_EXEC": true,
+ "ET_HIOS": true,
+ "ET_HIPROC": true,
+ "ET_LOOS": true,
+ "ET_LOPROC": true,
+ "ET_NONE": true,
+ "ET_REL": true,
+ "EV_CURRENT": true,
+ "EV_NONE": true,
+ "ErrNoSymbols": true,
+ "File": true,
+ "FileHeader": true,
+ "FormatError": true,
+ "Header32": true,
+ "Header64": true,
+ "ImportedSymbol": true,
+ "Machine": true,
+ "NT_FPREGSET": true,
+ "NT_PRPSINFO": true,
+ "NT_PRSTATUS": true,
+ "NType": true,
+ "NewFile": true,
+ "OSABI": true,
+ "Open": true,
+ "PF_MASKOS": true,
+ "PF_MASKPROC": true,
+ "PF_R": true,
+ "PF_W": true,
+ "PF_X": true,
+ "PT_DYNAMIC": true,
+ "PT_HIOS": true,
+ "PT_HIPROC": true,
+ "PT_INTERP": true,
+ "PT_LOAD": true,
+ "PT_LOOS": true,
+ "PT_LOPROC": true,
+ "PT_NOTE": true,
+ "PT_NULL": true,
+ "PT_PHDR": true,
+ "PT_SHLIB": true,
+ "PT_TLS": true,
+ "Prog": true,
+ "Prog32": true,
+ "Prog64": true,
+ "ProgFlag": true,
+ "ProgHeader": true,
+ "ProgType": true,
+ "R_386": true,
+ "R_386_16": true,
+ "R_386_32": true,
+ "R_386_32PLT": true,
+ "R_386_8": true,
+ "R_386_COPY": true,
+ "R_386_GLOB_DAT": true,
+ "R_386_GOT32": true,
+ "R_386_GOT32X": true,
+ "R_386_GOTOFF": true,
+ "R_386_GOTPC": true,
+ "R_386_IRELATIVE": true,
+ "R_386_JMP_SLOT": true,
+ "R_386_NONE": true,
+ "R_386_PC16": true,
+ "R_386_PC32": true,
+ "R_386_PC8": true,
+ "R_386_PLT32": true,
+ "R_386_RELATIVE": true,
+ "R_386_SIZE32": true,
+ "R_386_TLS_DESC": true,
+ "R_386_TLS_DESC_CALL": true,
+ "R_386_TLS_DTPMOD32": true,
+ "R_386_TLS_DTPOFF32": true,
+ "R_386_TLS_GD": true,
+ "R_386_TLS_GD_32": true,
+ "R_386_TLS_GD_CALL": true,
+ "R_386_TLS_GD_POP": true,
+ "R_386_TLS_GD_PUSH": true,
+ "R_386_TLS_GOTDESC": true,
+ "R_386_TLS_GOTIE": true,
+ "R_386_TLS_IE": true,
+ "R_386_TLS_IE_32": true,
+ "R_386_TLS_LDM": true,
+ "R_386_TLS_LDM_32": true,
+ "R_386_TLS_LDM_CALL": true,
+ "R_386_TLS_LDM_POP": true,
+ "R_386_TLS_LDM_PUSH": true,
+ "R_386_TLS_LDO_32": true,
+ "R_386_TLS_LE": true,
+ "R_386_TLS_LE_32": true,
+ "R_386_TLS_TPOFF": true,
+ "R_386_TLS_TPOFF32": true,
+ "R_390": true,
+ "R_390_12": true,
+ "R_390_16": true,
+ "R_390_20": true,
+ "R_390_32": true,
+ "R_390_64": true,
+ "R_390_8": true,
+ "R_390_COPY": true,
+ "R_390_GLOB_DAT": true,
+ "R_390_GOT12": true,
+ "R_390_GOT16": true,
+ "R_390_GOT20": true,
+ "R_390_GOT32": true,
+ "R_390_GOT64": true,
+ "R_390_GOTENT": true,
+ "R_390_GOTOFF": true,
+ "R_390_GOTOFF16": true,
+ "R_390_GOTOFF64": true,
+ "R_390_GOTPC": true,
+ "R_390_GOTPCDBL": true,
+ "R_390_GOTPLT12": true,
+ "R_390_GOTPLT16": true,
+ "R_390_GOTPLT20": true,
+ "R_390_GOTPLT32": true,
+ "R_390_GOTPLT64": true,
+ "R_390_GOTPLTENT": true,
+ "R_390_GOTPLTOFF16": true,
+ "R_390_GOTPLTOFF32": true,
+ "R_390_GOTPLTOFF64": true,
+ "R_390_JMP_SLOT": true,
+ "R_390_NONE": true,
+ "R_390_PC16": true,
+ "R_390_PC16DBL": true,
+ "R_390_PC32": true,
+ "R_390_PC32DBL": true,
+ "R_390_PC64": true,
+ "R_390_PLT16DBL": true,
+ "R_390_PLT32": true,
+ "R_390_PLT32DBL": true,
+ "R_390_PLT64": true,
+ "R_390_RELATIVE": true,
+ "R_390_TLS_DTPMOD": true,
+ "R_390_TLS_DTPOFF": true,
+ "R_390_TLS_GD32": true,
+ "R_390_TLS_GD64": true,
+ "R_390_TLS_GDCALL": true,
+ "R_390_TLS_GOTIE12": true,
+ "R_390_TLS_GOTIE20": true,
+ "R_390_TLS_GOTIE32": true,
+ "R_390_TLS_GOTIE64": true,
+ "R_390_TLS_IE32": true,
+ "R_390_TLS_IE64": true,
+ "R_390_TLS_IEENT": true,
+ "R_390_TLS_LDCALL": true,
+ "R_390_TLS_LDM32": true,
+ "R_390_TLS_LDM64": true,
+ "R_390_TLS_LDO32": true,
+ "R_390_TLS_LDO64": true,
+ "R_390_TLS_LE32": true,
+ "R_390_TLS_LE64": true,
+ "R_390_TLS_LOAD": true,
+ "R_390_TLS_TPOFF": true,
+ "R_AARCH64": true,
+ "R_AARCH64_ABS16": true,
+ "R_AARCH64_ABS32": true,
+ "R_AARCH64_ABS64": true,
+ "R_AARCH64_ADD_ABS_LO12_NC": true,
+ "R_AARCH64_ADR_GOT_PAGE": true,
+ "R_AARCH64_ADR_PREL_LO21": true,
+ "R_AARCH64_ADR_PREL_PG_HI21": true,
+ "R_AARCH64_ADR_PREL_PG_HI21_NC": true,
+ "R_AARCH64_CALL26": true,
+ "R_AARCH64_CONDBR19": true,
+ "R_AARCH64_COPY": true,
+ "R_AARCH64_GLOB_DAT": true,
+ "R_AARCH64_GOT_LD_PREL19": true,
+ "R_AARCH64_IRELATIVE": true,
+ "R_AARCH64_JUMP26": true,
+ "R_AARCH64_JUMP_SLOT": true,
+ "R_AARCH64_LD64_GOTOFF_LO15": true,
+ "R_AARCH64_LD64_GOTPAGE_LO15": true,
+ "R_AARCH64_LD64_GOT_LO12_NC": true,
+ "R_AARCH64_LDST128_ABS_LO12_NC": true,
+ "R_AARCH64_LDST16_ABS_LO12_NC": true,
+ "R_AARCH64_LDST32_ABS_LO12_NC": true,
+ "R_AARCH64_LDST64_ABS_LO12_NC": true,
+ "R_AARCH64_LDST8_ABS_LO12_NC": true,
+ "R_AARCH64_LD_PREL_LO19": true,
+ "R_AARCH64_MOVW_SABS_G0": true,
+ "R_AARCH64_MOVW_SABS_G1": true,
+ "R_AARCH64_MOVW_SABS_G2": true,
+ "R_AARCH64_MOVW_UABS_G0": true,
+ "R_AARCH64_MOVW_UABS_G0_NC": true,
+ "R_AARCH64_MOVW_UABS_G1": true,
+ "R_AARCH64_MOVW_UABS_G1_NC": true,
+ "R_AARCH64_MOVW_UABS_G2": true,
+ "R_AARCH64_MOVW_UABS_G2_NC": true,
+ "R_AARCH64_MOVW_UABS_G3": true,
+ "R_AARCH64_NONE": true,
+ "R_AARCH64_NULL": true,
+ "R_AARCH64_P32_ABS16": true,
+ "R_AARCH64_P32_ABS32": true,
+ "R_AARCH64_P32_ADD_ABS_LO12_NC": true,
+ "R_AARCH64_P32_ADR_GOT_PAGE": true,
+ "R_AARCH64_P32_ADR_PREL_LO21": true,
+ "R_AARCH64_P32_ADR_PREL_PG_HI21": true,
+ "R_AARCH64_P32_CALL26": true,
+ "R_AARCH64_P32_CONDBR19": true,
+ "R_AARCH64_P32_COPY": true,
+ "R_AARCH64_P32_GLOB_DAT": true,
+ "R_AARCH64_P32_GOT_LD_PREL19": true,
+ "R_AARCH64_P32_IRELATIVE": true,
+ "R_AARCH64_P32_JUMP26": true,
+ "R_AARCH64_P32_JUMP_SLOT": true,
+ "R_AARCH64_P32_LD32_GOT_LO12_NC": true,
+ "R_AARCH64_P32_LDST128_ABS_LO12_NC": true,
+ "R_AARCH64_P32_LDST16_ABS_LO12_NC": true,
+ "R_AARCH64_P32_LDST32_ABS_LO12_NC": true,
+ "R_AARCH64_P32_LDST64_ABS_LO12_NC": true,
+ "R_AARCH64_P32_LDST8_ABS_LO12_NC": true,
+ "R_AARCH64_P32_LD_PREL_LO19": true,
+ "R_AARCH64_P32_MOVW_SABS_G0": true,
+ "R_AARCH64_P32_MOVW_UABS_G0": true,
+ "R_AARCH64_P32_MOVW_UABS_G0_NC": true,
+ "R_AARCH64_P32_MOVW_UABS_G1": true,
+ "R_AARCH64_P32_PREL16": true,
+ "R_AARCH64_P32_PREL32": true,
+ "R_AARCH64_P32_RELATIVE": true,
+ "R_AARCH64_P32_TLSDESC": true,
+ "R_AARCH64_P32_TLSDESC_ADD_LO12_NC": true,
+ "R_AARCH64_P32_TLSDESC_ADR_PAGE21": true,
+ "R_AARCH64_P32_TLSDESC_ADR_PREL21": true,
+ "R_AARCH64_P32_TLSDESC_CALL": true,
+ "R_AARCH64_P32_TLSDESC_LD32_LO12_NC": true,
+ "R_AARCH64_P32_TLSDESC_LD_PREL19": true,
+ "R_AARCH64_P32_TLSGD_ADD_LO12_NC": true,
+ "R_AARCH64_P32_TLSGD_ADR_PAGE21": true,
+ "R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21": true,
+ "R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC": true,
+ "R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19": true,
+ "R_AARCH64_P32_TLSLE_ADD_TPREL_HI12": true,
+ "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12": true,
+ "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC": true,
+ "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0": true,
+ "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC": true,
+ "R_AARCH64_P32_TLSLE_MOVW_TPREL_G1": true,
+ "R_AARCH64_P32_TLS_DTPMOD": true,
+ "R_AARCH64_P32_TLS_DTPREL": true,
+ "R_AARCH64_P32_TLS_TPREL": true,
+ "R_AARCH64_P32_TSTBR14": true,
+ "R_AARCH64_PREL16": true,
+ "R_AARCH64_PREL32": true,
+ "R_AARCH64_PREL64": true,
+ "R_AARCH64_RELATIVE": true,
+ "R_AARCH64_TLSDESC": true,
+ "R_AARCH64_TLSDESC_ADD": true,
+ "R_AARCH64_TLSDESC_ADD_LO12_NC": true,
+ "R_AARCH64_TLSDESC_ADR_PAGE21": true,
+ "R_AARCH64_TLSDESC_ADR_PREL21": true,
+ "R_AARCH64_TLSDESC_CALL": true,
+ "R_AARCH64_TLSDESC_LD64_LO12_NC": true,
+ "R_AARCH64_TLSDESC_LDR": true,
+ "R_AARCH64_TLSDESC_LD_PREL19": true,
+ "R_AARCH64_TLSDESC_OFF_G0_NC": true,
+ "R_AARCH64_TLSDESC_OFF_G1": true,
+ "R_AARCH64_TLSGD_ADD_LO12_NC": true,
+ "R_AARCH64_TLSGD_ADR_PAGE21": true,
+ "R_AARCH64_TLSGD_ADR_PREL21": true,
+ "R_AARCH64_TLSGD_MOVW_G0_NC": true,
+ "R_AARCH64_TLSGD_MOVW_G1": true,
+ "R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21": true,
+ "R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC": true,
+ "R_AARCH64_TLSIE_LD_GOTTPREL_PREL19": true,
+ "R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC": true,
+ "R_AARCH64_TLSIE_MOVW_GOTTPREL_G1": true,
+ "R_AARCH64_TLSLD_ADR_PAGE21": true,
+ "R_AARCH64_TLSLD_ADR_PREL21": true,
+ "R_AARCH64_TLSLD_LDST128_DTPREL_LO12": true,
+ "R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC": true,
+ "R_AARCH64_TLSLE_ADD_TPREL_HI12": true,
+ "R_AARCH64_TLSLE_ADD_TPREL_LO12": true,
+ "R_AARCH64_TLSLE_ADD_TPREL_LO12_NC": true,
+ "R_AARCH64_TLSLE_LDST128_TPREL_LO12": true,
+ "R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC": true,
+ "R_AARCH64_TLSLE_MOVW_TPREL_G0": true,
+ "R_AARCH64_TLSLE_MOVW_TPREL_G0_NC": true,
+ "R_AARCH64_TLSLE_MOVW_TPREL_G1": true,
+ "R_AARCH64_TLSLE_MOVW_TPREL_G1_NC": true,
+ "R_AARCH64_TLSLE_MOVW_TPREL_G2": true,
+ "R_AARCH64_TLS_DTPMOD64": true,
+ "R_AARCH64_TLS_DTPREL64": true,
+ "R_AARCH64_TLS_TPREL64": true,
+ "R_AARCH64_TSTBR14": true,
+ "R_ALPHA": true,
+ "R_ALPHA_BRADDR": true,
+ "R_ALPHA_COPY": true,
+ "R_ALPHA_GLOB_DAT": true,
+ "R_ALPHA_GPDISP": true,
+ "R_ALPHA_GPREL32": true,
+ "R_ALPHA_GPRELHIGH": true,
+ "R_ALPHA_GPRELLOW": true,
+ "R_ALPHA_GPVALUE": true,
+ "R_ALPHA_HINT": true,
+ "R_ALPHA_IMMED_BR_HI32": true,
+ "R_ALPHA_IMMED_GP_16": true,
+ "R_ALPHA_IMMED_GP_HI32": true,
+ "R_ALPHA_IMMED_LO32": true,
+ "R_ALPHA_IMMED_SCN_HI32": true,
+ "R_ALPHA_JMP_SLOT": true,
+ "R_ALPHA_LITERAL": true,
+ "R_ALPHA_LITUSE": true,
+ "R_ALPHA_NONE": true,
+ "R_ALPHA_OP_PRSHIFT": true,
+ "R_ALPHA_OP_PSUB": true,
+ "R_ALPHA_OP_PUSH": true,
+ "R_ALPHA_OP_STORE": true,
+ "R_ALPHA_REFLONG": true,
+ "R_ALPHA_REFQUAD": true,
+ "R_ALPHA_RELATIVE": true,
+ "R_ALPHA_SREL16": true,
+ "R_ALPHA_SREL32": true,
+ "R_ALPHA_SREL64": true,
+ "R_ARM": true,
+ "R_ARM_ABS12": true,
+ "R_ARM_ABS16": true,
+ "R_ARM_ABS32": true,
+ "R_ARM_ABS32_NOI": true,
+ "R_ARM_ABS8": true,
+ "R_ARM_ALU_PCREL_15_8": true,
+ "R_ARM_ALU_PCREL_23_15": true,
+ "R_ARM_ALU_PCREL_7_0": true,
+ "R_ARM_ALU_PC_G0": true,
+ "R_ARM_ALU_PC_G0_NC": true,
+ "R_ARM_ALU_PC_G1": true,
+ "R_ARM_ALU_PC_G1_NC": true,
+ "R_ARM_ALU_PC_G2": true,
+ "R_ARM_ALU_SBREL_19_12_NC": true,
+ "R_ARM_ALU_SBREL_27_20_CK": true,
+ "R_ARM_ALU_SB_G0": true,
+ "R_ARM_ALU_SB_G0_NC": true,
+ "R_ARM_ALU_SB_G1": true,
+ "R_ARM_ALU_SB_G1_NC": true,
+ "R_ARM_ALU_SB_G2": true,
+ "R_ARM_AMP_VCALL9": true,
+ "R_ARM_BASE_ABS": true,
+ "R_ARM_CALL": true,
+ "R_ARM_COPY": true,
+ "R_ARM_GLOB_DAT": true,
+ "R_ARM_GNU_VTENTRY": true,
+ "R_ARM_GNU_VTINHERIT": true,
+ "R_ARM_GOT32": true,
+ "R_ARM_GOTOFF": true,
+ "R_ARM_GOTOFF12": true,
+ "R_ARM_GOTPC": true,
+ "R_ARM_GOTRELAX": true,
+ "R_ARM_GOT_ABS": true,
+ "R_ARM_GOT_BREL12": true,
+ "R_ARM_GOT_PREL": true,
+ "R_ARM_IRELATIVE": true,
+ "R_ARM_JUMP24": true,
+ "R_ARM_JUMP_SLOT": true,
+ "R_ARM_LDC_PC_G0": true,
+ "R_ARM_LDC_PC_G1": true,
+ "R_ARM_LDC_PC_G2": true,
+ "R_ARM_LDC_SB_G0": true,
+ "R_ARM_LDC_SB_G1": true,
+ "R_ARM_LDC_SB_G2": true,
+ "R_ARM_LDRS_PC_G0": true,
+ "R_ARM_LDRS_PC_G1": true,
+ "R_ARM_LDRS_PC_G2": true,
+ "R_ARM_LDRS_SB_G0": true,
+ "R_ARM_LDRS_SB_G1": true,
+ "R_ARM_LDRS_SB_G2": true,
+ "R_ARM_LDR_PC_G1": true,
+ "R_ARM_LDR_PC_G2": true,
+ "R_ARM_LDR_SBREL_11_10_NC": true,
+ "R_ARM_LDR_SB_G0": true,
+ "R_ARM_LDR_SB_G1": true,
+ "R_ARM_LDR_SB_G2": true,
+ "R_ARM_ME_TOO": true,
+ "R_ARM_MOVT_ABS": true,
+ "R_ARM_MOVT_BREL": true,
+ "R_ARM_MOVT_PREL": true,
+ "R_ARM_MOVW_ABS_NC": true,
+ "R_ARM_MOVW_BREL": true,
+ "R_ARM_MOVW_BREL_NC": true,
+ "R_ARM_MOVW_PREL_NC": true,
+ "R_ARM_NONE": true,
+ "R_ARM_PC13": true,
+ "R_ARM_PC24": true,
+ "R_ARM_PLT32": true,
+ "R_ARM_PLT32_ABS": true,
+ "R_ARM_PREL31": true,
+ "R_ARM_PRIVATE_0": true,
+ "R_ARM_PRIVATE_1": true,
+ "R_ARM_PRIVATE_10": true,
+ "R_ARM_PRIVATE_11": true,
+ "R_ARM_PRIVATE_12": true,
+ "R_ARM_PRIVATE_13": true,
+ "R_ARM_PRIVATE_14": true,
+ "R_ARM_PRIVATE_15": true,
+ "R_ARM_PRIVATE_2": true,
+ "R_ARM_PRIVATE_3": true,
+ "R_ARM_PRIVATE_4": true,
+ "R_ARM_PRIVATE_5": true,
+ "R_ARM_PRIVATE_6": true,
+ "R_ARM_PRIVATE_7": true,
+ "R_ARM_PRIVATE_8": true,
+ "R_ARM_PRIVATE_9": true,
+ "R_ARM_RABS32": true,
+ "R_ARM_RBASE": true,
+ "R_ARM_REL32": true,
+ "R_ARM_REL32_NOI": true,
+ "R_ARM_RELATIVE": true,
+ "R_ARM_RPC24": true,
+ "R_ARM_RREL32": true,
+ "R_ARM_RSBREL32": true,
+ "R_ARM_RXPC25": true,
+ "R_ARM_SBREL31": true,
+ "R_ARM_SBREL32": true,
+ "R_ARM_SWI24": true,
+ "R_ARM_TARGET1": true,
+ "R_ARM_TARGET2": true,
+ "R_ARM_THM_ABS5": true,
+ "R_ARM_THM_ALU_ABS_G0_NC": true,
+ "R_ARM_THM_ALU_ABS_G1_NC": true,
+ "R_ARM_THM_ALU_ABS_G2_NC": true,
+ "R_ARM_THM_ALU_ABS_G3": true,
+ "R_ARM_THM_ALU_PREL_11_0": true,
+ "R_ARM_THM_GOT_BREL12": true,
+ "R_ARM_THM_JUMP11": true,
+ "R_ARM_THM_JUMP19": true,
+ "R_ARM_THM_JUMP24": true,
+ "R_ARM_THM_JUMP6": true,
+ "R_ARM_THM_JUMP8": true,
+ "R_ARM_THM_MOVT_ABS": true,
+ "R_ARM_THM_MOVT_BREL": true,
+ "R_ARM_THM_MOVT_PREL": true,
+ "R_ARM_THM_MOVW_ABS_NC": true,
+ "R_ARM_THM_MOVW_BREL": true,
+ "R_ARM_THM_MOVW_BREL_NC": true,
+ "R_ARM_THM_MOVW_PREL_NC": true,
+ "R_ARM_THM_PC12": true,
+ "R_ARM_THM_PC22": true,
+ "R_ARM_THM_PC8": true,
+ "R_ARM_THM_RPC22": true,
+ "R_ARM_THM_SWI8": true,
+ "R_ARM_THM_TLS_CALL": true,
+ "R_ARM_THM_TLS_DESCSEQ16": true,
+ "R_ARM_THM_TLS_DESCSEQ32": true,
+ "R_ARM_THM_XPC22": true,
+ "R_ARM_TLS_CALL": true,
+ "R_ARM_TLS_DESCSEQ": true,
+ "R_ARM_TLS_DTPMOD32": true,
+ "R_ARM_TLS_DTPOFF32": true,
+ "R_ARM_TLS_GD32": true,
+ "R_ARM_TLS_GOTDESC": true,
+ "R_ARM_TLS_IE12GP": true,
+ "R_ARM_TLS_IE32": true,
+ "R_ARM_TLS_LDM32": true,
+ "R_ARM_TLS_LDO12": true,
+ "R_ARM_TLS_LDO32": true,
+ "R_ARM_TLS_LE12": true,
+ "R_ARM_TLS_LE32": true,
+ "R_ARM_TLS_TPOFF32": true,
+ "R_ARM_V4BX": true,
+ "R_ARM_XPC25": true,
+ "R_INFO": true,
+ "R_INFO32": true,
+ "R_MIPS": true,
+ "R_MIPS_16": true,
+ "R_MIPS_26": true,
+ "R_MIPS_32": true,
+ "R_MIPS_64": true,
+ "R_MIPS_ADD_IMMEDIATE": true,
+ "R_MIPS_CALL16": true,
+ "R_MIPS_CALL_HI16": true,
+ "R_MIPS_CALL_LO16": true,
+ "R_MIPS_DELETE": true,
+ "R_MIPS_GOT16": true,
+ "R_MIPS_GOT_DISP": true,
+ "R_MIPS_GOT_HI16": true,
+ "R_MIPS_GOT_LO16": true,
+ "R_MIPS_GOT_OFST": true,
+ "R_MIPS_GOT_PAGE": true,
+ "R_MIPS_GPREL16": true,
+ "R_MIPS_GPREL32": true,
+ "R_MIPS_HI16": true,
+ "R_MIPS_HIGHER": true,
+ "R_MIPS_HIGHEST": true,
+ "R_MIPS_INSERT_A": true,
+ "R_MIPS_INSERT_B": true,
+ "R_MIPS_JALR": true,
+ "R_MIPS_LITERAL": true,
+ "R_MIPS_LO16": true,
+ "R_MIPS_NONE": true,
+ "R_MIPS_PC16": true,
+ "R_MIPS_PJUMP": true,
+ "R_MIPS_REL16": true,
+ "R_MIPS_REL32": true,
+ "R_MIPS_RELGOT": true,
+ "R_MIPS_SCN_DISP": true,
+ "R_MIPS_SHIFT5": true,
+ "R_MIPS_SHIFT6": true,
+ "R_MIPS_SUB": true,
+ "R_MIPS_TLS_DTPMOD32": true,
+ "R_MIPS_TLS_DTPMOD64": true,
+ "R_MIPS_TLS_DTPREL32": true,
+ "R_MIPS_TLS_DTPREL64": true,
+ "R_MIPS_TLS_DTPREL_HI16": true,
+ "R_MIPS_TLS_DTPREL_LO16": true,
+ "R_MIPS_TLS_GD": true,
+ "R_MIPS_TLS_GOTTPREL": true,
+ "R_MIPS_TLS_LDM": true,
+ "R_MIPS_TLS_TPREL32": true,
+ "R_MIPS_TLS_TPREL64": true,
+ "R_MIPS_TLS_TPREL_HI16": true,
+ "R_MIPS_TLS_TPREL_LO16": true,
+ "R_PPC": true,
+ "R_PPC64": true,
+ "R_PPC64_ADDR14": true,
+ "R_PPC64_ADDR14_BRNTAKEN": true,
+ "R_PPC64_ADDR14_BRTAKEN": true,
+ "R_PPC64_ADDR16": true,
+ "R_PPC64_ADDR16_DS": true,
+ "R_PPC64_ADDR16_HA": true,
+ "R_PPC64_ADDR16_HI": true,
+ "R_PPC64_ADDR16_HIGH": true,
+ "R_PPC64_ADDR16_HIGHA": true,
+ "R_PPC64_ADDR16_HIGHER": true,
+ "R_PPC64_ADDR16_HIGHERA": true,
+ "R_PPC64_ADDR16_HIGHEST": true,
+ "R_PPC64_ADDR16_HIGHESTA": true,
+ "R_PPC64_ADDR16_LO": true,
+ "R_PPC64_ADDR16_LO_DS": true,
+ "R_PPC64_ADDR24": true,
+ "R_PPC64_ADDR32": true,
+ "R_PPC64_ADDR64": true,
+ "R_PPC64_ADDR64_LOCAL": true,
+ "R_PPC64_DTPMOD64": true,
+ "R_PPC64_DTPREL16": true,
+ "R_PPC64_DTPREL16_DS": true,
+ "R_PPC64_DTPREL16_HA": true,
+ "R_PPC64_DTPREL16_HI": true,
+ "R_PPC64_DTPREL16_HIGH": true,
+ "R_PPC64_DTPREL16_HIGHA": true,
+ "R_PPC64_DTPREL16_HIGHER": true,
+ "R_PPC64_DTPREL16_HIGHERA": true,
+ "R_PPC64_DTPREL16_HIGHEST": true,
+ "R_PPC64_DTPREL16_HIGHESTA": true,
+ "R_PPC64_DTPREL16_LO": true,
+ "R_PPC64_DTPREL16_LO_DS": true,
+ "R_PPC64_DTPREL64": true,
+ "R_PPC64_ENTRY": true,
+ "R_PPC64_GOT16": true,
+ "R_PPC64_GOT16_DS": true,
+ "R_PPC64_GOT16_HA": true,
+ "R_PPC64_GOT16_HI": true,
+ "R_PPC64_GOT16_LO": true,
+ "R_PPC64_GOT16_LO_DS": true,
+ "R_PPC64_GOT_DTPREL16_DS": true,
+ "R_PPC64_GOT_DTPREL16_HA": true,
+ "R_PPC64_GOT_DTPREL16_HI": true,
+ "R_PPC64_GOT_DTPREL16_LO_DS": true,
+ "R_PPC64_GOT_TLSGD16": true,
+ "R_PPC64_GOT_TLSGD16_HA": true,
+ "R_PPC64_GOT_TLSGD16_HI": true,
+ "R_PPC64_GOT_TLSGD16_LO": true,
+ "R_PPC64_GOT_TLSLD16": true,
+ "R_PPC64_GOT_TLSLD16_HA": true,
+ "R_PPC64_GOT_TLSLD16_HI": true,
+ "R_PPC64_GOT_TLSLD16_LO": true,
+ "R_PPC64_GOT_TPREL16_DS": true,
+ "R_PPC64_GOT_TPREL16_HA": true,
+ "R_PPC64_GOT_TPREL16_HI": true,
+ "R_PPC64_GOT_TPREL16_LO_DS": true,
+ "R_PPC64_IRELATIVE": true,
+ "R_PPC64_JMP_IREL": true,
+ "R_PPC64_JMP_SLOT": true,
+ "R_PPC64_NONE": true,
+ "R_PPC64_PLT16_LO_DS": true,
+ "R_PPC64_PLTGOT16": true,
+ "R_PPC64_PLTGOT16_DS": true,
+ "R_PPC64_PLTGOT16_HA": true,
+ "R_PPC64_PLTGOT16_HI": true,
+ "R_PPC64_PLTGOT16_LO": true,
+ "R_PPC64_PLTGOT_LO_DS": true,
+ "R_PPC64_REL14": true,
+ "R_PPC64_REL14_BRNTAKEN": true,
+ "R_PPC64_REL14_BRTAKEN": true,
+ "R_PPC64_REL16": true,
+ "R_PPC64_REL16DX_HA": true,
+ "R_PPC64_REL16_HA": true,
+ "R_PPC64_REL16_HI": true,
+ "R_PPC64_REL16_LO": true,
+ "R_PPC64_REL24": true,
+ "R_PPC64_REL24_NOTOC": true,
+ "R_PPC64_REL32": true,
+ "R_PPC64_REL64": true,
+ "R_PPC64_SECTOFF_DS": true,
+ "R_PPC64_SECTOFF_LO_DS": true,
+ "R_PPC64_TLS": true,
+ "R_PPC64_TLSGD": true,
+ "R_PPC64_TLSLD": true,
+ "R_PPC64_TOC": true,
+ "R_PPC64_TOC16": true,
+ "R_PPC64_TOC16_DS": true,
+ "R_PPC64_TOC16_HA": true,
+ "R_PPC64_TOC16_HI": true,
+ "R_PPC64_TOC16_LO": true,
+ "R_PPC64_TOC16_LO_DS": true,
+ "R_PPC64_TOCSAVE": true,
+ "R_PPC64_TPREL16": true,
+ "R_PPC64_TPREL16_DS": true,
+ "R_PPC64_TPREL16_HA": true,
+ "R_PPC64_TPREL16_HI": true,
+ "R_PPC64_TPREL16_HIGH": true,
+ "R_PPC64_TPREL16_HIGHA": true,
+ "R_PPC64_TPREL16_HIGHER": true,
+ "R_PPC64_TPREL16_HIGHERA": true,
+ "R_PPC64_TPREL16_HIGHEST": true,
+ "R_PPC64_TPREL16_HIGHESTA": true,
+ "R_PPC64_TPREL16_LO": true,
+ "R_PPC64_TPREL16_LO_DS": true,
+ "R_PPC64_TPREL64": true,
+ "R_PPC_ADDR14": true,
+ "R_PPC_ADDR14_BRNTAKEN": true,
+ "R_PPC_ADDR14_BRTAKEN": true,
+ "R_PPC_ADDR16": true,
+ "R_PPC_ADDR16_HA": true,
+ "R_PPC_ADDR16_HI": true,
+ "R_PPC_ADDR16_LO": true,
+ "R_PPC_ADDR24": true,
+ "R_PPC_ADDR32": true,
+ "R_PPC_COPY": true,
+ "R_PPC_DTPMOD32": true,
+ "R_PPC_DTPREL16": true,
+ "R_PPC_DTPREL16_HA": true,
+ "R_PPC_DTPREL16_HI": true,
+ "R_PPC_DTPREL16_LO": true,
+ "R_PPC_DTPREL32": true,
+ "R_PPC_EMB_BIT_FLD": true,
+ "R_PPC_EMB_MRKREF": true,
+ "R_PPC_EMB_NADDR16": true,
+ "R_PPC_EMB_NADDR16_HA": true,
+ "R_PPC_EMB_NADDR16_HI": true,
+ "R_PPC_EMB_NADDR16_LO": true,
+ "R_PPC_EMB_NADDR32": true,
+ "R_PPC_EMB_RELSDA": true,
+ "R_PPC_EMB_RELSEC16": true,
+ "R_PPC_EMB_RELST_HA": true,
+ "R_PPC_EMB_RELST_HI": true,
+ "R_PPC_EMB_RELST_LO": true,
+ "R_PPC_EMB_SDA21": true,
+ "R_PPC_EMB_SDA2I16": true,
+ "R_PPC_EMB_SDA2REL": true,
+ "R_PPC_EMB_SDAI16": true,
+ "R_PPC_GLOB_DAT": true,
+ "R_PPC_GOT16": true,
+ "R_PPC_GOT16_HA": true,
+ "R_PPC_GOT16_HI": true,
+ "R_PPC_GOT16_LO": true,
+ "R_PPC_GOT_TLSGD16": true,
+ "R_PPC_GOT_TLSGD16_HA": true,
+ "R_PPC_GOT_TLSGD16_HI": true,
+ "R_PPC_GOT_TLSGD16_LO": true,
+ "R_PPC_GOT_TLSLD16": true,
+ "R_PPC_GOT_TLSLD16_HA": true,
+ "R_PPC_GOT_TLSLD16_HI": true,
+ "R_PPC_GOT_TLSLD16_LO": true,
+ "R_PPC_GOT_TPREL16": true,
+ "R_PPC_GOT_TPREL16_HA": true,
+ "R_PPC_GOT_TPREL16_HI": true,
+ "R_PPC_GOT_TPREL16_LO": true,
+ "R_PPC_JMP_SLOT": true,
+ "R_PPC_LOCAL24PC": true,
+ "R_PPC_NONE": true,
+ "R_PPC_PLT16_HA": true,
+ "R_PPC_PLT16_HI": true,
+ "R_PPC_PLT16_LO": true,
+ "R_PPC_PLT32": true,
+ "R_PPC_PLTREL24": true,
+ "R_PPC_PLTREL32": true,
+ "R_PPC_REL14": true,
+ "R_PPC_REL14_BRNTAKEN": true,
+ "R_PPC_REL14_BRTAKEN": true,
+ "R_PPC_REL24": true,
+ "R_PPC_REL32": true,
+ "R_PPC_RELATIVE": true,
+ "R_PPC_SDAREL16": true,
+ "R_PPC_SECTOFF": true,
+ "R_PPC_SECTOFF_HA": true,
+ "R_PPC_SECTOFF_HI": true,
+ "R_PPC_SECTOFF_LO": true,
+ "R_PPC_TLS": true,
+ "R_PPC_TPREL16": true,
+ "R_PPC_TPREL16_HA": true,
+ "R_PPC_TPREL16_HI": true,
+ "R_PPC_TPREL16_LO": true,
+ "R_PPC_TPREL32": true,
+ "R_PPC_UADDR16": true,
+ "R_PPC_UADDR32": true,
+ "R_RISCV": true,
+ "R_RISCV_32": true,
+ "R_RISCV_64": true,
+ "R_RISCV_ADD16": true,
+ "R_RISCV_ADD32": true,
+ "R_RISCV_ADD64": true,
+ "R_RISCV_ADD8": true,
+ "R_RISCV_ALIGN": true,
+ "R_RISCV_BRANCH": true,
+ "R_RISCV_CALL": true,
+ "R_RISCV_CALL_PLT": true,
+ "R_RISCV_COPY": true,
+ "R_RISCV_GNU_VTENTRY": true,
+ "R_RISCV_GNU_VTINHERIT": true,
+ "R_RISCV_GOT_HI20": true,
+ "R_RISCV_GPREL_I": true,
+ "R_RISCV_GPREL_S": true,
+ "R_RISCV_HI20": true,
+ "R_RISCV_JAL": true,
+ "R_RISCV_JUMP_SLOT": true,
+ "R_RISCV_LO12_I": true,
+ "R_RISCV_LO12_S": true,
+ "R_RISCV_NONE": true,
+ "R_RISCV_PCREL_HI20": true,
+ "R_RISCV_PCREL_LO12_I": true,
+ "R_RISCV_PCREL_LO12_S": true,
+ "R_RISCV_RELATIVE": true,
+ "R_RISCV_RELAX": true,
+ "R_RISCV_RVC_BRANCH": true,
+ "R_RISCV_RVC_JUMP": true,
+ "R_RISCV_RVC_LUI": true,
+ "R_RISCV_SET16": true,
+ "R_RISCV_SET32": true,
+ "R_RISCV_SET6": true,
+ "R_RISCV_SET8": true,
+ "R_RISCV_SUB16": true,
+ "R_RISCV_SUB32": true,
+ "R_RISCV_SUB6": true,
+ "R_RISCV_SUB64": true,
+ "R_RISCV_SUB8": true,
+ "R_RISCV_TLS_DTPMOD32": true,
+ "R_RISCV_TLS_DTPMOD64": true,
+ "R_RISCV_TLS_DTPREL32": true,
+ "R_RISCV_TLS_DTPREL64": true,
+ "R_RISCV_TLS_GD_HI20": true,
+ "R_RISCV_TLS_GOT_HI20": true,
+ "R_RISCV_TLS_TPREL32": true,
+ "R_RISCV_TLS_TPREL64": true,
+ "R_RISCV_TPREL_ADD": true,
+ "R_RISCV_TPREL_HI20": true,
+ "R_RISCV_TPREL_I": true,
+ "R_RISCV_TPREL_LO12_I": true,
+ "R_RISCV_TPREL_LO12_S": true,
+ "R_RISCV_TPREL_S": true,
+ "R_SPARC": true,
+ "R_SPARC_10": true,
+ "R_SPARC_11": true,
+ "R_SPARC_13": true,
+ "R_SPARC_16": true,
+ "R_SPARC_22": true,
+ "R_SPARC_32": true,
+ "R_SPARC_5": true,
+ "R_SPARC_6": true,
+ "R_SPARC_64": true,
+ "R_SPARC_7": true,
+ "R_SPARC_8": true,
+ "R_SPARC_COPY": true,
+ "R_SPARC_DISP16": true,
+ "R_SPARC_DISP32": true,
+ "R_SPARC_DISP64": true,
+ "R_SPARC_DISP8": true,
+ "R_SPARC_GLOB_DAT": true,
+ "R_SPARC_GLOB_JMP": true,
+ "R_SPARC_GOT10": true,
+ "R_SPARC_GOT13": true,
+ "R_SPARC_GOT22": true,
+ "R_SPARC_H44": true,
+ "R_SPARC_HH22": true,
+ "R_SPARC_HI22": true,
+ "R_SPARC_HIPLT22": true,
+ "R_SPARC_HIX22": true,
+ "R_SPARC_HM10": true,
+ "R_SPARC_JMP_SLOT": true,
+ "R_SPARC_L44": true,
+ "R_SPARC_LM22": true,
+ "R_SPARC_LO10": true,
+ "R_SPARC_LOPLT10": true,
+ "R_SPARC_LOX10": true,
+ "R_SPARC_M44": true,
+ "R_SPARC_NONE": true,
+ "R_SPARC_OLO10": true,
+ "R_SPARC_PC10": true,
+ "R_SPARC_PC22": true,
+ "R_SPARC_PCPLT10": true,
+ "R_SPARC_PCPLT22": true,
+ "R_SPARC_PCPLT32": true,
+ "R_SPARC_PC_HH22": true,
+ "R_SPARC_PC_HM10": true,
+ "R_SPARC_PC_LM22": true,
+ "R_SPARC_PLT32": true,
+ "R_SPARC_PLT64": true,
+ "R_SPARC_REGISTER": true,
+ "R_SPARC_RELATIVE": true,
+ "R_SPARC_UA16": true,
+ "R_SPARC_UA32": true,
+ "R_SPARC_UA64": true,
+ "R_SPARC_WDISP16": true,
+ "R_SPARC_WDISP19": true,
+ "R_SPARC_WDISP22": true,
+ "R_SPARC_WDISP30": true,
+ "R_SPARC_WPLT30": true,
+ "R_SYM32": true,
+ "R_SYM64": true,
+ "R_TYPE32": true,
+ "R_TYPE64": true,
+ "R_X86_64": true,
+ "R_X86_64_16": true,
+ "R_X86_64_32": true,
+ "R_X86_64_32S": true,
+ "R_X86_64_64": true,
+ "R_X86_64_8": true,
+ "R_X86_64_COPY": true,
+ "R_X86_64_DTPMOD64": true,
+ "R_X86_64_DTPOFF32": true,
+ "R_X86_64_DTPOFF64": true,
+ "R_X86_64_GLOB_DAT": true,
+ "R_X86_64_GOT32": true,
+ "R_X86_64_GOT64": true,
+ "R_X86_64_GOTOFF64": true,
+ "R_X86_64_GOTPC32": true,
+ "R_X86_64_GOTPC32_TLSDESC": true,
+ "R_X86_64_GOTPC64": true,
+ "R_X86_64_GOTPCREL": true,
+ "R_X86_64_GOTPCREL64": true,
+ "R_X86_64_GOTPCRELX": true,
+ "R_X86_64_GOTPLT64": true,
+ "R_X86_64_GOTTPOFF": true,
+ "R_X86_64_IRELATIVE": true,
+ "R_X86_64_JMP_SLOT": true,
+ "R_X86_64_NONE": true,
+ "R_X86_64_PC16": true,
+ "R_X86_64_PC32": true,
+ "R_X86_64_PC32_BND": true,
+ "R_X86_64_PC64": true,
+ "R_X86_64_PC8": true,
+ "R_X86_64_PLT32": true,
+ "R_X86_64_PLT32_BND": true,
+ "R_X86_64_PLTOFF64": true,
+ "R_X86_64_RELATIVE": true,
+ "R_X86_64_RELATIVE64": true,
+ "R_X86_64_REX_GOTPCRELX": true,
+ "R_X86_64_SIZE32": true,
+ "R_X86_64_SIZE64": true,
+ "R_X86_64_TLSDESC": true,
+ "R_X86_64_TLSDESC_CALL": true,
+ "R_X86_64_TLSGD": true,
+ "R_X86_64_TLSLD": true,
+ "R_X86_64_TPOFF32": true,
+ "R_X86_64_TPOFF64": true,
+ "Rel32": true,
+ "Rel64": true,
+ "Rela32": true,
+ "Rela64": true,
+ "SHF_ALLOC": true,
+ "SHF_COMPRESSED": true,
+ "SHF_EXECINSTR": true,
+ "SHF_GROUP": true,
+ "SHF_INFO_LINK": true,
+ "SHF_LINK_ORDER": true,
+ "SHF_MASKOS": true,
+ "SHF_MASKPROC": true,
+ "SHF_MERGE": true,
+ "SHF_OS_NONCONFORMING": true,
+ "SHF_STRINGS": true,
+ "SHF_TLS": true,
+ "SHF_WRITE": true,
+ "SHN_ABS": true,
+ "SHN_COMMON": true,
+ "SHN_HIOS": true,
+ "SHN_HIPROC": true,
+ "SHN_HIRESERVE": true,
+ "SHN_LOOS": true,
+ "SHN_LOPROC": true,
+ "SHN_LORESERVE": true,
+ "SHN_UNDEF": true,
+ "SHN_XINDEX": true,
+ "SHT_DYNAMIC": true,
+ "SHT_DYNSYM": true,
+ "SHT_FINI_ARRAY": true,
+ "SHT_GNU_ATTRIBUTES": true,
+ "SHT_GNU_HASH": true,
+ "SHT_GNU_LIBLIST": true,
+ "SHT_GNU_VERDEF": true,
+ "SHT_GNU_VERNEED": true,
+ "SHT_GNU_VERSYM": true,
+ "SHT_GROUP": true,
+ "SHT_HASH": true,
+ "SHT_HIOS": true,
+ "SHT_HIPROC": true,
+ "SHT_HIUSER": true,
+ "SHT_INIT_ARRAY": true,
+ "SHT_LOOS": true,
+ "SHT_LOPROC": true,
+ "SHT_LOUSER": true,
+ "SHT_NOBITS": true,
+ "SHT_NOTE": true,
+ "SHT_NULL": true,
+ "SHT_PREINIT_ARRAY": true,
+ "SHT_PROGBITS": true,
+ "SHT_REL": true,
+ "SHT_RELA": true,
+ "SHT_SHLIB": true,
+ "SHT_STRTAB": true,
+ "SHT_SYMTAB": true,
+ "SHT_SYMTAB_SHNDX": true,
+ "STB_GLOBAL": true,
+ "STB_HIOS": true,
+ "STB_HIPROC": true,
+ "STB_LOCAL": true,
+ "STB_LOOS": true,
+ "STB_LOPROC": true,
+ "STB_WEAK": true,
+ "STT_COMMON": true,
+ "STT_FILE": true,
+ "STT_FUNC": true,
+ "STT_HIOS": true,
+ "STT_HIPROC": true,
+ "STT_LOOS": true,
+ "STT_LOPROC": true,
+ "STT_NOTYPE": true,
+ "STT_OBJECT": true,
+ "STT_SECTION": true,
+ "STT_TLS": true,
+ "STV_DEFAULT": true,
+ "STV_HIDDEN": true,
+ "STV_INTERNAL": true,
+ "STV_PROTECTED": true,
+ "ST_BIND": true,
+ "ST_INFO": true,
+ "ST_TYPE": true,
+ "ST_VISIBILITY": true,
+ "Section": true,
+ "Section32": true,
+ "Section64": true,
+ "SectionFlag": true,
+ "SectionHeader": true,
+ "SectionIndex": true,
+ "SectionType": true,
+ "Sym32": true,
+ "Sym32Size": true,
+ "Sym64": true,
+ "Sym64Size": true,
+ "SymBind": true,
+ "SymType": true,
+ "SymVis": true,
+ "Symbol": true,
+ "Type": true,
+ "Version": true,
+ },
+ "debug/gosym": map[string]bool{
+ "DecodingError": true,
+ "Func": true,
+ "LineTable": true,
+ "NewLineTable": true,
+ "NewTable": true,
+ "Obj": true,
+ "Sym": true,
+ "Table": true,
+ "UnknownFileError": true,
+ "UnknownLineError": true,
+ },
+ "debug/macho": map[string]bool{
+ "ARM64_RELOC_ADDEND": true,
+ "ARM64_RELOC_BRANCH26": true,
+ "ARM64_RELOC_GOT_LOAD_PAGE21": true,
+ "ARM64_RELOC_GOT_LOAD_PAGEOFF12": true,
+ "ARM64_RELOC_PAGE21": true,
+ "ARM64_RELOC_PAGEOFF12": true,
+ "ARM64_RELOC_POINTER_TO_GOT": true,
+ "ARM64_RELOC_SUBTRACTOR": true,
+ "ARM64_RELOC_TLVP_LOAD_PAGE21": true,
+ "ARM64_RELOC_TLVP_LOAD_PAGEOFF12": true,
+ "ARM64_RELOC_UNSIGNED": true,
+ "ARM_RELOC_BR24": true,
+ "ARM_RELOC_HALF": true,
+ "ARM_RELOC_HALF_SECTDIFF": true,
+ "ARM_RELOC_LOCAL_SECTDIFF": true,
+ "ARM_RELOC_PAIR": true,
+ "ARM_RELOC_PB_LA_PTR": true,
+ "ARM_RELOC_SECTDIFF": true,
+ "ARM_RELOC_VANILLA": true,
+ "ARM_THUMB_32BIT_BRANCH": true,
+ "ARM_THUMB_RELOC_BR22": true,
+ "Cpu": true,
+ "Cpu386": true,
+ "CpuAmd64": true,
+ "CpuArm": true,
+ "CpuArm64": true,
+ "CpuPpc": true,
+ "CpuPpc64": true,
+ "Dylib": true,
+ "DylibCmd": true,
+ "Dysymtab": true,
+ "DysymtabCmd": true,
+ "ErrNotFat": true,
+ "FatArch": true,
+ "FatArchHeader": true,
+ "FatFile": true,
+ "File": true,
+ "FileHeader": true,
+ "FlagAllModsBound": true,
+ "FlagAllowStackExecution": true,
+ "FlagAppExtensionSafe": true,
+ "FlagBindAtLoad": true,
+ "FlagBindsToWeak": true,
+ "FlagCanonical": true,
+ "FlagDeadStrippableDylib": true,
+ "FlagDyldLink": true,
+ "FlagForceFlat": true,
+ "FlagHasTLVDescriptors": true,
+ "FlagIncrLink": true,
+ "FlagLazyInit": true,
+ "FlagNoFixPrebinding": true,
+ "FlagNoHeapExecution": true,
+ "FlagNoMultiDefs": true,
+ "FlagNoReexportedDylibs": true,
+ "FlagNoUndefs": true,
+ "FlagPIE": true,
+ "FlagPrebindable": true,
+ "FlagPrebound": true,
+ "FlagRootSafe": true,
+ "FlagSetuidSafe": true,
+ "FlagSplitSegs": true,
+ "FlagSubsectionsViaSymbols": true,
+ "FlagTwoLevel": true,
+ "FlagWeakDefines": true,
+ "FormatError": true,
+ "GENERIC_RELOC_LOCAL_SECTDIFF": true,
+ "GENERIC_RELOC_PAIR": true,
+ "GENERIC_RELOC_PB_LA_PTR": true,
+ "GENERIC_RELOC_SECTDIFF": true,
+ "GENERIC_RELOC_TLV": true,
+ "GENERIC_RELOC_VANILLA": true,
+ "Load": true,
+ "LoadBytes": true,
+ "LoadCmd": true,
+ "LoadCmdDylib": true,
+ "LoadCmdDylinker": true,
+ "LoadCmdDysymtab": true,
+ "LoadCmdRpath": true,
+ "LoadCmdSegment": true,
+ "LoadCmdSegment64": true,
+ "LoadCmdSymtab": true,
+ "LoadCmdThread": true,
+ "LoadCmdUnixThread": true,
+ "Magic32": true,
+ "Magic64": true,
+ "MagicFat": true,
+ "NewFatFile": true,
+ "NewFile": true,
+ "Nlist32": true,
+ "Nlist64": true,
+ "Open": true,
+ "OpenFat": true,
+ "Regs386": true,
+ "RegsAMD64": true,
+ "Reloc": true,
+ "RelocTypeARM": true,
+ "RelocTypeARM64": true,
+ "RelocTypeGeneric": true,
+ "RelocTypeX86_64": true,
+ "Rpath": true,
+ "RpathCmd": true,
+ "Section": true,
+ "Section32": true,
+ "Section64": true,
+ "SectionHeader": true,
+ "Segment": true,
+ "Segment32": true,
+ "Segment64": true,
+ "SegmentHeader": true,
+ "Symbol": true,
+ "Symtab": true,
+ "SymtabCmd": true,
+ "Thread": true,
+ "Type": true,
+ "TypeBundle": true,
+ "TypeDylib": true,
+ "TypeExec": true,
+ "TypeObj": true,
+ "X86_64_RELOC_BRANCH": true,
+ "X86_64_RELOC_GOT": true,
+ "X86_64_RELOC_GOT_LOAD": true,
+ "X86_64_RELOC_SIGNED": true,
+ "X86_64_RELOC_SIGNED_1": true,
+ "X86_64_RELOC_SIGNED_2": true,
+ "X86_64_RELOC_SIGNED_4": true,
+ "X86_64_RELOC_SUBTRACTOR": true,
+ "X86_64_RELOC_TLV": true,
+ "X86_64_RELOC_UNSIGNED": true,
+ },
+ "debug/pe": map[string]bool{
+ "COFFSymbol": true,
+ "COFFSymbolSize": true,
+ "DataDirectory": true,
+ "File": true,
+ "FileHeader": true,
+ "FormatError": true,
+ "IMAGE_DIRECTORY_ENTRY_ARCHITECTURE": true,
+ "IMAGE_DIRECTORY_ENTRY_BASERELOC": true,
+ "IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT": true,
+ "IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR": true,
+ "IMAGE_DIRECTORY_ENTRY_DEBUG": true,
+ "IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT": true,
+ "IMAGE_DIRECTORY_ENTRY_EXCEPTION": true,
+ "IMAGE_DIRECTORY_ENTRY_EXPORT": true,
+ "IMAGE_DIRECTORY_ENTRY_GLOBALPTR": true,
+ "IMAGE_DIRECTORY_ENTRY_IAT": true,
+ "IMAGE_DIRECTORY_ENTRY_IMPORT": true,
+ "IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG": true,
+ "IMAGE_DIRECTORY_ENTRY_RESOURCE": true,
+ "IMAGE_DIRECTORY_ENTRY_SECURITY": true,
+ "IMAGE_DIRECTORY_ENTRY_TLS": true,
+ "IMAGE_FILE_MACHINE_AM33": true,
+ "IMAGE_FILE_MACHINE_AMD64": true,
+ "IMAGE_FILE_MACHINE_ARM": true,
+ "IMAGE_FILE_MACHINE_ARM64": true,
+ "IMAGE_FILE_MACHINE_EBC": true,
+ "IMAGE_FILE_MACHINE_I386": true,
+ "IMAGE_FILE_MACHINE_IA64": true,
+ "IMAGE_FILE_MACHINE_M32R": true,
+ "IMAGE_FILE_MACHINE_MIPS16": true,
+ "IMAGE_FILE_MACHINE_MIPSFPU": true,
+ "IMAGE_FILE_MACHINE_MIPSFPU16": true,
+ "IMAGE_FILE_MACHINE_POWERPC": true,
+ "IMAGE_FILE_MACHINE_POWERPCFP": true,
+ "IMAGE_FILE_MACHINE_R4000": true,
+ "IMAGE_FILE_MACHINE_SH3": true,
+ "IMAGE_FILE_MACHINE_SH3DSP": true,
+ "IMAGE_FILE_MACHINE_SH4": true,
+ "IMAGE_FILE_MACHINE_SH5": true,
+ "IMAGE_FILE_MACHINE_THUMB": true,
+ "IMAGE_FILE_MACHINE_UNKNOWN": true,
+ "IMAGE_FILE_MACHINE_WCEMIPSV2": true,
+ "ImportDirectory": true,
+ "NewFile": true,
+ "Open": true,
+ "OptionalHeader32": true,
+ "OptionalHeader64": true,
+ "Reloc": true,
+ "Section": true,
+ "SectionHeader": true,
+ "SectionHeader32": true,
+ "StringTable": true,
+ "Symbol": true,
+ },
+ "debug/plan9obj": map[string]bool{
+ "File": true,
+ "FileHeader": true,
+ "Magic386": true,
+ "Magic64": true,
+ "MagicAMD64": true,
+ "MagicARM": true,
+ "NewFile": true,
+ "Open": true,
+ "Section": true,
+ "SectionHeader": true,
+ "Sym": true,
+ },
+ "encoding": map[string]bool{
+ "BinaryMarshaler": true,
+ "BinaryUnmarshaler": true,
+ "TextMarshaler": true,
+ "TextUnmarshaler": true,
+ },
+ "encoding/ascii85": map[string]bool{
+ "CorruptInputError": true,
+ "Decode": true,
+ "Encode": true,
+ "MaxEncodedLen": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ },
+ "encoding/asn1": map[string]bool{
+ "BitString": true,
+ "ClassApplication": true,
+ "ClassContextSpecific": true,
+ "ClassPrivate": true,
+ "ClassUniversal": true,
+ "Enumerated": true,
+ "Flag": true,
+ "Marshal": true,
+ "MarshalWithParams": true,
+ "NullBytes": true,
+ "NullRawValue": true,
+ "ObjectIdentifier": true,
+ "RawContent": true,
+ "RawValue": true,
+ "StructuralError": true,
+ "SyntaxError": true,
+ "TagBitString": true,
+ "TagBoolean": true,
+ "TagEnum": true,
+ "TagGeneralString": true,
+ "TagGeneralizedTime": true,
+ "TagIA5String": true,
+ "TagInteger": true,
+ "TagNull": true,
+ "TagNumericString": true,
+ "TagOID": true,
+ "TagOctetString": true,
+ "TagPrintableString": true,
+ "TagSequence": true,
+ "TagSet": true,
+ "TagT61String": true,
+ "TagUTCTime": true,
+ "TagUTF8String": true,
+ "Unmarshal": true,
+ "UnmarshalWithParams": true,
+ },
+ "encoding/base32": map[string]bool{
+ "CorruptInputError": true,
+ "Encoding": true,
+ "HexEncoding": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ "NewEncoding": true,
+ "NoPadding": true,
+ "StdEncoding": true,
+ "StdPadding": true,
+ },
+ "encoding/base64": map[string]bool{
+ "CorruptInputError": true,
+ "Encoding": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ "NewEncoding": true,
+ "NoPadding": true,
+ "RawStdEncoding": true,
+ "RawURLEncoding": true,
+ "StdEncoding": true,
+ "StdPadding": true,
+ "URLEncoding": true,
+ },
+ "encoding/binary": map[string]bool{
+ "BigEndian": true,
+ "ByteOrder": true,
+ "LittleEndian": true,
+ "MaxVarintLen16": true,
+ "MaxVarintLen32": true,
+ "MaxVarintLen64": true,
+ "PutUvarint": true,
+ "PutVarint": true,
+ "Read": true,
+ "ReadUvarint": true,
+ "ReadVarint": true,
+ "Size": true,
+ "Uvarint": true,
+ "Varint": true,
+ "Write": true,
+ },
+ "encoding/csv": map[string]bool{
+ "ErrBareQuote": true,
+ "ErrFieldCount": true,
+ "ErrQuote": true,
+ "ErrTrailingComma": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "ParseError": true,
+ "Reader": true,
+ "Writer": true,
+ },
+ "encoding/gob": map[string]bool{
+ "CommonType": true,
+ "Decoder": true,
+ "Encoder": true,
+ "GobDecoder": true,
+ "GobEncoder": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ "Register": true,
+ "RegisterName": true,
+ },
+ "encoding/hex": map[string]bool{
+ "Decode": true,
+ "DecodeString": true,
+ "DecodedLen": true,
+ "Dump": true,
+ "Dumper": true,
+ "Encode": true,
+ "EncodeToString": true,
+ "EncodedLen": true,
+ "ErrLength": true,
+ "InvalidByteError": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ },
+ "encoding/json": map[string]bool{
+ "Compact": true,
+ "Decoder": true,
+ "Delim": true,
+ "Encoder": true,
+ "HTMLEscape": true,
+ "Indent": true,
+ "InvalidUTF8Error": true,
+ "InvalidUnmarshalError": true,
+ "Marshal": true,
+ "MarshalIndent": true,
+ "Marshaler": true,
+ "MarshalerError": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ "Number": true,
+ "RawMessage": true,
+ "SyntaxError": true,
+ "Token": true,
+ "Unmarshal": true,
+ "UnmarshalFieldError": true,
+ "UnmarshalTypeError": true,
+ "Unmarshaler": true,
+ "UnsupportedTypeError": true,
+ "UnsupportedValueError": true,
+ "Valid": true,
+ },
+ "encoding/pem": map[string]bool{
+ "Block": true,
+ "Decode": true,
+ "Encode": true,
+ "EncodeToMemory": true,
+ },
+ "encoding/xml": map[string]bool{
+ "Attr": true,
+ "CharData": true,
+ "Comment": true,
+ "CopyToken": true,
+ "Decoder": true,
+ "Directive": true,
+ "Encoder": true,
+ "EndElement": true,
+ "Escape": true,
+ "EscapeText": true,
+ "HTMLAutoClose": true,
+ "HTMLEntity": true,
+ "Header": true,
+ "Marshal": true,
+ "MarshalIndent": true,
+ "Marshaler": true,
+ "MarshalerAttr": true,
+ "Name": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ "NewTokenDecoder": true,
+ "ProcInst": true,
+ "StartElement": true,
+ "SyntaxError": true,
+ "TagPathError": true,
+ "Token": true,
+ "TokenReader": true,
+ "Unmarshal": true,
+ "UnmarshalError": true,
+ "Unmarshaler": true,
+ "UnmarshalerAttr": true,
+ "UnsupportedTypeError": true,
+ },
+ "errors": map[string]bool{
+ "New": true,
+ },
+ "expvar": map[string]bool{
+ "Do": true,
+ "Float": true,
+ "Func": true,
+ "Get": true,
+ "Handler": true,
+ "Int": true,
+ "KeyValue": true,
+ "Map": true,
+ "NewFloat": true,
+ "NewInt": true,
+ "NewMap": true,
+ "NewString": true,
+ "Publish": true,
+ "String": true,
+ "Var": true,
+ },
+ "flag": map[string]bool{
+ "Arg": true,
+ "Args": true,
+ "Bool": true,
+ "BoolVar": true,
+ "CommandLine": true,
+ "ContinueOnError": true,
+ "Duration": true,
+ "DurationVar": true,
+ "ErrHelp": true,
+ "ErrorHandling": true,
+ "ExitOnError": true,
+ "Flag": true,
+ "FlagSet": true,
+ "Float64": true,
+ "Float64Var": true,
+ "Getter": true,
+ "Int": true,
+ "Int64": true,
+ "Int64Var": true,
+ "IntVar": true,
+ "Lookup": true,
+ "NArg": true,
+ "NFlag": true,
+ "NewFlagSet": true,
+ "PanicOnError": true,
+ "Parse": true,
+ "Parsed": true,
+ "PrintDefaults": true,
+ "Set": true,
+ "String": true,
+ "StringVar": true,
+ "Uint": true,
+ "Uint64": true,
+ "Uint64Var": true,
+ "UintVar": true,
+ "UnquoteUsage": true,
+ "Usage": true,
+ "Value": true,
+ "Var": true,
+ "Visit": true,
+ "VisitAll": true,
+ },
+ "fmt": map[string]bool{
+ "Errorf": true,
+ "Formatter": true,
+ "Fprint": true,
+ "Fprintf": true,
+ "Fprintln": true,
+ "Fscan": true,
+ "Fscanf": true,
+ "Fscanln": true,
+ "GoStringer": true,
+ "Print": true,
+ "Printf": true,
+ "Println": true,
+ "Scan": true,
+ "ScanState": true,
+ "Scanf": true,
+ "Scanln": true,
+ "Scanner": true,
+ "Sprint": true,
+ "Sprintf": true,
+ "Sprintln": true,
+ "Sscan": true,
+ "Sscanf": true,
+ "Sscanln": true,
+ "State": true,
+ "Stringer": true,
+ },
+ "go/ast": map[string]bool{
+ "ArrayType": true,
+ "AssignStmt": true,
+ "Bad": true,
+ "BadDecl": true,
+ "BadExpr": true,
+ "BadStmt": true,
+ "BasicLit": true,
+ "BinaryExpr": true,
+ "BlockStmt": true,
+ "BranchStmt": true,
+ "CallExpr": true,
+ "CaseClause": true,
+ "ChanDir": true,
+ "ChanType": true,
+ "CommClause": true,
+ "Comment": true,
+ "CommentGroup": true,
+ "CommentMap": true,
+ "CompositeLit": true,
+ "Con": true,
+ "DeclStmt": true,
+ "DeferStmt": true,
+ "Ellipsis": true,
+ "EmptyStmt": true,
+ "ExprStmt": true,
+ "Field": true,
+ "FieldFilter": true,
+ "FieldList": true,
+ "File": true,
+ "FileExports": true,
+ "Filter": true,
+ "FilterDecl": true,
+ "FilterFile": true,
+ "FilterFuncDuplicates": true,
+ "FilterImportDuplicates": true,
+ "FilterPackage": true,
+ "FilterUnassociatedComments": true,
+ "ForStmt": true,
+ "Fprint": true,
+ "Fun": true,
+ "FuncDecl": true,
+ "FuncLit": true,
+ "FuncType": true,
+ "GenDecl": true,
+ "GoStmt": true,
+ "Ident": true,
+ "IfStmt": true,
+ "ImportSpec": true,
+ "Importer": true,
+ "IncDecStmt": true,
+ "IndexExpr": true,
+ "Inspect": true,
+ "InterfaceType": true,
+ "IsExported": true,
+ "KeyValueExpr": true,
+ "LabeledStmt": true,
+ "Lbl": true,
+ "MapType": true,
+ "MergeMode": true,
+ "MergePackageFiles": true,
+ "NewCommentMap": true,
+ "NewIdent": true,
+ "NewObj": true,
+ "NewPackage": true,
+ "NewScope": true,
+ "Node": true,
+ "NotNilFilter": true,
+ "ObjKind": true,
+ "Object": true,
+ "Package": true,
+ "PackageExports": true,
+ "ParenExpr": true,
+ "Pkg": true,
+ "Print": true,
+ "RECV": true,
+ "RangeStmt": true,
+ "ReturnStmt": true,
+ "SEND": true,
+ "Scope": true,
+ "SelectStmt": true,
+ "SelectorExpr": true,
+ "SendStmt": true,
+ "SliceExpr": true,
+ "SortImports": true,
+ "StarExpr": true,
+ "StructType": true,
+ "SwitchStmt": true,
+ "Typ": true,
+ "TypeAssertExpr": true,
+ "TypeSpec": true,
+ "TypeSwitchStmt": true,
+ "UnaryExpr": true,
+ "ValueSpec": true,
+ "Var": true,
+ "Visitor": true,
+ "Walk": true,
+ },
+ "go/build": map[string]bool{
+ "AllowBinary": true,
+ "ArchChar": true,
+ "Context": true,
+ "Default": true,
+ "FindOnly": true,
+ "IgnoreVendor": true,
+ "Import": true,
+ "ImportComment": true,
+ "ImportDir": true,
+ "ImportMode": true,
+ "IsLocalImport": true,
+ "MultiplePackageError": true,
+ "NoGoError": true,
+ "Package": true,
+ "ToolDir": true,
+ },
+ "go/constant": map[string]bool{
+ "BinaryOp": true,
+ "BitLen": true,
+ "Bool": true,
+ "BoolVal": true,
+ "Bytes": true,
+ "Compare": true,
+ "Complex": true,
+ "Denom": true,
+ "Float": true,
+ "Float32Val": true,
+ "Float64Val": true,
+ "Imag": true,
+ "Int": true,
+ "Int64Val": true,
+ "Kind": true,
+ "MakeBool": true,
+ "MakeFloat64": true,
+ "MakeFromBytes": true,
+ "MakeFromLiteral": true,
+ "MakeImag": true,
+ "MakeInt64": true,
+ "MakeString": true,
+ "MakeUint64": true,
+ "MakeUnknown": true,
+ "Num": true,
+ "Real": true,
+ "Shift": true,
+ "Sign": true,
+ "String": true,
+ "StringVal": true,
+ "ToComplex": true,
+ "ToFloat": true,
+ "ToInt": true,
+ "Uint64Val": true,
+ "UnaryOp": true,
+ "Unknown": true,
+ },
+ "go/doc": map[string]bool{
+ "AllDecls": true,
+ "AllMethods": true,
+ "Example": true,
+ "Examples": true,
+ "Filter": true,
+ "Func": true,
+ "IllegalPrefixes": true,
+ "IsPredeclared": true,
+ "Mode": true,
+ "New": true,
+ "Note": true,
+ "Package": true,
+ "Synopsis": true,
+ "ToHTML": true,
+ "ToText": true,
+ "Type": true,
+ "Value": true,
+ },
+ "go/format": map[string]bool{
+ "Node": true,
+ "Source": true,
+ },
+ "go/importer": map[string]bool{
+ "Default": true,
+ "For": true,
+ "Lookup": true,
+ },
+ "go/parser": map[string]bool{
+ "AllErrors": true,
+ "DeclarationErrors": true,
+ "ImportsOnly": true,
+ "Mode": true,
+ "PackageClauseOnly": true,
+ "ParseComments": true,
+ "ParseDir": true,
+ "ParseExpr": true,
+ "ParseExprFrom": true,
+ "ParseFile": true,
+ "SpuriousErrors": true,
+ "Trace": true,
+ },
+ "go/printer": map[string]bool{
+ "CommentedNode": true,
+ "Config": true,
+ "Fprint": true,
+ "Mode": true,
+ "RawFormat": true,
+ "SourcePos": true,
+ "TabIndent": true,
+ "UseSpaces": true,
+ },
+ "go/scanner": map[string]bool{
+ "Error": true,
+ "ErrorHandler": true,
+ "ErrorList": true,
+ "Mode": true,
+ "PrintError": true,
+ "ScanComments": true,
+ "Scanner": true,
+ },
+ "go/token": map[string]bool{
+ "ADD": true,
+ "ADD_ASSIGN": true,
+ "AND": true,
+ "AND_ASSIGN": true,
+ "AND_NOT": true,
+ "AND_NOT_ASSIGN": true,
+ "ARROW": true,
+ "ASSIGN": true,
+ "BREAK": true,
+ "CASE": true,
+ "CHAN": true,
+ "CHAR": true,
+ "COLON": true,
+ "COMMA": true,
+ "COMMENT": true,
+ "CONST": true,
+ "CONTINUE": true,
+ "DEC": true,
+ "DEFAULT": true,
+ "DEFER": true,
+ "DEFINE": true,
+ "ELLIPSIS": true,
+ "ELSE": true,
+ "EOF": true,
+ "EQL": true,
+ "FALLTHROUGH": true,
+ "FLOAT": true,
+ "FOR": true,
+ "FUNC": true,
+ "File": true,
+ "FileSet": true,
+ "GEQ": true,
+ "GO": true,
+ "GOTO": true,
+ "GTR": true,
+ "HighestPrec": true,
+ "IDENT": true,
+ "IF": true,
+ "ILLEGAL": true,
+ "IMAG": true,
+ "IMPORT": true,
+ "INC": true,
+ "INT": true,
+ "INTERFACE": true,
+ "LAND": true,
+ "LBRACE": true,
+ "LBRACK": true,
+ "LEQ": true,
+ "LOR": true,
+ "LPAREN": true,
+ "LSS": true,
+ "Lookup": true,
+ "LowestPrec": true,
+ "MAP": true,
+ "MUL": true,
+ "MUL_ASSIGN": true,
+ "NEQ": true,
+ "NOT": true,
+ "NewFileSet": true,
+ "NoPos": true,
+ "OR": true,
+ "OR_ASSIGN": true,
+ "PACKAGE": true,
+ "PERIOD": true,
+ "Pos": true,
+ "Position": true,
+ "QUO": true,
+ "QUO_ASSIGN": true,
+ "RANGE": true,
+ "RBRACE": true,
+ "RBRACK": true,
+ "REM": true,
+ "REM_ASSIGN": true,
+ "RETURN": true,
+ "RPAREN": true,
+ "SELECT": true,
+ "SEMICOLON": true,
+ "SHL": true,
+ "SHL_ASSIGN": true,
+ "SHR": true,
+ "SHR_ASSIGN": true,
+ "STRING": true,
+ "STRUCT": true,
+ "SUB": true,
+ "SUB_ASSIGN": true,
+ "SWITCH": true,
+ "TYPE": true,
+ "Token": true,
+ "UnaryPrec": true,
+ "VAR": true,
+ "XOR": true,
+ "XOR_ASSIGN": true,
+ },
+ "go/types": map[string]bool{
+ "Array": true,
+ "AssertableTo": true,
+ "AssignableTo": true,
+ "Basic": true,
+ "BasicInfo": true,
+ "BasicKind": true,
+ "Bool": true,
+ "Builtin": true,
+ "Byte": true,
+ "Chan": true,
+ "ChanDir": true,
+ "Checker": true,
+ "Comparable": true,
+ "Complex128": true,
+ "Complex64": true,
+ "Config": true,
+ "Const": true,
+ "ConvertibleTo": true,
+ "DefPredeclaredTestFuncs": true,
+ "Default": true,
+ "Error": true,
+ "Eval": true,
+ "ExprString": true,
+ "FieldVal": true,
+ "Float32": true,
+ "Float64": true,
+ "Func": true,
+ "Id": true,
+ "Identical": true,
+ "IdenticalIgnoreTags": true,
+ "Implements": true,
+ "ImportMode": true,
+ "Importer": true,
+ "ImporterFrom": true,
+ "Info": true,
+ "Initializer": true,
+ "Int": true,
+ "Int16": true,
+ "Int32": true,
+ "Int64": true,
+ "Int8": true,
+ "Interface": true,
+ "Invalid": true,
+ "IsBoolean": true,
+ "IsComplex": true,
+ "IsConstType": true,
+ "IsFloat": true,
+ "IsInteger": true,
+ "IsInterface": true,
+ "IsNumeric": true,
+ "IsOrdered": true,
+ "IsString": true,
+ "IsUnsigned": true,
+ "IsUntyped": true,
+ "Label": true,
+ "LookupFieldOrMethod": true,
+ "Map": true,
+ "MethodExpr": true,
+ "MethodSet": true,
+ "MethodVal": true,
+ "MissingMethod": true,
+ "Named": true,
+ "NewArray": true,
+ "NewChan": true,
+ "NewChecker": true,
+ "NewConst": true,
+ "NewField": true,
+ "NewFunc": true,
+ "NewInterface": true,
+ "NewInterfaceType": true,
+ "NewLabel": true,
+ "NewMap": true,
+ "NewMethodSet": true,
+ "NewNamed": true,
+ "NewPackage": true,
+ "NewParam": true,
+ "NewPkgName": true,
+ "NewPointer": true,
+ "NewScope": true,
+ "NewSignature": true,
+ "NewSlice": true,
+ "NewStruct": true,
+ "NewTuple": true,
+ "NewTypeName": true,
+ "NewVar": true,
+ "Nil": true,
+ "ObjectString": true,
+ "Package": true,
+ "PkgName": true,
+ "Pointer": true,
+ "Qualifier": true,
+ "RecvOnly": true,
+ "RelativeTo": true,
+ "Rune": true,
+ "Scope": true,
+ "Selection": true,
+ "SelectionKind": true,
+ "SelectionString": true,
+ "SendOnly": true,
+ "SendRecv": true,
+ "Signature": true,
+ "Sizes": true,
+ "SizesFor": true,
+ "Slice": true,
+ "StdSizes": true,
+ "String": true,
+ "Struct": true,
+ "Tuple": true,
+ "Typ": true,
+ "Type": true,
+ "TypeAndValue": true,
+ "TypeName": true,
+ "TypeString": true,
+ "Uint": true,
+ "Uint16": true,
+ "Uint32": true,
+ "Uint64": true,
+ "Uint8": true,
+ "Uintptr": true,
+ "Universe": true,
+ "Unsafe": true,
+ "UnsafePointer": true,
+ "UntypedBool": true,
+ "UntypedComplex": true,
+ "UntypedFloat": true,
+ "UntypedInt": true,
+ "UntypedNil": true,
+ "UntypedRune": true,
+ "UntypedString": true,
+ "Var": true,
+ "WriteExpr": true,
+ "WriteSignature": true,
+ "WriteType": true,
+ },
+ "hash": map[string]bool{
+ "Hash": true,
+ "Hash32": true,
+ "Hash64": true,
+ },
+ "hash/adler32": map[string]bool{
+ "Checksum": true,
+ "New": true,
+ "Size": true,
+ },
+ "hash/crc32": map[string]bool{
+ "Castagnoli": true,
+ "Checksum": true,
+ "ChecksumIEEE": true,
+ "IEEE": true,
+ "IEEETable": true,
+ "Koopman": true,
+ "MakeTable": true,
+ "New": true,
+ "NewIEEE": true,
+ "Size": true,
+ "Table": true,
+ "Update": true,
+ },
+ "hash/crc64": map[string]bool{
+ "Checksum": true,
+ "ECMA": true,
+ "ISO": true,
+ "MakeTable": true,
+ "New": true,
+ "Size": true,
+ "Table": true,
+ "Update": true,
+ },
+ "hash/fnv": map[string]bool{
+ "New128": true,
+ "New128a": true,
+ "New32": true,
+ "New32a": true,
+ "New64": true,
+ "New64a": true,
+ },
+ "html": map[string]bool{
+ "EscapeString": true,
+ "UnescapeString": true,
+ },
+ "html/template": map[string]bool{
+ "CSS": true,
+ "ErrAmbigContext": true,
+ "ErrBadHTML": true,
+ "ErrBranchEnd": true,
+ "ErrEndContext": true,
+ "ErrNoSuchTemplate": true,
+ "ErrOutputContext": true,
+ "ErrPartialCharset": true,
+ "ErrPartialEscape": true,
+ "ErrPredefinedEscaper": true,
+ "ErrRangeLoopReentry": true,
+ "ErrSlashAmbig": true,
+ "Error": true,
+ "ErrorCode": true,
+ "FuncMap": true,
+ "HTML": true,
+ "HTMLAttr": true,
+ "HTMLEscape": true,
+ "HTMLEscapeString": true,
+ "HTMLEscaper": true,
+ "IsTrue": true,
+ "JS": true,
+ "JSEscape": true,
+ "JSEscapeString": true,
+ "JSEscaper": true,
+ "JSStr": true,
+ "Must": true,
+ "New": true,
+ "OK": true,
+ "ParseFiles": true,
+ "ParseGlob": true,
+ "Srcset": true,
+ "Template": true,
+ "URL": true,
+ "URLQueryEscaper": true,
+ },
+ "image": map[string]bool{
+ "Alpha": true,
+ "Alpha16": true,
+ "Black": true,
+ "CMYK": true,
+ "Config": true,
+ "Decode": true,
+ "DecodeConfig": true,
+ "ErrFormat": true,
+ "Gray": true,
+ "Gray16": true,
+ "Image": true,
+ "NRGBA": true,
+ "NRGBA64": true,
+ "NYCbCrA": true,
+ "NewAlpha": true,
+ "NewAlpha16": true,
+ "NewCMYK": true,
+ "NewGray": true,
+ "NewGray16": true,
+ "NewNRGBA": true,
+ "NewNRGBA64": true,
+ "NewNYCbCrA": true,
+ "NewPaletted": true,
+ "NewRGBA": true,
+ "NewRGBA64": true,
+ "NewUniform": true,
+ "NewYCbCr": true,
+ "Opaque": true,
+ "Paletted": true,
+ "PalettedImage": true,
+ "Point": true,
+ "Pt": true,
+ "RGBA": true,
+ "RGBA64": true,
+ "Rect": true,
+ "Rectangle": true,
+ "RegisterFormat": true,
+ "Transparent": true,
+ "Uniform": true,
+ "White": true,
+ "YCbCr": true,
+ "YCbCrSubsampleRatio": true,
+ "YCbCrSubsampleRatio410": true,
+ "YCbCrSubsampleRatio411": true,
+ "YCbCrSubsampleRatio420": true,
+ "YCbCrSubsampleRatio422": true,
+ "YCbCrSubsampleRatio440": true,
+ "YCbCrSubsampleRatio444": true,
+ "ZP": true,
+ "ZR": true,
+ },
+ "image/color": map[string]bool{
+ "Alpha": true,
+ "Alpha16": true,
+ "Alpha16Model": true,
+ "AlphaModel": true,
+ "Black": true,
+ "CMYK": true,
+ "CMYKModel": true,
+ "CMYKToRGB": true,
+ "Color": true,
+ "Gray": true,
+ "Gray16": true,
+ "Gray16Model": true,
+ "GrayModel": true,
+ "Model": true,
+ "ModelFunc": true,
+ "NRGBA": true,
+ "NRGBA64": true,
+ "NRGBA64Model": true,
+ "NRGBAModel": true,
+ "NYCbCrA": true,
+ "NYCbCrAModel": true,
+ "Opaque": true,
+ "Palette": true,
+ "RGBA": true,
+ "RGBA64": true,
+ "RGBA64Model": true,
+ "RGBAModel": true,
+ "RGBToCMYK": true,
+ "RGBToYCbCr": true,
+ "Transparent": true,
+ "White": true,
+ "YCbCr": true,
+ "YCbCrModel": true,
+ "YCbCrToRGB": true,
+ },
+ "image/color/palette": map[string]bool{
+ "Plan9": true,
+ "WebSafe": true,
+ },
+ "image/draw": map[string]bool{
+ "Draw": true,
+ "DrawMask": true,
+ "Drawer": true,
+ "FloydSteinberg": true,
+ "Image": true,
+ "Op": true,
+ "Over": true,
+ "Quantizer": true,
+ "Src": true,
+ },
+ "image/gif": map[string]bool{
+ "Decode": true,
+ "DecodeAll": true,
+ "DecodeConfig": true,
+ "DisposalBackground": true,
+ "DisposalNone": true,
+ "DisposalPrevious": true,
+ "Encode": true,
+ "EncodeAll": true,
+ "GIF": true,
+ "Options": true,
+ },
+ "image/jpeg": map[string]bool{
+ "Decode": true,
+ "DecodeConfig": true,
+ "DefaultQuality": true,
+ "Encode": true,
+ "FormatError": true,
+ "Options": true,
+ "Reader": true,
+ "UnsupportedError": true,
+ },
+ "image/png": map[string]bool{
+ "BestCompression": true,
+ "BestSpeed": true,
+ "CompressionLevel": true,
+ "Decode": true,
+ "DecodeConfig": true,
+ "DefaultCompression": true,
+ "Encode": true,
+ "Encoder": true,
+ "EncoderBuffer": true,
+ "EncoderBufferPool": true,
+ "FormatError": true,
+ "NoCompression": true,
+ "UnsupportedError": true,
+ },
+ "index/suffixarray": map[string]bool{
+ "Index": true,
+ "New": true,
+ },
+ "io": map[string]bool{
+ "ByteReader": true,
+ "ByteScanner": true,
+ "ByteWriter": true,
+ "Closer": true,
+ "Copy": true,
+ "CopyBuffer": true,
+ "CopyN": true,
+ "EOF": true,
+ "ErrClosedPipe": true,
+ "ErrNoProgress": true,
+ "ErrShortBuffer": true,
+ "ErrShortWrite": true,
+ "ErrUnexpectedEOF": true,
+ "LimitReader": true,
+ "LimitedReader": true,
+ "MultiReader": true,
+ "MultiWriter": true,
+ "NewSectionReader": true,
+ "Pipe": true,
+ "PipeReader": true,
+ "PipeWriter": true,
+ "ReadAtLeast": true,
+ "ReadCloser": true,
+ "ReadFull": true,
+ "ReadSeeker": true,
+ "ReadWriteCloser": true,
+ "ReadWriteSeeker": true,
+ "ReadWriter": true,
+ "Reader": true,
+ "ReaderAt": true,
+ "ReaderFrom": true,
+ "RuneReader": true,
+ "RuneScanner": true,
+ "SectionReader": true,
+ "SeekCurrent": true,
+ "SeekEnd": true,
+ "SeekStart": true,
+ "Seeker": true,
+ "TeeReader": true,
+ "WriteCloser": true,
+ "WriteSeeker": true,
+ "WriteString": true,
+ "Writer": true,
+ "WriterAt": true,
+ "WriterTo": true,
+ },
+ "io/ioutil": map[string]bool{
+ "Discard": true,
+ "NopCloser": true,
+ "ReadAll": true,
+ "ReadDir": true,
+ "ReadFile": true,
+ "TempDir": true,
+ "TempFile": true,
+ "WriteFile": true,
+ },
+ "log": map[string]bool{
+ "Fatal": true,
+ "Fatalf": true,
+ "Fatalln": true,
+ "Flags": true,
+ "LUTC": true,
+ "Ldate": true,
+ "Llongfile": true,
+ "Lmicroseconds": true,
+ "Logger": true,
+ "Lshortfile": true,
+ "LstdFlags": true,
+ "Ltime": true,
+ "New": true,
+ "Output": true,
+ "Panic": true,
+ "Panicf": true,
+ "Panicln": true,
+ "Prefix": true,
+ "Print": true,
+ "Printf": true,
+ "Println": true,
+ "SetFlags": true,
+ "SetOutput": true,
+ "SetPrefix": true,
+ },
+ "log/syslog": map[string]bool{
+ "Dial": true,
+ "LOG_ALERT": true,
+ "LOG_AUTH": true,
+ "LOG_AUTHPRIV": true,
+ "LOG_CRIT": true,
+ "LOG_CRON": true,
+ "LOG_DAEMON": true,
+ "LOG_DEBUG": true,
+ "LOG_EMERG": true,
+ "LOG_ERR": true,
+ "LOG_FTP": true,
+ "LOG_INFO": true,
+ "LOG_KERN": true,
+ "LOG_LOCAL0": true,
+ "LOG_LOCAL1": true,
+ "LOG_LOCAL2": true,
+ "LOG_LOCAL3": true,
+ "LOG_LOCAL4": true,
+ "LOG_LOCAL5": true,
+ "LOG_LOCAL6": true,
+ "LOG_LOCAL7": true,
+ "LOG_LPR": true,
+ "LOG_MAIL": true,
+ "LOG_NEWS": true,
+ "LOG_NOTICE": true,
+ "LOG_SYSLOG": true,
+ "LOG_USER": true,
+ "LOG_UUCP": true,
+ "LOG_WARNING": true,
+ "New": true,
+ "NewLogger": true,
+ "Priority": true,
+ "Writer": true,
+ },
+ "math": map[string]bool{
+ "Abs": true,
+ "Acos": true,
+ "Acosh": true,
+ "Asin": true,
+ "Asinh": true,
+ "Atan": true,
+ "Atan2": true,
+ "Atanh": true,
+ "Cbrt": true,
+ "Ceil": true,
+ "Copysign": true,
+ "Cos": true,
+ "Cosh": true,
+ "Dim": true,
+ "E": true,
+ "Erf": true,
+ "Erfc": true,
+ "Erfcinv": true,
+ "Erfinv": true,
+ "Exp": true,
+ "Exp2": true,
+ "Expm1": true,
+ "Float32bits": true,
+ "Float32frombits": true,
+ "Float64bits": true,
+ "Float64frombits": true,
+ "Floor": true,
+ "Frexp": true,
+ "Gamma": true,
+ "Hypot": true,
+ "Ilogb": true,
+ "Inf": true,
+ "IsInf": true,
+ "IsNaN": true,
+ "J0": true,
+ "J1": true,
+ "Jn": true,
+ "Ldexp": true,
+ "Lgamma": true,
+ "Ln10": true,
+ "Ln2": true,
+ "Log": true,
+ "Log10": true,
+ "Log10E": true,
+ "Log1p": true,
+ "Log2": true,
+ "Log2E": true,
+ "Logb": true,
+ "Max": true,
+ "MaxFloat32": true,
+ "MaxFloat64": true,
+ "MaxInt16": true,
+ "MaxInt32": true,
+ "MaxInt64": true,
+ "MaxInt8": true,
+ "MaxUint16": true,
+ "MaxUint32": true,
+ "MaxUint64": true,
+ "MaxUint8": true,
+ "Min": true,
+ "MinInt16": true,
+ "MinInt32": true,
+ "MinInt64": true,
+ "MinInt8": true,
+ "Mod": true,
+ "Modf": true,
+ "NaN": true,
+ "Nextafter": true,
+ "Nextafter32": true,
+ "Phi": true,
+ "Pi": true,
+ "Pow": true,
+ "Pow10": true,
+ "Remainder": true,
+ "Round": true,
+ "RoundToEven": true,
+ "Signbit": true,
+ "Sin": true,
+ "Sincos": true,
+ "Sinh": true,
+ "SmallestNonzeroFloat32": true,
+ "SmallestNonzeroFloat64": true,
+ "Sqrt": true,
+ "Sqrt2": true,
+ "SqrtE": true,
+ "SqrtPhi": true,
+ "SqrtPi": true,
+ "Tan": true,
+ "Tanh": true,
+ "Trunc": true,
+ "Y0": true,
+ "Y1": true,
+ "Yn": true,
+ },
+ "math/big": map[string]bool{
+ "Above": true,
+ "Accuracy": true,
+ "AwayFromZero": true,
+ "Below": true,
+ "ErrNaN": true,
+ "Exact": true,
+ "Float": true,
+ "Int": true,
+ "Jacobi": true,
+ "MaxBase": true,
+ "MaxExp": true,
+ "MaxPrec": true,
+ "MinExp": true,
+ "NewFloat": true,
+ "NewInt": true,
+ "NewRat": true,
+ "ParseFloat": true,
+ "Rat": true,
+ "RoundingMode": true,
+ "ToNearestAway": true,
+ "ToNearestEven": true,
+ "ToNegativeInf": true,
+ "ToPositiveInf": true,
+ "ToZero": true,
+ "Word": true,
+ },
+ "math/bits": map[string]bool{
+ "LeadingZeros": true,
+ "LeadingZeros16": true,
+ "LeadingZeros32": true,
+ "LeadingZeros64": true,
+ "LeadingZeros8": true,
+ "Len": true,
+ "Len16": true,
+ "Len32": true,
+ "Len64": true,
+ "Len8": true,
+ "OnesCount": true,
+ "OnesCount16": true,
+ "OnesCount32": true,
+ "OnesCount64": true,
+ "OnesCount8": true,
+ "Reverse": true,
+ "Reverse16": true,
+ "Reverse32": true,
+ "Reverse64": true,
+ "Reverse8": true,
+ "ReverseBytes": true,
+ "ReverseBytes16": true,
+ "ReverseBytes32": true,
+ "ReverseBytes64": true,
+ "RotateLeft": true,
+ "RotateLeft16": true,
+ "RotateLeft32": true,
+ "RotateLeft64": true,
+ "RotateLeft8": true,
+ "TrailingZeros": true,
+ "TrailingZeros16": true,
+ "TrailingZeros32": true,
+ "TrailingZeros64": true,
+ "TrailingZeros8": true,
+ "UintSize": true,
+ },
+ "math/cmplx": map[string]bool{
+ "Abs": true,
+ "Acos": true,
+ "Acosh": true,
+ "Asin": true,
+ "Asinh": true,
+ "Atan": true,
+ "Atanh": true,
+ "Conj": true,
+ "Cos": true,
+ "Cosh": true,
+ "Cot": true,
+ "Exp": true,
+ "Inf": true,
+ "IsInf": true,
+ "IsNaN": true,
+ "Log": true,
+ "Log10": true,
+ "NaN": true,
+ "Phase": true,
+ "Polar": true,
+ "Pow": true,
+ "Rect": true,
+ "Sin": true,
+ "Sinh": true,
+ "Sqrt": true,
+ "Tan": true,
+ "Tanh": true,
+ },
+ "math/rand": map[string]bool{
+ "ExpFloat64": true,
+ "Float32": true,
+ "Float64": true,
+ "Int": true,
+ "Int31": true,
+ "Int31n": true,
+ "Int63": true,
+ "Int63n": true,
+ "Intn": true,
+ "New": true,
+ "NewSource": true,
+ "NewZipf": true,
+ "NormFloat64": true,
+ "Perm": true,
+ "Rand": true,
+ "Read": true,
+ "Seed": true,
+ "Shuffle": true,
+ "Source": true,
+ "Source64": true,
+ "Uint32": true,
+ "Uint64": true,
+ "Zipf": true,
+ },
+ "mime": map[string]bool{
+ "AddExtensionType": true,
+ "BEncoding": true,
+ "ErrInvalidMediaParameter": true,
+ "ExtensionsByType": true,
+ "FormatMediaType": true,
+ "ParseMediaType": true,
+ "QEncoding": true,
+ "TypeByExtension": true,
+ "WordDecoder": true,
+ "WordEncoder": true,
+ },
+ "mime/multipart": map[string]bool{
+ "ErrMessageTooLarge": true,
+ "File": true,
+ "FileHeader": true,
+ "Form": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "Part": true,
+ "Reader": true,
+ "Writer": true,
+ },
+ "mime/quotedprintable": map[string]bool{
+ "NewReader": true,
+ "NewWriter": true,
+ "Reader": true,
+ "Writer": true,
+ },
+ "net": map[string]bool{
+ "Addr": true,
+ "AddrError": true,
+ "Buffers": true,
+ "CIDRMask": true,
+ "Conn": true,
+ "DNSConfigError": true,
+ "DNSError": true,
+ "DefaultResolver": true,
+ "Dial": true,
+ "DialIP": true,
+ "DialTCP": true,
+ "DialTimeout": true,
+ "DialUDP": true,
+ "DialUnix": true,
+ "Dialer": true,
+ "ErrWriteToConnected": true,
+ "Error": true,
+ "FileConn": true,
+ "FileListener": true,
+ "FilePacketConn": true,
+ "FlagBroadcast": true,
+ "FlagLoopback": true,
+ "FlagMulticast": true,
+ "FlagPointToPoint": true,
+ "FlagUp": true,
+ "Flags": true,
+ "HardwareAddr": true,
+ "IP": true,
+ "IPAddr": true,
+ "IPConn": true,
+ "IPMask": true,
+ "IPNet": true,
+ "IPv4": true,
+ "IPv4Mask": true,
+ "IPv4allrouter": true,
+ "IPv4allsys": true,
+ "IPv4bcast": true,
+ "IPv4len": true,
+ "IPv4zero": true,
+ "IPv6interfacelocalallnodes": true,
+ "IPv6len": true,
+ "IPv6linklocalallnodes": true,
+ "IPv6linklocalallrouters": true,
+ "IPv6loopback": true,
+ "IPv6unspecified": true,
+ "IPv6zero": true,
+ "Interface": true,
+ "InterfaceAddrs": true,
+ "InterfaceByIndex": true,
+ "InterfaceByName": true,
+ "Interfaces": true,
+ "InvalidAddrError": true,
+ "JoinHostPort": true,
+ "Listen": true,
+ "ListenConfig": true,
+ "ListenIP": true,
+ "ListenMulticastUDP": true,
+ "ListenPacket": true,
+ "ListenTCP": true,
+ "ListenUDP": true,
+ "ListenUnix": true,
+ "ListenUnixgram": true,
+ "Listener": true,
+ "LookupAddr": true,
+ "LookupCNAME": true,
+ "LookupHost": true,
+ "LookupIP": true,
+ "LookupMX": true,
+ "LookupNS": true,
+ "LookupPort": true,
+ "LookupSRV": true,
+ "LookupTXT": true,
+ "MX": true,
+ "NS": true,
+ "OpError": true,
+ "PacketConn": true,
+ "ParseCIDR": true,
+ "ParseError": true,
+ "ParseIP": true,
+ "ParseMAC": true,
+ "Pipe": true,
+ "ResolveIPAddr": true,
+ "ResolveTCPAddr": true,
+ "ResolveUDPAddr": true,
+ "ResolveUnixAddr": true,
+ "Resolver": true,
+ "SRV": true,
+ "SplitHostPort": true,
+ "TCPAddr": true,
+ "TCPConn": true,
+ "TCPListener": true,
+ "UDPAddr": true,
+ "UDPConn": true,
+ "UnixAddr": true,
+ "UnixConn": true,
+ "UnixListener": true,
+ "UnknownNetworkError": true,
+ },
+ "net/http": map[string]bool{
+ "CanonicalHeaderKey": true,
+ "Client": true,
+ "CloseNotifier": true,
+ "ConnState": true,
+ "Cookie": true,
+ "CookieJar": true,
+ "DefaultClient": true,
+ "DefaultMaxHeaderBytes": true,
+ "DefaultMaxIdleConnsPerHost": true,
+ "DefaultServeMux": true,
+ "DefaultTransport": true,
+ "DetectContentType": true,
+ "Dir": true,
+ "ErrAbortHandler": true,
+ "ErrBodyNotAllowed": true,
+ "ErrBodyReadAfterClose": true,
+ "ErrContentLength": true,
+ "ErrHandlerTimeout": true,
+ "ErrHeaderTooLong": true,
+ "ErrHijacked": true,
+ "ErrLineTooLong": true,
+ "ErrMissingBoundary": true,
+ "ErrMissingContentLength": true,
+ "ErrMissingFile": true,
+ "ErrNoCookie": true,
+ "ErrNoLocation": true,
+ "ErrNotMultipart": true,
+ "ErrNotSupported": true,
+ "ErrServerClosed": true,
+ "ErrShortBody": true,
+ "ErrSkipAltProtocol": true,
+ "ErrUnexpectedTrailer": true,
+ "ErrUseLastResponse": true,
+ "ErrWriteAfterFlush": true,
+ "Error": true,
+ "File": true,
+ "FileServer": true,
+ "FileSystem": true,
+ "Flusher": true,
+ "Get": true,
+ "Handle": true,
+ "HandleFunc": true,
+ "Handler": true,
+ "HandlerFunc": true,
+ "Head": true,
+ "Header": true,
+ "Hijacker": true,
+ "ListenAndServe": true,
+ "ListenAndServeTLS": true,
+ "LocalAddrContextKey": true,
+ "MaxBytesReader": true,
+ "MethodConnect": true,
+ "MethodDelete": true,
+ "MethodGet": true,
+ "MethodHead": true,
+ "MethodOptions": true,
+ "MethodPatch": true,
+ "MethodPost": true,
+ "MethodPut": true,
+ "MethodTrace": true,
+ "NewFileTransport": true,
+ "NewRequest": true,
+ "NewServeMux": true,
+ "NoBody": true,
+ "NotFound": true,
+ "NotFoundHandler": true,
+ "ParseHTTPVersion": true,
+ "ParseTime": true,
+ "Post": true,
+ "PostForm": true,
+ "ProtocolError": true,
+ "ProxyFromEnvironment": true,
+ "ProxyURL": true,
+ "PushOptions": true,
+ "Pusher": true,
+ "ReadRequest": true,
+ "ReadResponse": true,
+ "Redirect": true,
+ "RedirectHandler": true,
+ "Request": true,
+ "Response": true,
+ "ResponseWriter": true,
+ "RoundTripper": true,
+ "SameSite": true,
+ "SameSiteDefaultMode": true,
+ "SameSiteLaxMode": true,
+ "SameSiteStrictMode": true,
+ "Serve": true,
+ "ServeContent": true,
+ "ServeFile": true,
+ "ServeMux": true,
+ "ServeTLS": true,
+ "Server": true,
+ "ServerContextKey": true,
+ "SetCookie": true,
+ "StateActive": true,
+ "StateClosed": true,
+ "StateHijacked": true,
+ "StateIdle": true,
+ "StateNew": true,
+ "StatusAccepted": true,
+ "StatusAlreadyReported": true,
+ "StatusBadGateway": true,
+ "StatusBadRequest": true,
+ "StatusConflict": true,
+ "StatusContinue": true,
+ "StatusCreated": true,
+ "StatusExpectationFailed": true,
+ "StatusFailedDependency": true,
+ "StatusForbidden": true,
+ "StatusFound": true,
+ "StatusGatewayTimeout": true,
+ "StatusGone": true,
+ "StatusHTTPVersionNotSupported": true,
+ "StatusIMUsed": true,
+ "StatusInsufficientStorage": true,
+ "StatusInternalServerError": true,
+ "StatusLengthRequired": true,
+ "StatusLocked": true,
+ "StatusLoopDetected": true,
+ "StatusMethodNotAllowed": true,
+ "StatusMisdirectedRequest": true,
+ "StatusMovedPermanently": true,
+ "StatusMultiStatus": true,
+ "StatusMultipleChoices": true,
+ "StatusNetworkAuthenticationRequired": true,
+ "StatusNoContent": true,
+ "StatusNonAuthoritativeInfo": true,
+ "StatusNotAcceptable": true,
+ "StatusNotExtended": true,
+ "StatusNotFound": true,
+ "StatusNotImplemented": true,
+ "StatusNotModified": true,
+ "StatusOK": true,
+ "StatusPartialContent": true,
+ "StatusPaymentRequired": true,
+ "StatusPermanentRedirect": true,
+ "StatusPreconditionFailed": true,
+ "StatusPreconditionRequired": true,
+ "StatusProcessing": true,
+ "StatusProxyAuthRequired": true,
+ "StatusRequestEntityTooLarge": true,
+ "StatusRequestHeaderFieldsTooLarge": true,
+ "StatusRequestTimeout": true,
+ "StatusRequestURITooLong": true,
+ "StatusRequestedRangeNotSatisfiable": true,
+ "StatusResetContent": true,
+ "StatusSeeOther": true,
+ "StatusServiceUnavailable": true,
+ "StatusSwitchingProtocols": true,
+ "StatusTeapot": true,
+ "StatusTemporaryRedirect": true,
+ "StatusText": true,
+ "StatusTooManyRequests": true,
+ "StatusUnauthorized": true,
+ "StatusUnavailableForLegalReasons": true,
+ "StatusUnprocessableEntity": true,
+ "StatusUnsupportedMediaType": true,
+ "StatusUpgradeRequired": true,
+ "StatusUseProxy": true,
+ "StatusVariantAlsoNegotiates": true,
+ "StripPrefix": true,
+ "TimeFormat": true,
+ "TimeoutHandler": true,
+ "TrailerPrefix": true,
+ "Transport": true,
+ },
+ "net/http/cgi": map[string]bool{
+ "Handler": true,
+ "Request": true,
+ "RequestFromMap": true,
+ "Serve": true,
+ },
+ "net/http/cookiejar": map[string]bool{
+ "Jar": true,
+ "New": true,
+ "Options": true,
+ "PublicSuffixList": true,
+ },
+ "net/http/fcgi": map[string]bool{
+ "ErrConnClosed": true,
+ "ErrRequestAborted": true,
+ "ProcessEnv": true,
+ "Serve": true,
+ },
+ "net/http/httptest": map[string]bool{
+ "DefaultRemoteAddr": true,
+ "NewRecorder": true,
+ "NewRequest": true,
+ "NewServer": true,
+ "NewTLSServer": true,
+ "NewUnstartedServer": true,
+ "ResponseRecorder": true,
+ "Server": true,
+ },
+ "net/http/httptrace": map[string]bool{
+ "ClientTrace": true,
+ "ContextClientTrace": true,
+ "DNSDoneInfo": true,
+ "DNSStartInfo": true,
+ "GotConnInfo": true,
+ "WithClientTrace": true,
+ "WroteRequestInfo": true,
+ },
+ "net/http/httputil": map[string]bool{
+ "BufferPool": true,
+ "ClientConn": true,
+ "DumpRequest": true,
+ "DumpRequestOut": true,
+ "DumpResponse": true,
+ "ErrClosed": true,
+ "ErrLineTooLong": true,
+ "ErrPersistEOF": true,
+ "ErrPipeline": true,
+ "NewChunkedReader": true,
+ "NewChunkedWriter": true,
+ "NewClientConn": true,
+ "NewProxyClientConn": true,
+ "NewServerConn": true,
+ "NewSingleHostReverseProxy": true,
+ "ReverseProxy": true,
+ "ServerConn": true,
+ },
+ "net/http/pprof": map[string]bool{
+ "Cmdline": true,
+ "Handler": true,
+ "Index": true,
+ "Profile": true,
+ "Symbol": true,
+ "Trace": true,
+ },
+ "net/mail": map[string]bool{
+ "Address": true,
+ "AddressParser": true,
+ "ErrHeaderNotPresent": true,
+ "Header": true,
+ "Message": true,
+ "ParseAddress": true,
+ "ParseAddressList": true,
+ "ParseDate": true,
+ "ReadMessage": true,
+ },
+ "net/rpc": map[string]bool{
+ "Accept": true,
+ "Call": true,
+ "Client": true,
+ "ClientCodec": true,
+ "DefaultDebugPath": true,
+ "DefaultRPCPath": true,
+ "DefaultServer": true,
+ "Dial": true,
+ "DialHTTP": true,
+ "DialHTTPPath": true,
+ "ErrShutdown": true,
+ "HandleHTTP": true,
+ "NewClient": true,
+ "NewClientWithCodec": true,
+ "NewServer": true,
+ "Register": true,
+ "RegisterName": true,
+ "Request": true,
+ "Response": true,
+ "ServeCodec": true,
+ "ServeConn": true,
+ "ServeRequest": true,
+ "Server": true,
+ "ServerCodec": true,
+ "ServerError": true,
+ },
+ "net/rpc/jsonrpc": map[string]bool{
+ "Dial": true,
+ "NewClient": true,
+ "NewClientCodec": true,
+ "NewServerCodec": true,
+ "ServeConn": true,
+ },
+ "net/smtp": map[string]bool{
+ "Auth": true,
+ "CRAMMD5Auth": true,
+ "Client": true,
+ "Dial": true,
+ "NewClient": true,
+ "PlainAuth": true,
+ "SendMail": true,
+ "ServerInfo": true,
+ },
+ "net/textproto": map[string]bool{
+ "CanonicalMIMEHeaderKey": true,
+ "Conn": true,
+ "Dial": true,
+ "Error": true,
+ "MIMEHeader": true,
+ "NewConn": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "Pipeline": true,
+ "ProtocolError": true,
+ "Reader": true,
+ "TrimBytes": true,
+ "TrimString": true,
+ "Writer": true,
+ },
+ "net/url": map[string]bool{
+ "Error": true,
+ "EscapeError": true,
+ "InvalidHostError": true,
+ "Parse": true,
+ "ParseQuery": true,
+ "ParseRequestURI": true,
+ "PathEscape": true,
+ "PathUnescape": true,
+ "QueryEscape": true,
+ "QueryUnescape": true,
+ "URL": true,
+ "User": true,
+ "UserPassword": true,
+ "Userinfo": true,
+ "Values": true,
+ },
+ "os": map[string]bool{
+ "Args": true,
+ "Chdir": true,
+ "Chmod": true,
+ "Chown": true,
+ "Chtimes": true,
+ "Clearenv": true,
+ "Create": true,
+ "DevNull": true,
+ "Environ": true,
+ "ErrClosed": true,
+ "ErrExist": true,
+ "ErrInvalid": true,
+ "ErrNoDeadline": true,
+ "ErrNotExist": true,
+ "ErrPermission": true,
+ "Executable": true,
+ "Exit": true,
+ "Expand": true,
+ "ExpandEnv": true,
+ "File": true,
+ "FileInfo": true,
+ "FileMode": true,
+ "FindProcess": true,
+ "Getegid": true,
+ "Getenv": true,
+ "Geteuid": true,
+ "Getgid": true,
+ "Getgroups": true,
+ "Getpagesize": true,
+ "Getpid": true,
+ "Getppid": true,
+ "Getuid": true,
+ "Getwd": true,
+ "Hostname": true,
+ "Interrupt": true,
+ "IsExist": true,
+ "IsNotExist": true,
+ "IsPathSeparator": true,
+ "IsPermission": true,
+ "IsTimeout": true,
+ "Kill": true,
+ "Lchown": true,
+ "Link": true,
+ "LinkError": true,
+ "LookupEnv": true,
+ "Lstat": true,
+ "Mkdir": true,
+ "MkdirAll": true,
+ "ModeAppend": true,
+ "ModeCharDevice": true,
+ "ModeDevice": true,
+ "ModeDir": true,
+ "ModeExclusive": true,
+ "ModeIrregular": true,
+ "ModeNamedPipe": true,
+ "ModePerm": true,
+ "ModeSetgid": true,
+ "ModeSetuid": true,
+ "ModeSocket": true,
+ "ModeSticky": true,
+ "ModeSymlink": true,
+ "ModeTemporary": true,
+ "ModeType": true,
+ "NewFile": true,
+ "NewSyscallError": true,
+ "O_APPEND": true,
+ "O_CREATE": true,
+ "O_EXCL": true,
+ "O_RDONLY": true,
+ "O_RDWR": true,
+ "O_SYNC": true,
+ "O_TRUNC": true,
+ "O_WRONLY": true,
+ "Open": true,
+ "OpenFile": true,
+ "PathError": true,
+ "PathListSeparator": true,
+ "PathSeparator": true,
+ "Pipe": true,
+ "ProcAttr": true,
+ "Process": true,
+ "ProcessState": true,
+ "Readlink": true,
+ "Remove": true,
+ "RemoveAll": true,
+ "Rename": true,
+ "SEEK_CUR": true,
+ "SEEK_END": true,
+ "SEEK_SET": true,
+ "SameFile": true,
+ "Setenv": true,
+ "Signal": true,
+ "StartProcess": true,
+ "Stat": true,
+ "Stderr": true,
+ "Stdin": true,
+ "Stdout": true,
+ "Symlink": true,
+ "SyscallError": true,
+ "TempDir": true,
+ "Truncate": true,
+ "Unsetenv": true,
+ "UserCacheDir": true,
+ },
+ "os/exec": map[string]bool{
+ "Cmd": true,
+ "Command": true,
+ "CommandContext": true,
+ "ErrNotFound": true,
+ "Error": true,
+ "ExitError": true,
+ "LookPath": true,
+ },
+ "os/signal": map[string]bool{
+ "Ignore": true,
+ "Ignored": true,
+ "Notify": true,
+ "Reset": true,
+ "Stop": true,
+ },
+ "os/user": map[string]bool{
+ "Current": true,
+ "Group": true,
+ "Lookup": true,
+ "LookupGroup": true,
+ "LookupGroupId": true,
+ "LookupId": true,
+ "UnknownGroupError": true,
+ "UnknownGroupIdError": true,
+ "UnknownUserError": true,
+ "UnknownUserIdError": true,
+ "User": true,
+ },
+ "path": map[string]bool{
+ "Base": true,
+ "Clean": true,
+ "Dir": true,
+ "ErrBadPattern": true,
+ "Ext": true,
+ "IsAbs": true,
+ "Join": true,
+ "Match": true,
+ "Split": true,
+ },
+ "path/filepath": map[string]bool{
+ "Abs": true,
+ "Base": true,
+ "Clean": true,
+ "Dir": true,
+ "ErrBadPattern": true,
+ "EvalSymlinks": true,
+ "Ext": true,
+ "FromSlash": true,
+ "Glob": true,
+ "HasPrefix": true,
+ "IsAbs": true,
+ "Join": true,
+ "ListSeparator": true,
+ "Match": true,
+ "Rel": true,
+ "Separator": true,
+ "SkipDir": true,
+ "Split": true,
+ "SplitList": true,
+ "ToSlash": true,
+ "VolumeName": true,
+ "Walk": true,
+ "WalkFunc": true,
+ },
+ "plugin": map[string]bool{
+ "Open": true,
+ "Plugin": true,
+ "Symbol": true,
+ },
+ "reflect": map[string]bool{
+ "Append": true,
+ "AppendSlice": true,
+ "Array": true,
+ "ArrayOf": true,
+ "Bool": true,
+ "BothDir": true,
+ "Chan": true,
+ "ChanDir": true,
+ "ChanOf": true,
+ "Complex128": true,
+ "Complex64": true,
+ "Copy": true,
+ "DeepEqual": true,
+ "Float32": true,
+ "Float64": true,
+ "Func": true,
+ "FuncOf": true,
+ "Indirect": true,
+ "Int": true,
+ "Int16": true,
+ "Int32": true,
+ "Int64": true,
+ "Int8": true,
+ "Interface": true,
+ "Invalid": true,
+ "Kind": true,
+ "MakeChan": true,
+ "MakeFunc": true,
+ "MakeMap": true,
+ "MakeMapWithSize": true,
+ "MakeSlice": true,
+ "Map": true,
+ "MapOf": true,
+ "Method": true,
+ "New": true,
+ "NewAt": true,
+ "Ptr": true,
+ "PtrTo": true,
+ "RecvDir": true,
+ "Select": true,
+ "SelectCase": true,
+ "SelectDefault": true,
+ "SelectDir": true,
+ "SelectRecv": true,
+ "SelectSend": true,
+ "SendDir": true,
+ "Slice": true,
+ "SliceHeader": true,
+ "SliceOf": true,
+ "String": true,
+ "StringHeader": true,
+ "Struct": true,
+ "StructField": true,
+ "StructOf": true,
+ "StructTag": true,
+ "Swapper": true,
+ "TypeOf": true,
+ "Uint": true,
+ "Uint16": true,
+ "Uint32": true,
+ "Uint64": true,
+ "Uint8": true,
+ "Uintptr": true,
+ "UnsafePointer": true,
+ "Value": true,
+ "ValueError": true,
+ "ValueOf": true,
+ "Zero": true,
+ },
+ "regexp": map[string]bool{
+ "Compile": true,
+ "CompilePOSIX": true,
+ "Match": true,
+ "MatchReader": true,
+ "MatchString": true,
+ "MustCompile": true,
+ "MustCompilePOSIX": true,
+ "QuoteMeta": true,
+ "Regexp": true,
+ },
+ "regexp/syntax": map[string]bool{
+ "ClassNL": true,
+ "Compile": true,
+ "DotNL": true,
+ "EmptyBeginLine": true,
+ "EmptyBeginText": true,
+ "EmptyEndLine": true,
+ "EmptyEndText": true,
+ "EmptyNoWordBoundary": true,
+ "EmptyOp": true,
+ "EmptyOpContext": true,
+ "EmptyWordBoundary": true,
+ "ErrInternalError": true,
+ "ErrInvalidCharClass": true,
+ "ErrInvalidCharRange": true,
+ "ErrInvalidEscape": true,
+ "ErrInvalidNamedCapture": true,
+ "ErrInvalidPerlOp": true,
+ "ErrInvalidRepeatOp": true,
+ "ErrInvalidRepeatSize": true,
+ "ErrInvalidUTF8": true,
+ "ErrMissingBracket": true,
+ "ErrMissingParen": true,
+ "ErrMissingRepeatArgument": true,
+ "ErrTrailingBackslash": true,
+ "ErrUnexpectedParen": true,
+ "Error": true,
+ "ErrorCode": true,
+ "Flags": true,
+ "FoldCase": true,
+ "Inst": true,
+ "InstAlt": true,
+ "InstAltMatch": true,
+ "InstCapture": true,
+ "InstEmptyWidth": true,
+ "InstFail": true,
+ "InstMatch": true,
+ "InstNop": true,
+ "InstOp": true,
+ "InstRune": true,
+ "InstRune1": true,
+ "InstRuneAny": true,
+ "InstRuneAnyNotNL": true,
+ "IsWordChar": true,
+ "Literal": true,
+ "MatchNL": true,
+ "NonGreedy": true,
+ "OneLine": true,
+ "Op": true,
+ "OpAlternate": true,
+ "OpAnyChar": true,
+ "OpAnyCharNotNL": true,
+ "OpBeginLine": true,
+ "OpBeginText": true,
+ "OpCapture": true,
+ "OpCharClass": true,
+ "OpConcat": true,
+ "OpEmptyMatch": true,
+ "OpEndLine": true,
+ "OpEndText": true,
+ "OpLiteral": true,
+ "OpNoMatch": true,
+ "OpNoWordBoundary": true,
+ "OpPlus": true,
+ "OpQuest": true,
+ "OpRepeat": true,
+ "OpStar": true,
+ "OpWordBoundary": true,
+ "POSIX": true,
+ "Parse": true,
+ "Perl": true,
+ "PerlX": true,
+ "Prog": true,
+ "Regexp": true,
+ "Simple": true,
+ "UnicodeGroups": true,
+ "WasDollar": true,
+ },
+ "runtime": map[string]bool{
+ "BlockProfile": true,
+ "BlockProfileRecord": true,
+ "Breakpoint": true,
+ "CPUProfile": true,
+ "Caller": true,
+ "Callers": true,
+ "CallersFrames": true,
+ "Compiler": true,
+ "Error": true,
+ "Frame": true,
+ "Frames": true,
+ "Func": true,
+ "FuncForPC": true,
+ "GC": true,
+ "GOARCH": true,
+ "GOMAXPROCS": true,
+ "GOOS": true,
+ "GOROOT": true,
+ "Goexit": true,
+ "GoroutineProfile": true,
+ "Gosched": true,
+ "KeepAlive": true,
+ "LockOSThread": true,
+ "MemProfile": true,
+ "MemProfileRate": true,
+ "MemProfileRecord": true,
+ "MemStats": true,
+ "MutexProfile": true,
+ "NumCPU": true,
+ "NumCgoCall": true,
+ "NumGoroutine": true,
+ "ReadMemStats": true,
+ "ReadTrace": true,
+ "SetBlockProfileRate": true,
+ "SetCPUProfileRate": true,
+ "SetCgoTraceback": true,
+ "SetFinalizer": true,
+ "SetMutexProfileFraction": true,
+ "Stack": true,
+ "StackRecord": true,
+ "StartTrace": true,
+ "StopTrace": true,
+ "ThreadCreateProfile": true,
+ "TypeAssertionError": true,
+ "UnlockOSThread": true,
+ "Version": true,
+ },
+ "runtime/debug": map[string]bool{
+ "FreeOSMemory": true,
+ "GCStats": true,
+ "PrintStack": true,
+ "ReadGCStats": true,
+ "SetGCPercent": true,
+ "SetMaxStack": true,
+ "SetMaxThreads": true,
+ "SetPanicOnFault": true,
+ "SetTraceback": true,
+ "Stack": true,
+ "WriteHeapDump": true,
+ },
+ "runtime/pprof": map[string]bool{
+ "Do": true,
+ "ForLabels": true,
+ "Label": true,
+ "LabelSet": true,
+ "Labels": true,
+ "Lookup": true,
+ "NewProfile": true,
+ "Profile": true,
+ "Profiles": true,
+ "SetGoroutineLabels": true,
+ "StartCPUProfile": true,
+ "StopCPUProfile": true,
+ "WithLabels": true,
+ "WriteHeapProfile": true,
+ },
+ "runtime/trace": map[string]bool{
+ "IsEnabled": true,
+ "Log": true,
+ "Logf": true,
+ "NewTask": true,
+ "Region": true,
+ "Start": true,
+ "StartRegion": true,
+ "Stop": true,
+ "Task": true,
+ "WithRegion": true,
+ },
+ "sort": map[string]bool{
+ "Float64Slice": true,
+ "Float64s": true,
+ "Float64sAreSorted": true,
+ "IntSlice": true,
+ "Interface": true,
+ "Ints": true,
+ "IntsAreSorted": true,
+ "IsSorted": true,
+ "Reverse": true,
+ "Search": true,
+ "SearchFloat64s": true,
+ "SearchInts": true,
+ "SearchStrings": true,
+ "Slice": true,
+ "SliceIsSorted": true,
+ "SliceStable": true,
+ "Sort": true,
+ "Stable": true,
+ "StringSlice": true,
+ "Strings": true,
+ "StringsAreSorted": true,
+ },
+ "strconv": map[string]bool{
+ "AppendBool": true,
+ "AppendFloat": true,
+ "AppendInt": true,
+ "AppendQuote": true,
+ "AppendQuoteRune": true,
+ "AppendQuoteRuneToASCII": true,
+ "AppendQuoteRuneToGraphic": true,
+ "AppendQuoteToASCII": true,
+ "AppendQuoteToGraphic": true,
+ "AppendUint": true,
+ "Atoi": true,
+ "CanBackquote": true,
+ "ErrRange": true,
+ "ErrSyntax": true,
+ "FormatBool": true,
+ "FormatFloat": true,
+ "FormatInt": true,
+ "FormatUint": true,
+ "IntSize": true,
+ "IsGraphic": true,
+ "IsPrint": true,
+ "Itoa": true,
+ "NumError": true,
+ "ParseBool": true,
+ "ParseFloat": true,
+ "ParseInt": true,
+ "ParseUint": true,
+ "Quote": true,
+ "QuoteRune": true,
+ "QuoteRuneToASCII": true,
+ "QuoteRuneToGraphic": true,
+ "QuoteToASCII": true,
+ "QuoteToGraphic": true,
+ "Unquote": true,
+ "UnquoteChar": true,
+ },
+ "strings": map[string]bool{
+ "Builder": true,
+ "Compare": true,
+ "Contains": true,
+ "ContainsAny": true,
+ "ContainsRune": true,
+ "Count": true,
+ "EqualFold": true,
+ "Fields": true,
+ "FieldsFunc": true,
+ "HasPrefix": true,
+ "HasSuffix": true,
+ "Index": true,
+ "IndexAny": true,
+ "IndexByte": true,
+ "IndexFunc": true,
+ "IndexRune": true,
+ "Join": true,
+ "LastIndex": true,
+ "LastIndexAny": true,
+ "LastIndexByte": true,
+ "LastIndexFunc": true,
+ "Map": true,
+ "NewReader": true,
+ "NewReplacer": true,
+ "Reader": true,
+ "Repeat": true,
+ "Replace": true,
+ "Replacer": true,
+ "Split": true,
+ "SplitAfter": true,
+ "SplitAfterN": true,
+ "SplitN": true,
+ "Title": true,
+ "ToLower": true,
+ "ToLowerSpecial": true,
+ "ToTitle": true,
+ "ToTitleSpecial": true,
+ "ToUpper": true,
+ "ToUpperSpecial": true,
+ "Trim": true,
+ "TrimFunc": true,
+ "TrimLeft": true,
+ "TrimLeftFunc": true,
+ "TrimPrefix": true,
+ "TrimRight": true,
+ "TrimRightFunc": true,
+ "TrimSpace": true,
+ "TrimSuffix": true,
+ },
+ "sync": map[string]bool{
+ "Cond": true,
+ "Locker": true,
+ "Map": true,
+ "Mutex": true,
+ "NewCond": true,
+ "Once": true,
+ "Pool": true,
+ "RWMutex": true,
+ "WaitGroup": true,
+ },
+ "sync/atomic": map[string]bool{
+ "AddInt32": true,
+ "AddInt64": true,
+ "AddUint32": true,
+ "AddUint64": true,
+ "AddUintptr": true,
+ "CompareAndSwapInt32": true,
+ "CompareAndSwapInt64": true,
+ "CompareAndSwapPointer": true,
+ "CompareAndSwapUint32": true,
+ "CompareAndSwapUint64": true,
+ "CompareAndSwapUintptr": true,
+ "LoadInt32": true,
+ "LoadInt64": true,
+ "LoadPointer": true,
+ "LoadUint32": true,
+ "LoadUint64": true,
+ "LoadUintptr": true,
+ "StoreInt32": true,
+ "StoreInt64": true,
+ "StorePointer": true,
+ "StoreUint32": true,
+ "StoreUint64": true,
+ "StoreUintptr": true,
+ "SwapInt32": true,
+ "SwapInt64": true,
+ "SwapPointer": true,
+ "SwapUint32": true,
+ "SwapUint64": true,
+ "SwapUintptr": true,
+ "Value": true,
+ },
+ "syscall": map[string]bool{
+ "AF_ALG": true,
+ "AF_APPLETALK": true,
+ "AF_ARP": true,
+ "AF_ASH": true,
+ "AF_ATM": true,
+ "AF_ATMPVC": true,
+ "AF_ATMSVC": true,
+ "AF_AX25": true,
+ "AF_BLUETOOTH": true,
+ "AF_BRIDGE": true,
+ "AF_CAIF": true,
+ "AF_CAN": true,
+ "AF_CCITT": true,
+ "AF_CHAOS": true,
+ "AF_CNT": true,
+ "AF_COIP": true,
+ "AF_DATAKIT": true,
+ "AF_DECnet": true,
+ "AF_DLI": true,
+ "AF_E164": true,
+ "AF_ECMA": true,
+ "AF_ECONET": true,
+ "AF_ENCAP": true,
+ "AF_FILE": true,
+ "AF_HYLINK": true,
+ "AF_IEEE80211": true,
+ "AF_IEEE802154": true,
+ "AF_IMPLINK": true,
+ "AF_INET": true,
+ "AF_INET6": true,
+ "AF_INET6_SDP": true,
+ "AF_INET_SDP": true,
+ "AF_IPX": true,
+ "AF_IRDA": true,
+ "AF_ISDN": true,
+ "AF_ISO": true,
+ "AF_IUCV": true,
+ "AF_KEY": true,
+ "AF_LAT": true,
+ "AF_LINK": true,
+ "AF_LLC": true,
+ "AF_LOCAL": true,
+ "AF_MAX": true,
+ "AF_MPLS": true,
+ "AF_NATM": true,
+ "AF_NDRV": true,
+ "AF_NETBEUI": true,
+ "AF_NETBIOS": true,
+ "AF_NETGRAPH": true,
+ "AF_NETLINK": true,
+ "AF_NETROM": true,
+ "AF_NS": true,
+ "AF_OROUTE": true,
+ "AF_OSI": true,
+ "AF_PACKET": true,
+ "AF_PHONET": true,
+ "AF_PPP": true,
+ "AF_PPPOX": true,
+ "AF_PUP": true,
+ "AF_RDS": true,
+ "AF_RESERVED_36": true,
+ "AF_ROSE": true,
+ "AF_ROUTE": true,
+ "AF_RXRPC": true,
+ "AF_SCLUSTER": true,
+ "AF_SECURITY": true,
+ "AF_SIP": true,
+ "AF_SLOW": true,
+ "AF_SNA": true,
+ "AF_SYSTEM": true,
+ "AF_TIPC": true,
+ "AF_UNIX": true,
+ "AF_UNSPEC": true,
+ "AF_VENDOR00": true,
+ "AF_VENDOR01": true,
+ "AF_VENDOR02": true,
+ "AF_VENDOR03": true,
+ "AF_VENDOR04": true,
+ "AF_VENDOR05": true,
+ "AF_VENDOR06": true,
+ "AF_VENDOR07": true,
+ "AF_VENDOR08": true,
+ "AF_VENDOR09": true,
+ "AF_VENDOR10": true,
+ "AF_VENDOR11": true,
+ "AF_VENDOR12": true,
+ "AF_VENDOR13": true,
+ "AF_VENDOR14": true,
+ "AF_VENDOR15": true,
+ "AF_VENDOR16": true,
+ "AF_VENDOR17": true,
+ "AF_VENDOR18": true,
+ "AF_VENDOR19": true,
+ "AF_VENDOR20": true,
+ "AF_VENDOR21": true,
+ "AF_VENDOR22": true,
+ "AF_VENDOR23": true,
+ "AF_VENDOR24": true,
+ "AF_VENDOR25": true,
+ "AF_VENDOR26": true,
+ "AF_VENDOR27": true,
+ "AF_VENDOR28": true,
+ "AF_VENDOR29": true,
+ "AF_VENDOR30": true,
+ "AF_VENDOR31": true,
+ "AF_VENDOR32": true,
+ "AF_VENDOR33": true,
+ "AF_VENDOR34": true,
+ "AF_VENDOR35": true,
+ "AF_VENDOR36": true,
+ "AF_VENDOR37": true,
+ "AF_VENDOR38": true,
+ "AF_VENDOR39": true,
+ "AF_VENDOR40": true,
+ "AF_VENDOR41": true,
+ "AF_VENDOR42": true,
+ "AF_VENDOR43": true,
+ "AF_VENDOR44": true,
+ "AF_VENDOR45": true,
+ "AF_VENDOR46": true,
+ "AF_VENDOR47": true,
+ "AF_WANPIPE": true,
+ "AF_X25": true,
+ "AI_CANONNAME": true,
+ "AI_NUMERICHOST": true,
+ "AI_PASSIVE": true,
+ "APPLICATION_ERROR": true,
+ "ARPHRD_ADAPT": true,
+ "ARPHRD_APPLETLK": true,
+ "ARPHRD_ARCNET": true,
+ "ARPHRD_ASH": true,
+ "ARPHRD_ATM": true,
+ "ARPHRD_AX25": true,
+ "ARPHRD_BIF": true,
+ "ARPHRD_CHAOS": true,
+ "ARPHRD_CISCO": true,
+ "ARPHRD_CSLIP": true,
+ "ARPHRD_CSLIP6": true,
+ "ARPHRD_DDCMP": true,
+ "ARPHRD_DLCI": true,
+ "ARPHRD_ECONET": true,
+ "ARPHRD_EETHER": true,
+ "ARPHRD_ETHER": true,
+ "ARPHRD_EUI64": true,
+ "ARPHRD_FCAL": true,
+ "ARPHRD_FCFABRIC": true,
+ "ARPHRD_FCPL": true,
+ "ARPHRD_FCPP": true,
+ "ARPHRD_FDDI": true,
+ "ARPHRD_FRAD": true,
+ "ARPHRD_FRELAY": true,
+ "ARPHRD_HDLC": true,
+ "ARPHRD_HIPPI": true,
+ "ARPHRD_HWX25": true,
+ "ARPHRD_IEEE1394": true,
+ "ARPHRD_IEEE802": true,
+ "ARPHRD_IEEE80211": true,
+ "ARPHRD_IEEE80211_PRISM": true,
+ "ARPHRD_IEEE80211_RADIOTAP": true,
+ "ARPHRD_IEEE802154": true,
+ "ARPHRD_IEEE802154_PHY": true,
+ "ARPHRD_IEEE802_TR": true,
+ "ARPHRD_INFINIBAND": true,
+ "ARPHRD_IPDDP": true,
+ "ARPHRD_IPGRE": true,
+ "ARPHRD_IRDA": true,
+ "ARPHRD_LAPB": true,
+ "ARPHRD_LOCALTLK": true,
+ "ARPHRD_LOOPBACK": true,
+ "ARPHRD_METRICOM": true,
+ "ARPHRD_NETROM": true,
+ "ARPHRD_NONE": true,
+ "ARPHRD_PIMREG": true,
+ "ARPHRD_PPP": true,
+ "ARPHRD_PRONET": true,
+ "ARPHRD_RAWHDLC": true,
+ "ARPHRD_ROSE": true,
+ "ARPHRD_RSRVD": true,
+ "ARPHRD_SIT": true,
+ "ARPHRD_SKIP": true,
+ "ARPHRD_SLIP": true,
+ "ARPHRD_SLIP6": true,
+ "ARPHRD_STRIP": true,
+ "ARPHRD_TUNNEL": true,
+ "ARPHRD_TUNNEL6": true,
+ "ARPHRD_VOID": true,
+ "ARPHRD_X25": true,
+ "AUTHTYPE_CLIENT": true,
+ "AUTHTYPE_SERVER": true,
+ "Accept": true,
+ "Accept4": true,
+ "AcceptEx": true,
+ "Access": true,
+ "Acct": true,
+ "AddrinfoW": true,
+ "Adjtime": true,
+ "Adjtimex": true,
+ "AttachLsf": true,
+ "B0": true,
+ "B1000000": true,
+ "B110": true,
+ "B115200": true,
+ "B1152000": true,
+ "B1200": true,
+ "B134": true,
+ "B14400": true,
+ "B150": true,
+ "B1500000": true,
+ "B1800": true,
+ "B19200": true,
+ "B200": true,
+ "B2000000": true,
+ "B230400": true,
+ "B2400": true,
+ "B2500000": true,
+ "B28800": true,
+ "B300": true,
+ "B3000000": true,
+ "B3500000": true,
+ "B38400": true,
+ "B4000000": true,
+ "B460800": true,
+ "B4800": true,
+ "B50": true,
+ "B500000": true,
+ "B57600": true,
+ "B576000": true,
+ "B600": true,
+ "B7200": true,
+ "B75": true,
+ "B76800": true,
+ "B921600": true,
+ "B9600": true,
+ "BASE_PROTOCOL": true,
+ "BIOCFEEDBACK": true,
+ "BIOCFLUSH": true,
+ "BIOCGBLEN": true,
+ "BIOCGDIRECTION": true,
+ "BIOCGDIRFILT": true,
+ "BIOCGDLT": true,
+ "BIOCGDLTLIST": true,
+ "BIOCGETBUFMODE": true,
+ "BIOCGETIF": true,
+ "BIOCGETZMAX": true,
+ "BIOCGFEEDBACK": true,
+ "BIOCGFILDROP": true,
+ "BIOCGHDRCMPLT": true,
+ "BIOCGRSIG": true,
+ "BIOCGRTIMEOUT": true,
+ "BIOCGSEESENT": true,
+ "BIOCGSTATS": true,
+ "BIOCGSTATSOLD": true,
+ "BIOCGTSTAMP": true,
+ "BIOCIMMEDIATE": true,
+ "BIOCLOCK": true,
+ "BIOCPROMISC": true,
+ "BIOCROTZBUF": true,
+ "BIOCSBLEN": true,
+ "BIOCSDIRECTION": true,
+ "BIOCSDIRFILT": true,
+ "BIOCSDLT": true,
+ "BIOCSETBUFMODE": true,
+ "BIOCSETF": true,
+ "BIOCSETFNR": true,
+ "BIOCSETIF": true,
+ "BIOCSETWF": true,
+ "BIOCSETZBUF": true,
+ "BIOCSFEEDBACK": true,
+ "BIOCSFILDROP": true,
+ "BIOCSHDRCMPLT": true,
+ "BIOCSRSIG": true,
+ "BIOCSRTIMEOUT": true,
+ "BIOCSSEESENT": true,
+ "BIOCSTCPF": true,
+ "BIOCSTSTAMP": true,
+ "BIOCSUDPF": true,
+ "BIOCVERSION": true,
+ "BPF_A": true,
+ "BPF_ABS": true,
+ "BPF_ADD": true,
+ "BPF_ALIGNMENT": true,
+ "BPF_ALIGNMENT32": true,
+ "BPF_ALU": true,
+ "BPF_AND": true,
+ "BPF_B": true,
+ "BPF_BUFMODE_BUFFER": true,
+ "BPF_BUFMODE_ZBUF": true,
+ "BPF_DFLTBUFSIZE": true,
+ "BPF_DIRECTION_IN": true,
+ "BPF_DIRECTION_OUT": true,
+ "BPF_DIV": true,
+ "BPF_H": true,
+ "BPF_IMM": true,
+ "BPF_IND": true,
+ "BPF_JA": true,
+ "BPF_JEQ": true,
+ "BPF_JGE": true,
+ "BPF_JGT": true,
+ "BPF_JMP": true,
+ "BPF_JSET": true,
+ "BPF_K": true,
+ "BPF_LD": true,
+ "BPF_LDX": true,
+ "BPF_LEN": true,
+ "BPF_LSH": true,
+ "BPF_MAJOR_VERSION": true,
+ "BPF_MAXBUFSIZE": true,
+ "BPF_MAXINSNS": true,
+ "BPF_MEM": true,
+ "BPF_MEMWORDS": true,
+ "BPF_MINBUFSIZE": true,
+ "BPF_MINOR_VERSION": true,
+ "BPF_MISC": true,
+ "BPF_MSH": true,
+ "BPF_MUL": true,
+ "BPF_NEG": true,
+ "BPF_OR": true,
+ "BPF_RELEASE": true,
+ "BPF_RET": true,
+ "BPF_RSH": true,
+ "BPF_ST": true,
+ "BPF_STX": true,
+ "BPF_SUB": true,
+ "BPF_TAX": true,
+ "BPF_TXA": true,
+ "BPF_T_BINTIME": true,
+ "BPF_T_BINTIME_FAST": true,
+ "BPF_T_BINTIME_MONOTONIC": true,
+ "BPF_T_BINTIME_MONOTONIC_FAST": true,
+ "BPF_T_FAST": true,
+ "BPF_T_FLAG_MASK": true,
+ "BPF_T_FORMAT_MASK": true,
+ "BPF_T_MICROTIME": true,
+ "BPF_T_MICROTIME_FAST": true,
+ "BPF_T_MICROTIME_MONOTONIC": true,
+ "BPF_T_MICROTIME_MONOTONIC_FAST": true,
+ "BPF_T_MONOTONIC": true,
+ "BPF_T_MONOTONIC_FAST": true,
+ "BPF_T_NANOTIME": true,
+ "BPF_T_NANOTIME_FAST": true,
+ "BPF_T_NANOTIME_MONOTONIC": true,
+ "BPF_T_NANOTIME_MONOTONIC_FAST": true,
+ "BPF_T_NONE": true,
+ "BPF_T_NORMAL": true,
+ "BPF_W": true,
+ "BPF_X": true,
+ "BRKINT": true,
+ "Bind": true,
+ "BindToDevice": true,
+ "BpfBuflen": true,
+ "BpfDatalink": true,
+ "BpfHdr": true,
+ "BpfHeadercmpl": true,
+ "BpfInsn": true,
+ "BpfInterface": true,
+ "BpfJump": true,
+ "BpfProgram": true,
+ "BpfStat": true,
+ "BpfStats": true,
+ "BpfStmt": true,
+ "BpfTimeout": true,
+ "BpfTimeval": true,
+ "BpfVersion": true,
+ "BpfZbuf": true,
+ "BpfZbufHeader": true,
+ "ByHandleFileInformation": true,
+ "BytePtrFromString": true,
+ "ByteSliceFromString": true,
+ "CCR0_FLUSH": true,
+ "CERT_CHAIN_POLICY_AUTHENTICODE": true,
+ "CERT_CHAIN_POLICY_AUTHENTICODE_TS": true,
+ "CERT_CHAIN_POLICY_BASE": true,
+ "CERT_CHAIN_POLICY_BASIC_CONSTRAINTS": true,
+ "CERT_CHAIN_POLICY_EV": true,
+ "CERT_CHAIN_POLICY_MICROSOFT_ROOT": true,
+ "CERT_CHAIN_POLICY_NT_AUTH": true,
+ "CERT_CHAIN_POLICY_SSL": true,
+ "CERT_E_CN_NO_MATCH": true,
+ "CERT_E_EXPIRED": true,
+ "CERT_E_PURPOSE": true,
+ "CERT_E_ROLE": true,
+ "CERT_E_UNTRUSTEDROOT": true,
+ "CERT_STORE_ADD_ALWAYS": true,
+ "CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG": true,
+ "CERT_STORE_PROV_MEMORY": true,
+ "CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT": true,
+ "CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT": true,
+ "CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT": true,
+ "CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT": true,
+ "CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT": true,
+ "CERT_TRUST_INVALID_BASIC_CONSTRAINTS": true,
+ "CERT_TRUST_INVALID_EXTENSION": true,
+ "CERT_TRUST_INVALID_NAME_CONSTRAINTS": true,
+ "CERT_TRUST_INVALID_POLICY_CONSTRAINTS": true,
+ "CERT_TRUST_IS_CYCLIC": true,
+ "CERT_TRUST_IS_EXPLICIT_DISTRUST": true,
+ "CERT_TRUST_IS_NOT_SIGNATURE_VALID": true,
+ "CERT_TRUST_IS_NOT_TIME_VALID": true,
+ "CERT_TRUST_IS_NOT_VALID_FOR_USAGE": true,
+ "CERT_TRUST_IS_OFFLINE_REVOCATION": true,
+ "CERT_TRUST_IS_REVOKED": true,
+ "CERT_TRUST_IS_UNTRUSTED_ROOT": true,
+ "CERT_TRUST_NO_ERROR": true,
+ "CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY": true,
+ "CERT_TRUST_REVOCATION_STATUS_UNKNOWN": true,
+ "CFLUSH": true,
+ "CLOCAL": true,
+ "CLONE_CHILD_CLEARTID": true,
+ "CLONE_CHILD_SETTID": true,
+ "CLONE_CSIGNAL": true,
+ "CLONE_DETACHED": true,
+ "CLONE_FILES": true,
+ "CLONE_FS": true,
+ "CLONE_IO": true,
+ "CLONE_NEWIPC": true,
+ "CLONE_NEWNET": true,
+ "CLONE_NEWNS": true,
+ "CLONE_NEWPID": true,
+ "CLONE_NEWUSER": true,
+ "CLONE_NEWUTS": true,
+ "CLONE_PARENT": true,
+ "CLONE_PARENT_SETTID": true,
+ "CLONE_PID": true,
+ "CLONE_PTRACE": true,
+ "CLONE_SETTLS": true,
+ "CLONE_SIGHAND": true,
+ "CLONE_SYSVSEM": true,
+ "CLONE_THREAD": true,
+ "CLONE_UNTRACED": true,
+ "CLONE_VFORK": true,
+ "CLONE_VM": true,
+ "CPUID_CFLUSH": true,
+ "CREAD": true,
+ "CREATE_ALWAYS": true,
+ "CREATE_NEW": true,
+ "CREATE_NEW_PROCESS_GROUP": true,
+ "CREATE_UNICODE_ENVIRONMENT": true,
+ "CRYPT_DEFAULT_CONTAINER_OPTIONAL": true,
+ "CRYPT_DELETEKEYSET": true,
+ "CRYPT_MACHINE_KEYSET": true,
+ "CRYPT_NEWKEYSET": true,
+ "CRYPT_SILENT": true,
+ "CRYPT_VERIFYCONTEXT": true,
+ "CS5": true,
+ "CS6": true,
+ "CS7": true,
+ "CS8": true,
+ "CSIZE": true,
+ "CSTART": true,
+ "CSTATUS": true,
+ "CSTOP": true,
+ "CSTOPB": true,
+ "CSUSP": true,
+ "CTL_MAXNAME": true,
+ "CTL_NET": true,
+ "CTL_QUERY": true,
+ "CTRL_BREAK_EVENT": true,
+ "CTRL_C_EVENT": true,
+ "CancelIo": true,
+ "CancelIoEx": true,
+ "CertAddCertificateContextToStore": true,
+ "CertChainContext": true,
+ "CertChainElement": true,
+ "CertChainPara": true,
+ "CertChainPolicyPara": true,
+ "CertChainPolicyStatus": true,
+ "CertCloseStore": true,
+ "CertContext": true,
+ "CertCreateCertificateContext": true,
+ "CertEnhKeyUsage": true,
+ "CertEnumCertificatesInStore": true,
+ "CertFreeCertificateChain": true,
+ "CertFreeCertificateContext": true,
+ "CertGetCertificateChain": true,
+ "CertInfo": true,
+ "CertOpenStore": true,
+ "CertOpenSystemStore": true,
+ "CertRevocationCrlInfo": true,
+ "CertRevocationInfo": true,
+ "CertSimpleChain": true,
+ "CertTrustListInfo": true,
+ "CertTrustStatus": true,
+ "CertUsageMatch": true,
+ "CertVerifyCertificateChainPolicy": true,
+ "Chdir": true,
+ "CheckBpfVersion": true,
+ "Chflags": true,
+ "Chmod": true,
+ "Chown": true,
+ "Chroot": true,
+ "Clearenv": true,
+ "Close": true,
+ "CloseHandle": true,
+ "CloseOnExec": true,
+ "Closesocket": true,
+ "CmsgLen": true,
+ "CmsgSpace": true,
+ "Cmsghdr": true,
+ "CommandLineToArgv": true,
+ "ComputerName": true,
+ "Conn": true,
+ "Connect": true,
+ "ConnectEx": true,
+ "ConvertSidToStringSid": true,
+ "ConvertStringSidToSid": true,
+ "CopySid": true,
+ "Creat": true,
+ "CreateDirectory": true,
+ "CreateFile": true,
+ "CreateFileMapping": true,
+ "CreateHardLink": true,
+ "CreateIoCompletionPort": true,
+ "CreatePipe": true,
+ "CreateProcess": true,
+ "CreateProcessAsUser": true,
+ "CreateSymbolicLink": true,
+ "CreateToolhelp32Snapshot": true,
+ "Credential": true,
+ "CryptAcquireContext": true,
+ "CryptGenRandom": true,
+ "CryptReleaseContext": true,
+ "DIOCBSFLUSH": true,
+ "DIOCOSFPFLUSH": true,
+ "DLL": true,
+ "DLLError": true,
+ "DLT_A429": true,
+ "DLT_A653_ICM": true,
+ "DLT_AIRONET_HEADER": true,
+ "DLT_AOS": true,
+ "DLT_APPLE_IP_OVER_IEEE1394": true,
+ "DLT_ARCNET": true,
+ "DLT_ARCNET_LINUX": true,
+ "DLT_ATM_CLIP": true,
+ "DLT_ATM_RFC1483": true,
+ "DLT_AURORA": true,
+ "DLT_AX25": true,
+ "DLT_AX25_KISS": true,
+ "DLT_BACNET_MS_TP": true,
+ "DLT_BLUETOOTH_HCI_H4": true,
+ "DLT_BLUETOOTH_HCI_H4_WITH_PHDR": true,
+ "DLT_CAN20B": true,
+ "DLT_CAN_SOCKETCAN": true,
+ "DLT_CHAOS": true,
+ "DLT_CHDLC": true,
+ "DLT_CISCO_IOS": true,
+ "DLT_C_HDLC": true,
+ "DLT_C_HDLC_WITH_DIR": true,
+ "DLT_DBUS": true,
+ "DLT_DECT": true,
+ "DLT_DOCSIS": true,
+ "DLT_DVB_CI": true,
+ "DLT_ECONET": true,
+ "DLT_EN10MB": true,
+ "DLT_EN3MB": true,
+ "DLT_ENC": true,
+ "DLT_ERF": true,
+ "DLT_ERF_ETH": true,
+ "DLT_ERF_POS": true,
+ "DLT_FC_2": true,
+ "DLT_FC_2_WITH_FRAME_DELIMS": true,
+ "DLT_FDDI": true,
+ "DLT_FLEXRAY": true,
+ "DLT_FRELAY": true,
+ "DLT_FRELAY_WITH_DIR": true,
+ "DLT_GCOM_SERIAL": true,
+ "DLT_GCOM_T1E1": true,
+ "DLT_GPF_F": true,
+ "DLT_GPF_T": true,
+ "DLT_GPRS_LLC": true,
+ "DLT_GSMTAP_ABIS": true,
+ "DLT_GSMTAP_UM": true,
+ "DLT_HDLC": true,
+ "DLT_HHDLC": true,
+ "DLT_HIPPI": true,
+ "DLT_IBM_SN": true,
+ "DLT_IBM_SP": true,
+ "DLT_IEEE802": true,
+ "DLT_IEEE802_11": true,
+ "DLT_IEEE802_11_RADIO": true,
+ "DLT_IEEE802_11_RADIO_AVS": true,
+ "DLT_IEEE802_15_4": true,
+ "DLT_IEEE802_15_4_LINUX": true,
+ "DLT_IEEE802_15_4_NOFCS": true,
+ "DLT_IEEE802_15_4_NONASK_PHY": true,
+ "DLT_IEEE802_16_MAC_CPS": true,
+ "DLT_IEEE802_16_MAC_CPS_RADIO": true,
+ "DLT_IPFILTER": true,
+ "DLT_IPMB": true,
+ "DLT_IPMB_LINUX": true,
+ "DLT_IPNET": true,
+ "DLT_IPOIB": true,
+ "DLT_IPV4": true,
+ "DLT_IPV6": true,
+ "DLT_IP_OVER_FC": true,
+ "DLT_JUNIPER_ATM1": true,
+ "DLT_JUNIPER_ATM2": true,
+ "DLT_JUNIPER_ATM_CEMIC": true,
+ "DLT_JUNIPER_CHDLC": true,
+ "DLT_JUNIPER_ES": true,
+ "DLT_JUNIPER_ETHER": true,
+ "DLT_JUNIPER_FIBRECHANNEL": true,
+ "DLT_JUNIPER_FRELAY": true,
+ "DLT_JUNIPER_GGSN": true,
+ "DLT_JUNIPER_ISM": true,
+ "DLT_JUNIPER_MFR": true,
+ "DLT_JUNIPER_MLFR": true,
+ "DLT_JUNIPER_MLPPP": true,
+ "DLT_JUNIPER_MONITOR": true,
+ "DLT_JUNIPER_PIC_PEER": true,
+ "DLT_JUNIPER_PPP": true,
+ "DLT_JUNIPER_PPPOE": true,
+ "DLT_JUNIPER_PPPOE_ATM": true,
+ "DLT_JUNIPER_SERVICES": true,
+ "DLT_JUNIPER_SRX_E2E": true,
+ "DLT_JUNIPER_ST": true,
+ "DLT_JUNIPER_VP": true,
+ "DLT_JUNIPER_VS": true,
+ "DLT_LAPB_WITH_DIR": true,
+ "DLT_LAPD": true,
+ "DLT_LIN": true,
+ "DLT_LINUX_EVDEV": true,
+ "DLT_LINUX_IRDA": true,
+ "DLT_LINUX_LAPD": true,
+ "DLT_LINUX_PPP_WITHDIRECTION": true,
+ "DLT_LINUX_SLL": true,
+ "DLT_LOOP": true,
+ "DLT_LTALK": true,
+ "DLT_MATCHING_MAX": true,
+ "DLT_MATCHING_MIN": true,
+ "DLT_MFR": true,
+ "DLT_MOST": true,
+ "DLT_MPEG_2_TS": true,
+ "DLT_MPLS": true,
+ "DLT_MTP2": true,
+ "DLT_MTP2_WITH_PHDR": true,
+ "DLT_MTP3": true,
+ "DLT_MUX27010": true,
+ "DLT_NETANALYZER": true,
+ "DLT_NETANALYZER_TRANSPARENT": true,
+ "DLT_NFC_LLCP": true,
+ "DLT_NFLOG": true,
+ "DLT_NG40": true,
+ "DLT_NULL": true,
+ "DLT_PCI_EXP": true,
+ "DLT_PFLOG": true,
+ "DLT_PFSYNC": true,
+ "DLT_PPI": true,
+ "DLT_PPP": true,
+ "DLT_PPP_BSDOS": true,
+ "DLT_PPP_ETHER": true,
+ "DLT_PPP_PPPD": true,
+ "DLT_PPP_SERIAL": true,
+ "DLT_PPP_WITH_DIR": true,
+ "DLT_PPP_WITH_DIRECTION": true,
+ "DLT_PRISM_HEADER": true,
+ "DLT_PRONET": true,
+ "DLT_RAIF1": true,
+ "DLT_RAW": true,
+ "DLT_RAWAF_MASK": true,
+ "DLT_RIO": true,
+ "DLT_SCCP": true,
+ "DLT_SITA": true,
+ "DLT_SLIP": true,
+ "DLT_SLIP_BSDOS": true,
+ "DLT_STANAG_5066_D_PDU": true,
+ "DLT_SUNATM": true,
+ "DLT_SYMANTEC_FIREWALL": true,
+ "DLT_TZSP": true,
+ "DLT_USB": true,
+ "DLT_USB_LINUX": true,
+ "DLT_USB_LINUX_MMAPPED": true,
+ "DLT_USER0": true,
+ "DLT_USER1": true,
+ "DLT_USER10": true,
+ "DLT_USER11": true,
+ "DLT_USER12": true,
+ "DLT_USER13": true,
+ "DLT_USER14": true,
+ "DLT_USER15": true,
+ "DLT_USER2": true,
+ "DLT_USER3": true,
+ "DLT_USER4": true,
+ "DLT_USER5": true,
+ "DLT_USER6": true,
+ "DLT_USER7": true,
+ "DLT_USER8": true,
+ "DLT_USER9": true,
+ "DLT_WIHART": true,
+ "DLT_X2E_SERIAL": true,
+ "DLT_X2E_XORAYA": true,
+ "DNSMXData": true,
+ "DNSPTRData": true,
+ "DNSRecord": true,
+ "DNSSRVData": true,
+ "DNSTXTData": true,
+ "DNS_INFO_NO_RECORDS": true,
+ "DNS_TYPE_A": true,
+ "DNS_TYPE_A6": true,
+ "DNS_TYPE_AAAA": true,
+ "DNS_TYPE_ADDRS": true,
+ "DNS_TYPE_AFSDB": true,
+ "DNS_TYPE_ALL": true,
+ "DNS_TYPE_ANY": true,
+ "DNS_TYPE_ATMA": true,
+ "DNS_TYPE_AXFR": true,
+ "DNS_TYPE_CERT": true,
+ "DNS_TYPE_CNAME": true,
+ "DNS_TYPE_DHCID": true,
+ "DNS_TYPE_DNAME": true,
+ "DNS_TYPE_DNSKEY": true,
+ "DNS_TYPE_DS": true,
+ "DNS_TYPE_EID": true,
+ "DNS_TYPE_GID": true,
+ "DNS_TYPE_GPOS": true,
+ "DNS_TYPE_HINFO": true,
+ "DNS_TYPE_ISDN": true,
+ "DNS_TYPE_IXFR": true,
+ "DNS_TYPE_KEY": true,
+ "DNS_TYPE_KX": true,
+ "DNS_TYPE_LOC": true,
+ "DNS_TYPE_MAILA": true,
+ "DNS_TYPE_MAILB": true,
+ "DNS_TYPE_MB": true,
+ "DNS_TYPE_MD": true,
+ "DNS_TYPE_MF": true,
+ "DNS_TYPE_MG": true,
+ "DNS_TYPE_MINFO": true,
+ "DNS_TYPE_MR": true,
+ "DNS_TYPE_MX": true,
+ "DNS_TYPE_NAPTR": true,
+ "DNS_TYPE_NBSTAT": true,
+ "DNS_TYPE_NIMLOC": true,
+ "DNS_TYPE_NS": true,
+ "DNS_TYPE_NSAP": true,
+ "DNS_TYPE_NSAPPTR": true,
+ "DNS_TYPE_NSEC": true,
+ "DNS_TYPE_NULL": true,
+ "DNS_TYPE_NXT": true,
+ "DNS_TYPE_OPT": true,
+ "DNS_TYPE_PTR": true,
+ "DNS_TYPE_PX": true,
+ "DNS_TYPE_RP": true,
+ "DNS_TYPE_RRSIG": true,
+ "DNS_TYPE_RT": true,
+ "DNS_TYPE_SIG": true,
+ "DNS_TYPE_SINK": true,
+ "DNS_TYPE_SOA": true,
+ "DNS_TYPE_SRV": true,
+ "DNS_TYPE_TEXT": true,
+ "DNS_TYPE_TKEY": true,
+ "DNS_TYPE_TSIG": true,
+ "DNS_TYPE_UID": true,
+ "DNS_TYPE_UINFO": true,
+ "DNS_TYPE_UNSPEC": true,
+ "DNS_TYPE_WINS": true,
+ "DNS_TYPE_WINSR": true,
+ "DNS_TYPE_WKS": true,
+ "DNS_TYPE_X25": true,
+ "DT_BLK": true,
+ "DT_CHR": true,
+ "DT_DIR": true,
+ "DT_FIFO": true,
+ "DT_LNK": true,
+ "DT_REG": true,
+ "DT_SOCK": true,
+ "DT_UNKNOWN": true,
+ "DT_WHT": true,
+ "DUPLICATE_CLOSE_SOURCE": true,
+ "DUPLICATE_SAME_ACCESS": true,
+ "DeleteFile": true,
+ "DetachLsf": true,
+ "DeviceIoControl": true,
+ "Dirent": true,
+ "DnsNameCompare": true,
+ "DnsQuery": true,
+ "DnsRecordListFree": true,
+ "DnsSectionAdditional": true,
+ "DnsSectionAnswer": true,
+ "DnsSectionAuthority": true,
+ "DnsSectionQuestion": true,
+ "Dup": true,
+ "Dup2": true,
+ "Dup3": true,
+ "DuplicateHandle": true,
+ "E2BIG": true,
+ "EACCES": true,
+ "EADDRINUSE": true,
+ "EADDRNOTAVAIL": true,
+ "EADV": true,
+ "EAFNOSUPPORT": true,
+ "EAGAIN": true,
+ "EALREADY": true,
+ "EAUTH": true,
+ "EBADARCH": true,
+ "EBADE": true,
+ "EBADEXEC": true,
+ "EBADF": true,
+ "EBADFD": true,
+ "EBADMACHO": true,
+ "EBADMSG": true,
+ "EBADR": true,
+ "EBADRPC": true,
+ "EBADRQC": true,
+ "EBADSLT": true,
+ "EBFONT": true,
+ "EBUSY": true,
+ "ECANCELED": true,
+ "ECAPMODE": true,
+ "ECHILD": true,
+ "ECHO": true,
+ "ECHOCTL": true,
+ "ECHOE": true,
+ "ECHOK": true,
+ "ECHOKE": true,
+ "ECHONL": true,
+ "ECHOPRT": true,
+ "ECHRNG": true,
+ "ECOMM": true,
+ "ECONNABORTED": true,
+ "ECONNREFUSED": true,
+ "ECONNRESET": true,
+ "EDEADLK": true,
+ "EDEADLOCK": true,
+ "EDESTADDRREQ": true,
+ "EDEVERR": true,
+ "EDOM": true,
+ "EDOOFUS": true,
+ "EDOTDOT": true,
+ "EDQUOT": true,
+ "EEXIST": true,
+ "EFAULT": true,
+ "EFBIG": true,
+ "EFER_LMA": true,
+ "EFER_LME": true,
+ "EFER_NXE": true,
+ "EFER_SCE": true,
+ "EFTYPE": true,
+ "EHOSTDOWN": true,
+ "EHOSTUNREACH": true,
+ "EHWPOISON": true,
+ "EIDRM": true,
+ "EILSEQ": true,
+ "EINPROGRESS": true,
+ "EINTR": true,
+ "EINVAL": true,
+ "EIO": true,
+ "EIPSEC": true,
+ "EISCONN": true,
+ "EISDIR": true,
+ "EISNAM": true,
+ "EKEYEXPIRED": true,
+ "EKEYREJECTED": true,
+ "EKEYREVOKED": true,
+ "EL2HLT": true,
+ "EL2NSYNC": true,
+ "EL3HLT": true,
+ "EL3RST": true,
+ "ELAST": true,
+ "ELF_NGREG": true,
+ "ELF_PRARGSZ": true,
+ "ELIBACC": true,
+ "ELIBBAD": true,
+ "ELIBEXEC": true,
+ "ELIBMAX": true,
+ "ELIBSCN": true,
+ "ELNRNG": true,
+ "ELOOP": true,
+ "EMEDIUMTYPE": true,
+ "EMFILE": true,
+ "EMLINK": true,
+ "EMSGSIZE": true,
+ "EMT_TAGOVF": true,
+ "EMULTIHOP": true,
+ "EMUL_ENABLED": true,
+ "EMUL_LINUX": true,
+ "EMUL_LINUX32": true,
+ "EMUL_MAXID": true,
+ "EMUL_NATIVE": true,
+ "ENAMETOOLONG": true,
+ "ENAVAIL": true,
+ "ENDRUNDISC": true,
+ "ENEEDAUTH": true,
+ "ENETDOWN": true,
+ "ENETRESET": true,
+ "ENETUNREACH": true,
+ "ENFILE": true,
+ "ENOANO": true,
+ "ENOATTR": true,
+ "ENOBUFS": true,
+ "ENOCSI": true,
+ "ENODATA": true,
+ "ENODEV": true,
+ "ENOENT": true,
+ "ENOEXEC": true,
+ "ENOKEY": true,
+ "ENOLCK": true,
+ "ENOLINK": true,
+ "ENOMEDIUM": true,
+ "ENOMEM": true,
+ "ENOMSG": true,
+ "ENONET": true,
+ "ENOPKG": true,
+ "ENOPOLICY": true,
+ "ENOPROTOOPT": true,
+ "ENOSPC": true,
+ "ENOSR": true,
+ "ENOSTR": true,
+ "ENOSYS": true,
+ "ENOTBLK": true,
+ "ENOTCAPABLE": true,
+ "ENOTCONN": true,
+ "ENOTDIR": true,
+ "ENOTEMPTY": true,
+ "ENOTNAM": true,
+ "ENOTRECOVERABLE": true,
+ "ENOTSOCK": true,
+ "ENOTSUP": true,
+ "ENOTTY": true,
+ "ENOTUNIQ": true,
+ "ENXIO": true,
+ "EN_SW_CTL_INF": true,
+ "EN_SW_CTL_PREC": true,
+ "EN_SW_CTL_ROUND": true,
+ "EN_SW_DATACHAIN": true,
+ "EN_SW_DENORM": true,
+ "EN_SW_INVOP": true,
+ "EN_SW_OVERFLOW": true,
+ "EN_SW_PRECLOSS": true,
+ "EN_SW_UNDERFLOW": true,
+ "EN_SW_ZERODIV": true,
+ "EOPNOTSUPP": true,
+ "EOVERFLOW": true,
+ "EOWNERDEAD": true,
+ "EPERM": true,
+ "EPFNOSUPPORT": true,
+ "EPIPE": true,
+ "EPOLLERR": true,
+ "EPOLLET": true,
+ "EPOLLHUP": true,
+ "EPOLLIN": true,
+ "EPOLLMSG": true,
+ "EPOLLONESHOT": true,
+ "EPOLLOUT": true,
+ "EPOLLPRI": true,
+ "EPOLLRDBAND": true,
+ "EPOLLRDHUP": true,
+ "EPOLLRDNORM": true,
+ "EPOLLWRBAND": true,
+ "EPOLLWRNORM": true,
+ "EPOLL_CLOEXEC": true,
+ "EPOLL_CTL_ADD": true,
+ "EPOLL_CTL_DEL": true,
+ "EPOLL_CTL_MOD": true,
+ "EPOLL_NONBLOCK": true,
+ "EPROCLIM": true,
+ "EPROCUNAVAIL": true,
+ "EPROGMISMATCH": true,
+ "EPROGUNAVAIL": true,
+ "EPROTO": true,
+ "EPROTONOSUPPORT": true,
+ "EPROTOTYPE": true,
+ "EPWROFF": true,
+ "ERANGE": true,
+ "EREMCHG": true,
+ "EREMOTE": true,
+ "EREMOTEIO": true,
+ "ERESTART": true,
+ "ERFKILL": true,
+ "EROFS": true,
+ "ERPCMISMATCH": true,
+ "ERROR_ACCESS_DENIED": true,
+ "ERROR_ALREADY_EXISTS": true,
+ "ERROR_BROKEN_PIPE": true,
+ "ERROR_BUFFER_OVERFLOW": true,
+ "ERROR_DIR_NOT_EMPTY": true,
+ "ERROR_ENVVAR_NOT_FOUND": true,
+ "ERROR_FILE_EXISTS": true,
+ "ERROR_FILE_NOT_FOUND": true,
+ "ERROR_HANDLE_EOF": true,
+ "ERROR_INSUFFICIENT_BUFFER": true,
+ "ERROR_IO_PENDING": true,
+ "ERROR_MOD_NOT_FOUND": true,
+ "ERROR_MORE_DATA": true,
+ "ERROR_NETNAME_DELETED": true,
+ "ERROR_NOT_FOUND": true,
+ "ERROR_NO_MORE_FILES": true,
+ "ERROR_OPERATION_ABORTED": true,
+ "ERROR_PATH_NOT_FOUND": true,
+ "ERROR_PRIVILEGE_NOT_HELD": true,
+ "ERROR_PROC_NOT_FOUND": true,
+ "ESHLIBVERS": true,
+ "ESHUTDOWN": true,
+ "ESOCKTNOSUPPORT": true,
+ "ESPIPE": true,
+ "ESRCH": true,
+ "ESRMNT": true,
+ "ESTALE": true,
+ "ESTRPIPE": true,
+ "ETHERCAP_JUMBO_MTU": true,
+ "ETHERCAP_VLAN_HWTAGGING": true,
+ "ETHERCAP_VLAN_MTU": true,
+ "ETHERMIN": true,
+ "ETHERMTU": true,
+ "ETHERMTU_JUMBO": true,
+ "ETHERTYPE_8023": true,
+ "ETHERTYPE_AARP": true,
+ "ETHERTYPE_ACCTON": true,
+ "ETHERTYPE_AEONIC": true,
+ "ETHERTYPE_ALPHA": true,
+ "ETHERTYPE_AMBER": true,
+ "ETHERTYPE_AMOEBA": true,
+ "ETHERTYPE_AOE": true,
+ "ETHERTYPE_APOLLO": true,
+ "ETHERTYPE_APOLLODOMAIN": true,
+ "ETHERTYPE_APPLETALK": true,
+ "ETHERTYPE_APPLITEK": true,
+ "ETHERTYPE_ARGONAUT": true,
+ "ETHERTYPE_ARP": true,
+ "ETHERTYPE_AT": true,
+ "ETHERTYPE_ATALK": true,
+ "ETHERTYPE_ATOMIC": true,
+ "ETHERTYPE_ATT": true,
+ "ETHERTYPE_ATTSTANFORD": true,
+ "ETHERTYPE_AUTOPHON": true,
+ "ETHERTYPE_AXIS": true,
+ "ETHERTYPE_BCLOOP": true,
+ "ETHERTYPE_BOFL": true,
+ "ETHERTYPE_CABLETRON": true,
+ "ETHERTYPE_CHAOS": true,
+ "ETHERTYPE_COMDESIGN": true,
+ "ETHERTYPE_COMPUGRAPHIC": true,
+ "ETHERTYPE_COUNTERPOINT": true,
+ "ETHERTYPE_CRONUS": true,
+ "ETHERTYPE_CRONUSVLN": true,
+ "ETHERTYPE_DCA": true,
+ "ETHERTYPE_DDE": true,
+ "ETHERTYPE_DEBNI": true,
+ "ETHERTYPE_DECAM": true,
+ "ETHERTYPE_DECCUST": true,
+ "ETHERTYPE_DECDIAG": true,
+ "ETHERTYPE_DECDNS": true,
+ "ETHERTYPE_DECDTS": true,
+ "ETHERTYPE_DECEXPER": true,
+ "ETHERTYPE_DECLAST": true,
+ "ETHERTYPE_DECLTM": true,
+ "ETHERTYPE_DECMUMPS": true,
+ "ETHERTYPE_DECNETBIOS": true,
+ "ETHERTYPE_DELTACON": true,
+ "ETHERTYPE_DIDDLE": true,
+ "ETHERTYPE_DLOG1": true,
+ "ETHERTYPE_DLOG2": true,
+ "ETHERTYPE_DN": true,
+ "ETHERTYPE_DOGFIGHT": true,
+ "ETHERTYPE_DSMD": true,
+ "ETHERTYPE_ECMA": true,
+ "ETHERTYPE_ENCRYPT": true,
+ "ETHERTYPE_ES": true,
+ "ETHERTYPE_EXCELAN": true,
+ "ETHERTYPE_EXPERDATA": true,
+ "ETHERTYPE_FLIP": true,
+ "ETHERTYPE_FLOWCONTROL": true,
+ "ETHERTYPE_FRARP": true,
+ "ETHERTYPE_GENDYN": true,
+ "ETHERTYPE_HAYES": true,
+ "ETHERTYPE_HIPPI_FP": true,
+ "ETHERTYPE_HITACHI": true,
+ "ETHERTYPE_HP": true,
+ "ETHERTYPE_IEEEPUP": true,
+ "ETHERTYPE_IEEEPUPAT": true,
+ "ETHERTYPE_IMLBL": true,
+ "ETHERTYPE_IMLBLDIAG": true,
+ "ETHERTYPE_IP": true,
+ "ETHERTYPE_IPAS": true,
+ "ETHERTYPE_IPV6": true,
+ "ETHERTYPE_IPX": true,
+ "ETHERTYPE_IPXNEW": true,
+ "ETHERTYPE_KALPANA": true,
+ "ETHERTYPE_LANBRIDGE": true,
+ "ETHERTYPE_LANPROBE": true,
+ "ETHERTYPE_LAT": true,
+ "ETHERTYPE_LBACK": true,
+ "ETHERTYPE_LITTLE": true,
+ "ETHERTYPE_LLDP": true,
+ "ETHERTYPE_LOGICRAFT": true,
+ "ETHERTYPE_LOOPBACK": true,
+ "ETHERTYPE_MATRA": true,
+ "ETHERTYPE_MAX": true,
+ "ETHERTYPE_MERIT": true,
+ "ETHERTYPE_MICP": true,
+ "ETHERTYPE_MOPDL": true,
+ "ETHERTYPE_MOPRC": true,
+ "ETHERTYPE_MOTOROLA": true,
+ "ETHERTYPE_MPLS": true,
+ "ETHERTYPE_MPLS_MCAST": true,
+ "ETHERTYPE_MUMPS": true,
+ "ETHERTYPE_NBPCC": true,
+ "ETHERTYPE_NBPCLAIM": true,
+ "ETHERTYPE_NBPCLREQ": true,
+ "ETHERTYPE_NBPCLRSP": true,
+ "ETHERTYPE_NBPCREQ": true,
+ "ETHERTYPE_NBPCRSP": true,
+ "ETHERTYPE_NBPDG": true,
+ "ETHERTYPE_NBPDGB": true,
+ "ETHERTYPE_NBPDLTE": true,
+ "ETHERTYPE_NBPRAR": true,
+ "ETHERTYPE_NBPRAS": true,
+ "ETHERTYPE_NBPRST": true,
+ "ETHERTYPE_NBPSCD": true,
+ "ETHERTYPE_NBPVCD": true,
+ "ETHERTYPE_NBS": true,
+ "ETHERTYPE_NCD": true,
+ "ETHERTYPE_NESTAR": true,
+ "ETHERTYPE_NETBEUI": true,
+ "ETHERTYPE_NOVELL": true,
+ "ETHERTYPE_NS": true,
+ "ETHERTYPE_NSAT": true,
+ "ETHERTYPE_NSCOMPAT": true,
+ "ETHERTYPE_NTRAILER": true,
+ "ETHERTYPE_OS9": true,
+ "ETHERTYPE_OS9NET": true,
+ "ETHERTYPE_PACER": true,
+ "ETHERTYPE_PAE": true,
+ "ETHERTYPE_PCS": true,
+ "ETHERTYPE_PLANNING": true,
+ "ETHERTYPE_PPP": true,
+ "ETHERTYPE_PPPOE": true,
+ "ETHERTYPE_PPPOEDISC": true,
+ "ETHERTYPE_PRIMENTS": true,
+ "ETHERTYPE_PUP": true,
+ "ETHERTYPE_PUPAT": true,
+ "ETHERTYPE_QINQ": true,
+ "ETHERTYPE_RACAL": true,
+ "ETHERTYPE_RATIONAL": true,
+ "ETHERTYPE_RAWFR": true,
+ "ETHERTYPE_RCL": true,
+ "ETHERTYPE_RDP": true,
+ "ETHERTYPE_RETIX": true,
+ "ETHERTYPE_REVARP": true,
+ "ETHERTYPE_SCA": true,
+ "ETHERTYPE_SECTRA": true,
+ "ETHERTYPE_SECUREDATA": true,
+ "ETHERTYPE_SGITW": true,
+ "ETHERTYPE_SG_BOUNCE": true,
+ "ETHERTYPE_SG_DIAG": true,
+ "ETHERTYPE_SG_NETGAMES": true,
+ "ETHERTYPE_SG_RESV": true,
+ "ETHERTYPE_SIMNET": true,
+ "ETHERTYPE_SLOW": true,
+ "ETHERTYPE_SLOWPROTOCOLS": true,
+ "ETHERTYPE_SNA": true,
+ "ETHERTYPE_SNMP": true,
+ "ETHERTYPE_SONIX": true,
+ "ETHERTYPE_SPIDER": true,
+ "ETHERTYPE_SPRITE": true,
+ "ETHERTYPE_STP": true,
+ "ETHERTYPE_TALARIS": true,
+ "ETHERTYPE_TALARISMC": true,
+ "ETHERTYPE_TCPCOMP": true,
+ "ETHERTYPE_TCPSM": true,
+ "ETHERTYPE_TEC": true,
+ "ETHERTYPE_TIGAN": true,
+ "ETHERTYPE_TRAIL": true,
+ "ETHERTYPE_TRANSETHER": true,
+ "ETHERTYPE_TYMSHARE": true,
+ "ETHERTYPE_UBBST": true,
+ "ETHERTYPE_UBDEBUG": true,
+ "ETHERTYPE_UBDIAGLOOP": true,
+ "ETHERTYPE_UBDL": true,
+ "ETHERTYPE_UBNIU": true,
+ "ETHERTYPE_UBNMC": true,
+ "ETHERTYPE_VALID": true,
+ "ETHERTYPE_VARIAN": true,
+ "ETHERTYPE_VAXELN": true,
+ "ETHERTYPE_VEECO": true,
+ "ETHERTYPE_VEXP": true,
+ "ETHERTYPE_VGLAB": true,
+ "ETHERTYPE_VINES": true,
+ "ETHERTYPE_VINESECHO": true,
+ "ETHERTYPE_VINESLOOP": true,
+ "ETHERTYPE_VITAL": true,
+ "ETHERTYPE_VLAN": true,
+ "ETHERTYPE_VLTLMAN": true,
+ "ETHERTYPE_VPROD": true,
+ "ETHERTYPE_VURESERVED": true,
+ "ETHERTYPE_WATERLOO": true,
+ "ETHERTYPE_WELLFLEET": true,
+ "ETHERTYPE_X25": true,
+ "ETHERTYPE_X75": true,
+ "ETHERTYPE_XNSSM": true,
+ "ETHERTYPE_XTP": true,
+ "ETHER_ADDR_LEN": true,
+ "ETHER_ALIGN": true,
+ "ETHER_CRC_LEN": true,
+ "ETHER_CRC_POLY_BE": true,
+ "ETHER_CRC_POLY_LE": true,
+ "ETHER_HDR_LEN": true,
+ "ETHER_MAX_DIX_LEN": true,
+ "ETHER_MAX_LEN": true,
+ "ETHER_MAX_LEN_JUMBO": true,
+ "ETHER_MIN_LEN": true,
+ "ETHER_PPPOE_ENCAP_LEN": true,
+ "ETHER_TYPE_LEN": true,
+ "ETHER_VLAN_ENCAP_LEN": true,
+ "ETH_P_1588": true,
+ "ETH_P_8021Q": true,
+ "ETH_P_802_2": true,
+ "ETH_P_802_3": true,
+ "ETH_P_AARP": true,
+ "ETH_P_ALL": true,
+ "ETH_P_AOE": true,
+ "ETH_P_ARCNET": true,
+ "ETH_P_ARP": true,
+ "ETH_P_ATALK": true,
+ "ETH_P_ATMFATE": true,
+ "ETH_P_ATMMPOA": true,
+ "ETH_P_AX25": true,
+ "ETH_P_BPQ": true,
+ "ETH_P_CAIF": true,
+ "ETH_P_CAN": true,
+ "ETH_P_CONTROL": true,
+ "ETH_P_CUST": true,
+ "ETH_P_DDCMP": true,
+ "ETH_P_DEC": true,
+ "ETH_P_DIAG": true,
+ "ETH_P_DNA_DL": true,
+ "ETH_P_DNA_RC": true,
+ "ETH_P_DNA_RT": true,
+ "ETH_P_DSA": true,
+ "ETH_P_ECONET": true,
+ "ETH_P_EDSA": true,
+ "ETH_P_FCOE": true,
+ "ETH_P_FIP": true,
+ "ETH_P_HDLC": true,
+ "ETH_P_IEEE802154": true,
+ "ETH_P_IEEEPUP": true,
+ "ETH_P_IEEEPUPAT": true,
+ "ETH_P_IP": true,
+ "ETH_P_IPV6": true,
+ "ETH_P_IPX": true,
+ "ETH_P_IRDA": true,
+ "ETH_P_LAT": true,
+ "ETH_P_LINK_CTL": true,
+ "ETH_P_LOCALTALK": true,
+ "ETH_P_LOOP": true,
+ "ETH_P_MOBITEX": true,
+ "ETH_P_MPLS_MC": true,
+ "ETH_P_MPLS_UC": true,
+ "ETH_P_PAE": true,
+ "ETH_P_PAUSE": true,
+ "ETH_P_PHONET": true,
+ "ETH_P_PPPTALK": true,
+ "ETH_P_PPP_DISC": true,
+ "ETH_P_PPP_MP": true,
+ "ETH_P_PPP_SES": true,
+ "ETH_P_PUP": true,
+ "ETH_P_PUPAT": true,
+ "ETH_P_RARP": true,
+ "ETH_P_SCA": true,
+ "ETH_P_SLOW": true,
+ "ETH_P_SNAP": true,
+ "ETH_P_TEB": true,
+ "ETH_P_TIPC": true,
+ "ETH_P_TRAILER": true,
+ "ETH_P_TR_802_2": true,
+ "ETH_P_WAN_PPP": true,
+ "ETH_P_WCCP": true,
+ "ETH_P_X25": true,
+ "ETIME": true,
+ "ETIMEDOUT": true,
+ "ETOOMANYREFS": true,
+ "ETXTBSY": true,
+ "EUCLEAN": true,
+ "EUNATCH": true,
+ "EUSERS": true,
+ "EVFILT_AIO": true,
+ "EVFILT_FS": true,
+ "EVFILT_LIO": true,
+ "EVFILT_MACHPORT": true,
+ "EVFILT_PROC": true,
+ "EVFILT_READ": true,
+ "EVFILT_SIGNAL": true,
+ "EVFILT_SYSCOUNT": true,
+ "EVFILT_THREADMARKER": true,
+ "EVFILT_TIMER": true,
+ "EVFILT_USER": true,
+ "EVFILT_VM": true,
+ "EVFILT_VNODE": true,
+ "EVFILT_WRITE": true,
+ "EV_ADD": true,
+ "EV_CLEAR": true,
+ "EV_DELETE": true,
+ "EV_DISABLE": true,
+ "EV_DISPATCH": true,
+ "EV_DROP": true,
+ "EV_ENABLE": true,
+ "EV_EOF": true,
+ "EV_ERROR": true,
+ "EV_FLAG0": true,
+ "EV_FLAG1": true,
+ "EV_ONESHOT": true,
+ "EV_OOBAND": true,
+ "EV_POLL": true,
+ "EV_RECEIPT": true,
+ "EV_SYSFLAGS": true,
+ "EWINDOWS": true,
+ "EWOULDBLOCK": true,
+ "EXDEV": true,
+ "EXFULL": true,
+ "EXTA": true,
+ "EXTB": true,
+ "EXTPROC": true,
+ "Environ": true,
+ "EpollCreate": true,
+ "EpollCreate1": true,
+ "EpollCtl": true,
+ "EpollEvent": true,
+ "EpollWait": true,
+ "Errno": true,
+ "EscapeArg": true,
+ "Exchangedata": true,
+ "Exec": true,
+ "Exit": true,
+ "ExitProcess": true,
+ "FD_CLOEXEC": true,
+ "FD_SETSIZE": true,
+ "FILE_ACTION_ADDED": true,
+ "FILE_ACTION_MODIFIED": true,
+ "FILE_ACTION_REMOVED": true,
+ "FILE_ACTION_RENAMED_NEW_NAME": true,
+ "FILE_ACTION_RENAMED_OLD_NAME": true,
+ "FILE_APPEND_DATA": true,
+ "FILE_ATTRIBUTE_ARCHIVE": true,
+ "FILE_ATTRIBUTE_DIRECTORY": true,
+ "FILE_ATTRIBUTE_HIDDEN": true,
+ "FILE_ATTRIBUTE_NORMAL": true,
+ "FILE_ATTRIBUTE_READONLY": true,
+ "FILE_ATTRIBUTE_REPARSE_POINT": true,
+ "FILE_ATTRIBUTE_SYSTEM": true,
+ "FILE_BEGIN": true,
+ "FILE_CURRENT": true,
+ "FILE_END": true,
+ "FILE_FLAG_BACKUP_SEMANTICS": true,
+ "FILE_FLAG_OPEN_REPARSE_POINT": true,
+ "FILE_FLAG_OVERLAPPED": true,
+ "FILE_LIST_DIRECTORY": true,
+ "FILE_MAP_COPY": true,
+ "FILE_MAP_EXECUTE": true,
+ "FILE_MAP_READ": true,
+ "FILE_MAP_WRITE": true,
+ "FILE_NOTIFY_CHANGE_ATTRIBUTES": true,
+ "FILE_NOTIFY_CHANGE_CREATION": true,
+ "FILE_NOTIFY_CHANGE_DIR_NAME": true,
+ "FILE_NOTIFY_CHANGE_FILE_NAME": true,
+ "FILE_NOTIFY_CHANGE_LAST_ACCESS": true,
+ "FILE_NOTIFY_CHANGE_LAST_WRITE": true,
+ "FILE_NOTIFY_CHANGE_SIZE": true,
+ "FILE_SHARE_DELETE": true,
+ "FILE_SHARE_READ": true,
+ "FILE_SHARE_WRITE": true,
+ "FILE_SKIP_COMPLETION_PORT_ON_SUCCESS": true,
+ "FILE_SKIP_SET_EVENT_ON_HANDLE": true,
+ "FILE_TYPE_CHAR": true,
+ "FILE_TYPE_DISK": true,
+ "FILE_TYPE_PIPE": true,
+ "FILE_TYPE_REMOTE": true,
+ "FILE_TYPE_UNKNOWN": true,
+ "FILE_WRITE_ATTRIBUTES": true,
+ "FLUSHO": true,
+ "FORMAT_MESSAGE_ALLOCATE_BUFFER": true,
+ "FORMAT_MESSAGE_ARGUMENT_ARRAY": true,
+ "FORMAT_MESSAGE_FROM_HMODULE": true,
+ "FORMAT_MESSAGE_FROM_STRING": true,
+ "FORMAT_MESSAGE_FROM_SYSTEM": true,
+ "FORMAT_MESSAGE_IGNORE_INSERTS": true,
+ "FORMAT_MESSAGE_MAX_WIDTH_MASK": true,
+ "FSCTL_GET_REPARSE_POINT": true,
+ "F_ADDFILESIGS": true,
+ "F_ADDSIGS": true,
+ "F_ALLOCATEALL": true,
+ "F_ALLOCATECONTIG": true,
+ "F_CANCEL": true,
+ "F_CHKCLEAN": true,
+ "F_CLOSEM": true,
+ "F_DUP2FD": true,
+ "F_DUP2FD_CLOEXEC": true,
+ "F_DUPFD": true,
+ "F_DUPFD_CLOEXEC": true,
+ "F_EXLCK": true,
+ "F_FLUSH_DATA": true,
+ "F_FREEZE_FS": true,
+ "F_FSCTL": true,
+ "F_FSDIRMASK": true,
+ "F_FSIN": true,
+ "F_FSINOUT": true,
+ "F_FSOUT": true,
+ "F_FSPRIV": true,
+ "F_FSVOID": true,
+ "F_FULLFSYNC": true,
+ "F_GETFD": true,
+ "F_GETFL": true,
+ "F_GETLEASE": true,
+ "F_GETLK": true,
+ "F_GETLK64": true,
+ "F_GETLKPID": true,
+ "F_GETNOSIGPIPE": true,
+ "F_GETOWN": true,
+ "F_GETOWN_EX": true,
+ "F_GETPATH": true,
+ "F_GETPATH_MTMINFO": true,
+ "F_GETPIPE_SZ": true,
+ "F_GETPROTECTIONCLASS": true,
+ "F_GETSIG": true,
+ "F_GLOBAL_NOCACHE": true,
+ "F_LOCK": true,
+ "F_LOG2PHYS": true,
+ "F_LOG2PHYS_EXT": true,
+ "F_MARKDEPENDENCY": true,
+ "F_MAXFD": true,
+ "F_NOCACHE": true,
+ "F_NODIRECT": true,
+ "F_NOTIFY": true,
+ "F_OGETLK": true,
+ "F_OK": true,
+ "F_OSETLK": true,
+ "F_OSETLKW": true,
+ "F_PARAM_MASK": true,
+ "F_PARAM_MAX": true,
+ "F_PATHPKG_CHECK": true,
+ "F_PEOFPOSMODE": true,
+ "F_PREALLOCATE": true,
+ "F_RDADVISE": true,
+ "F_RDAHEAD": true,
+ "F_RDLCK": true,
+ "F_READAHEAD": true,
+ "F_READBOOTSTRAP": true,
+ "F_SETBACKINGSTORE": true,
+ "F_SETFD": true,
+ "F_SETFL": true,
+ "F_SETLEASE": true,
+ "F_SETLK": true,
+ "F_SETLK64": true,
+ "F_SETLKW": true,
+ "F_SETLKW64": true,
+ "F_SETLK_REMOTE": true,
+ "F_SETNOSIGPIPE": true,
+ "F_SETOWN": true,
+ "F_SETOWN_EX": true,
+ "F_SETPIPE_SZ": true,
+ "F_SETPROTECTIONCLASS": true,
+ "F_SETSIG": true,
+ "F_SETSIZE": true,
+ "F_SHLCK": true,
+ "F_TEST": true,
+ "F_THAW_FS": true,
+ "F_TLOCK": true,
+ "F_ULOCK": true,
+ "F_UNLCK": true,
+ "F_UNLCKSYS": true,
+ "F_VOLPOSMODE": true,
+ "F_WRITEBOOTSTRAP": true,
+ "F_WRLCK": true,
+ "Faccessat": true,
+ "Fallocate": true,
+ "Fbootstraptransfer_t": true,
+ "Fchdir": true,
+ "Fchflags": true,
+ "Fchmod": true,
+ "Fchmodat": true,
+ "Fchown": true,
+ "Fchownat": true,
+ "FcntlFlock": true,
+ "FdSet": true,
+ "Fdatasync": true,
+ "FileNotifyInformation": true,
+ "Filetime": true,
+ "FindClose": true,
+ "FindFirstFile": true,
+ "FindNextFile": true,
+ "Flock": true,
+ "Flock_t": true,
+ "FlushBpf": true,
+ "FlushFileBuffers": true,
+ "FlushViewOfFile": true,
+ "ForkExec": true,
+ "ForkLock": true,
+ "FormatMessage": true,
+ "Fpathconf": true,
+ "FreeAddrInfoW": true,
+ "FreeEnvironmentStrings": true,
+ "FreeLibrary": true,
+ "Fsid": true,
+ "Fstat": true,
+ "Fstatfs": true,
+ "Fstore_t": true,
+ "Fsync": true,
+ "Ftruncate": true,
+ "FullPath": true,
+ "Futimes": true,
+ "Futimesat": true,
+ "GENERIC_ALL": true,
+ "GENERIC_EXECUTE": true,
+ "GENERIC_READ": true,
+ "GENERIC_WRITE": true,
+ "GUID": true,
+ "GetAcceptExSockaddrs": true,
+ "GetAdaptersInfo": true,
+ "GetAddrInfoW": true,
+ "GetCommandLine": true,
+ "GetComputerName": true,
+ "GetConsoleMode": true,
+ "GetCurrentDirectory": true,
+ "GetCurrentProcess": true,
+ "GetEnvironmentStrings": true,
+ "GetEnvironmentVariable": true,
+ "GetExitCodeProcess": true,
+ "GetFileAttributes": true,
+ "GetFileAttributesEx": true,
+ "GetFileExInfoStandard": true,
+ "GetFileExMaxInfoLevel": true,
+ "GetFileInformationByHandle": true,
+ "GetFileType": true,
+ "GetFullPathName": true,
+ "GetHostByName": true,
+ "GetIfEntry": true,
+ "GetLastError": true,
+ "GetLengthSid": true,
+ "GetLongPathName": true,
+ "GetProcAddress": true,
+ "GetProcessTimes": true,
+ "GetProtoByName": true,
+ "GetQueuedCompletionStatus": true,
+ "GetServByName": true,
+ "GetShortPathName": true,
+ "GetStartupInfo": true,
+ "GetStdHandle": true,
+ "GetSystemTimeAsFileTime": true,
+ "GetTempPath": true,
+ "GetTimeZoneInformation": true,
+ "GetTokenInformation": true,
+ "GetUserNameEx": true,
+ "GetUserProfileDirectory": true,
+ "GetVersion": true,
+ "Getcwd": true,
+ "Getdents": true,
+ "Getdirentries": true,
+ "Getdtablesize": true,
+ "Getegid": true,
+ "Getenv": true,
+ "Geteuid": true,
+ "Getfsstat": true,
+ "Getgid": true,
+ "Getgroups": true,
+ "Getpagesize": true,
+ "Getpeername": true,
+ "Getpgid": true,
+ "Getpgrp": true,
+ "Getpid": true,
+ "Getppid": true,
+ "Getpriority": true,
+ "Getrlimit": true,
+ "Getrusage": true,
+ "Getsid": true,
+ "Getsockname": true,
+ "Getsockopt": true,
+ "GetsockoptByte": true,
+ "GetsockoptICMPv6Filter": true,
+ "GetsockoptIPMreq": true,
+ "GetsockoptIPMreqn": true,
+ "GetsockoptIPv6MTUInfo": true,
+ "GetsockoptIPv6Mreq": true,
+ "GetsockoptInet4Addr": true,
+ "GetsockoptInt": true,
+ "GetsockoptUcred": true,
+ "Gettid": true,
+ "Gettimeofday": true,
+ "Getuid": true,
+ "Getwd": true,
+ "Getxattr": true,
+ "HANDLE_FLAG_INHERIT": true,
+ "HKEY_CLASSES_ROOT": true,
+ "HKEY_CURRENT_CONFIG": true,
+ "HKEY_CURRENT_USER": true,
+ "HKEY_DYN_DATA": true,
+ "HKEY_LOCAL_MACHINE": true,
+ "HKEY_PERFORMANCE_DATA": true,
+ "HKEY_USERS": true,
+ "HUPCL": true,
+ "Handle": true,
+ "Hostent": true,
+ "ICANON": true,
+ "ICMP6_FILTER": true,
+ "ICMPV6_FILTER": true,
+ "ICMPv6Filter": true,
+ "ICRNL": true,
+ "IEXTEN": true,
+ "IFAN_ARRIVAL": true,
+ "IFAN_DEPARTURE": true,
+ "IFA_ADDRESS": true,
+ "IFA_ANYCAST": true,
+ "IFA_BROADCAST": true,
+ "IFA_CACHEINFO": true,
+ "IFA_F_DADFAILED": true,
+ "IFA_F_DEPRECATED": true,
+ "IFA_F_HOMEADDRESS": true,
+ "IFA_F_NODAD": true,
+ "IFA_F_OPTIMISTIC": true,
+ "IFA_F_PERMANENT": true,
+ "IFA_F_SECONDARY": true,
+ "IFA_F_TEMPORARY": true,
+ "IFA_F_TENTATIVE": true,
+ "IFA_LABEL": true,
+ "IFA_LOCAL": true,
+ "IFA_MAX": true,
+ "IFA_MULTICAST": true,
+ "IFA_ROUTE": true,
+ "IFA_UNSPEC": true,
+ "IFF_ALLMULTI": true,
+ "IFF_ALTPHYS": true,
+ "IFF_AUTOMEDIA": true,
+ "IFF_BROADCAST": true,
+ "IFF_CANTCHANGE": true,
+ "IFF_CANTCONFIG": true,
+ "IFF_DEBUG": true,
+ "IFF_DRV_OACTIVE": true,
+ "IFF_DRV_RUNNING": true,
+ "IFF_DYING": true,
+ "IFF_DYNAMIC": true,
+ "IFF_LINK0": true,
+ "IFF_LINK1": true,
+ "IFF_LINK2": true,
+ "IFF_LOOPBACK": true,
+ "IFF_MASTER": true,
+ "IFF_MONITOR": true,
+ "IFF_MULTICAST": true,
+ "IFF_NOARP": true,
+ "IFF_NOTRAILERS": true,
+ "IFF_NO_PI": true,
+ "IFF_OACTIVE": true,
+ "IFF_ONE_QUEUE": true,
+ "IFF_POINTOPOINT": true,
+ "IFF_POINTTOPOINT": true,
+ "IFF_PORTSEL": true,
+ "IFF_PPROMISC": true,
+ "IFF_PROMISC": true,
+ "IFF_RENAMING": true,
+ "IFF_RUNNING": true,
+ "IFF_SIMPLEX": true,
+ "IFF_SLAVE": true,
+ "IFF_SMART": true,
+ "IFF_STATICARP": true,
+ "IFF_TAP": true,
+ "IFF_TUN": true,
+ "IFF_TUN_EXCL": true,
+ "IFF_UP": true,
+ "IFF_VNET_HDR": true,
+ "IFLA_ADDRESS": true,
+ "IFLA_BROADCAST": true,
+ "IFLA_COST": true,
+ "IFLA_IFALIAS": true,
+ "IFLA_IFNAME": true,
+ "IFLA_LINK": true,
+ "IFLA_LINKINFO": true,
+ "IFLA_LINKMODE": true,
+ "IFLA_MAP": true,
+ "IFLA_MASTER": true,
+ "IFLA_MAX": true,
+ "IFLA_MTU": true,
+ "IFLA_NET_NS_PID": true,
+ "IFLA_OPERSTATE": true,
+ "IFLA_PRIORITY": true,
+ "IFLA_PROTINFO": true,
+ "IFLA_QDISC": true,
+ "IFLA_STATS": true,
+ "IFLA_TXQLEN": true,
+ "IFLA_UNSPEC": true,
+ "IFLA_WEIGHT": true,
+ "IFLA_WIRELESS": true,
+ "IFNAMSIZ": true,
+ "IFT_1822": true,
+ "IFT_A12MPPSWITCH": true,
+ "IFT_AAL2": true,
+ "IFT_AAL5": true,
+ "IFT_ADSL": true,
+ "IFT_AFLANE8023": true,
+ "IFT_AFLANE8025": true,
+ "IFT_ARAP": true,
+ "IFT_ARCNET": true,
+ "IFT_ARCNETPLUS": true,
+ "IFT_ASYNC": true,
+ "IFT_ATM": true,
+ "IFT_ATMDXI": true,
+ "IFT_ATMFUNI": true,
+ "IFT_ATMIMA": true,
+ "IFT_ATMLOGICAL": true,
+ "IFT_ATMRADIO": true,
+ "IFT_ATMSUBINTERFACE": true,
+ "IFT_ATMVCIENDPT": true,
+ "IFT_ATMVIRTUAL": true,
+ "IFT_BGPPOLICYACCOUNTING": true,
+ "IFT_BLUETOOTH": true,
+ "IFT_BRIDGE": true,
+ "IFT_BSC": true,
+ "IFT_CARP": true,
+ "IFT_CCTEMUL": true,
+ "IFT_CELLULAR": true,
+ "IFT_CEPT": true,
+ "IFT_CES": true,
+ "IFT_CHANNEL": true,
+ "IFT_CNR": true,
+ "IFT_COFFEE": true,
+ "IFT_COMPOSITELINK": true,
+ "IFT_DCN": true,
+ "IFT_DIGITALPOWERLINE": true,
+ "IFT_DIGITALWRAPPEROVERHEADCHANNEL": true,
+ "IFT_DLSW": true,
+ "IFT_DOCSCABLEDOWNSTREAM": true,
+ "IFT_DOCSCABLEMACLAYER": true,
+ "IFT_DOCSCABLEUPSTREAM": true,
+ "IFT_DOCSCABLEUPSTREAMCHANNEL": true,
+ "IFT_DS0": true,
+ "IFT_DS0BUNDLE": true,
+ "IFT_DS1FDL": true,
+ "IFT_DS3": true,
+ "IFT_DTM": true,
+ "IFT_DUMMY": true,
+ "IFT_DVBASILN": true,
+ "IFT_DVBASIOUT": true,
+ "IFT_DVBRCCDOWNSTREAM": true,
+ "IFT_DVBRCCMACLAYER": true,
+ "IFT_DVBRCCUPSTREAM": true,
+ "IFT_ECONET": true,
+ "IFT_ENC": true,
+ "IFT_EON": true,
+ "IFT_EPLRS": true,
+ "IFT_ESCON": true,
+ "IFT_ETHER": true,
+ "IFT_FAITH": true,
+ "IFT_FAST": true,
+ "IFT_FASTETHER": true,
+ "IFT_FASTETHERFX": true,
+ "IFT_FDDI": true,
+ "IFT_FIBRECHANNEL": true,
+ "IFT_FRAMERELAYINTERCONNECT": true,
+ "IFT_FRAMERELAYMPI": true,
+ "IFT_FRDLCIENDPT": true,
+ "IFT_FRELAY": true,
+ "IFT_FRELAYDCE": true,
+ "IFT_FRF16MFRBUNDLE": true,
+ "IFT_FRFORWARD": true,
+ "IFT_G703AT2MB": true,
+ "IFT_G703AT64K": true,
+ "IFT_GIF": true,
+ "IFT_GIGABITETHERNET": true,
+ "IFT_GR303IDT": true,
+ "IFT_GR303RDT": true,
+ "IFT_H323GATEKEEPER": true,
+ "IFT_H323PROXY": true,
+ "IFT_HDH1822": true,
+ "IFT_HDLC": true,
+ "IFT_HDSL2": true,
+ "IFT_HIPERLAN2": true,
+ "IFT_HIPPI": true,
+ "IFT_HIPPIINTERFACE": true,
+ "IFT_HOSTPAD": true,
+ "IFT_HSSI": true,
+ "IFT_HY": true,
+ "IFT_IBM370PARCHAN": true,
+ "IFT_IDSL": true,
+ "IFT_IEEE1394": true,
+ "IFT_IEEE80211": true,
+ "IFT_IEEE80212": true,
+ "IFT_IEEE8023ADLAG": true,
+ "IFT_IFGSN": true,
+ "IFT_IMT": true,
+ "IFT_INFINIBAND": true,
+ "IFT_INTERLEAVE": true,
+ "IFT_IP": true,
+ "IFT_IPFORWARD": true,
+ "IFT_IPOVERATM": true,
+ "IFT_IPOVERCDLC": true,
+ "IFT_IPOVERCLAW": true,
+ "IFT_IPSWITCH": true,
+ "IFT_IPXIP": true,
+ "IFT_ISDN": true,
+ "IFT_ISDNBASIC": true,
+ "IFT_ISDNPRIMARY": true,
+ "IFT_ISDNS": true,
+ "IFT_ISDNU": true,
+ "IFT_ISO88022LLC": true,
+ "IFT_ISO88023": true,
+ "IFT_ISO88024": true,
+ "IFT_ISO88025": true,
+ "IFT_ISO88025CRFPINT": true,
+ "IFT_ISO88025DTR": true,
+ "IFT_ISO88025FIBER": true,
+ "IFT_ISO88026": true,
+ "IFT_ISUP": true,
+ "IFT_L2VLAN": true,
+ "IFT_L3IPVLAN": true,
+ "IFT_L3IPXVLAN": true,
+ "IFT_LAPB": true,
+ "IFT_LAPD": true,
+ "IFT_LAPF": true,
+ "IFT_LINEGROUP": true,
+ "IFT_LOCALTALK": true,
+ "IFT_LOOP": true,
+ "IFT_MEDIAMAILOVERIP": true,
+ "IFT_MFSIGLINK": true,
+ "IFT_MIOX25": true,
+ "IFT_MODEM": true,
+ "IFT_MPC": true,
+ "IFT_MPLS": true,
+ "IFT_MPLSTUNNEL": true,
+ "IFT_MSDSL": true,
+ "IFT_MVL": true,
+ "IFT_MYRINET": true,
+ "IFT_NFAS": true,
+ "IFT_NSIP": true,
+ "IFT_OPTICALCHANNEL": true,
+ "IFT_OPTICALTRANSPORT": true,
+ "IFT_OTHER": true,
+ "IFT_P10": true,
+ "IFT_P80": true,
+ "IFT_PARA": true,
+ "IFT_PDP": true,
+ "IFT_PFLOG": true,
+ "IFT_PFLOW": true,
+ "IFT_PFSYNC": true,
+ "IFT_PLC": true,
+ "IFT_PON155": true,
+ "IFT_PON622": true,
+ "IFT_POS": true,
+ "IFT_PPP": true,
+ "IFT_PPPMULTILINKBUNDLE": true,
+ "IFT_PROPATM": true,
+ "IFT_PROPBWAP2MP": true,
+ "IFT_PROPCNLS": true,
+ "IFT_PROPDOCSWIRELESSDOWNSTREAM": true,
+ "IFT_PROPDOCSWIRELESSMACLAYER": true,
+ "IFT_PROPDOCSWIRELESSUPSTREAM": true,
+ "IFT_PROPMUX": true,
+ "IFT_PROPVIRTUAL": true,
+ "IFT_PROPWIRELESSP2P": true,
+ "IFT_PTPSERIAL": true,
+ "IFT_PVC": true,
+ "IFT_Q2931": true,
+ "IFT_QLLC": true,
+ "IFT_RADIOMAC": true,
+ "IFT_RADSL": true,
+ "IFT_REACHDSL": true,
+ "IFT_RFC1483": true,
+ "IFT_RS232": true,
+ "IFT_RSRB": true,
+ "IFT_SDLC": true,
+ "IFT_SDSL": true,
+ "IFT_SHDSL": true,
+ "IFT_SIP": true,
+ "IFT_SIPSIG": true,
+ "IFT_SIPTG": true,
+ "IFT_SLIP": true,
+ "IFT_SMDSDXI": true,
+ "IFT_SMDSICIP": true,
+ "IFT_SONET": true,
+ "IFT_SONETOVERHEADCHANNEL": true,
+ "IFT_SONETPATH": true,
+ "IFT_SONETVT": true,
+ "IFT_SRP": true,
+ "IFT_SS7SIGLINK": true,
+ "IFT_STACKTOSTACK": true,
+ "IFT_STARLAN": true,
+ "IFT_STF": true,
+ "IFT_T1": true,
+ "IFT_TDLC": true,
+ "IFT_TELINK": true,
+ "IFT_TERMPAD": true,
+ "IFT_TR008": true,
+ "IFT_TRANSPHDLC": true,
+ "IFT_TUNNEL": true,
+ "IFT_ULTRA": true,
+ "IFT_USB": true,
+ "IFT_V11": true,
+ "IFT_V35": true,
+ "IFT_V36": true,
+ "IFT_V37": true,
+ "IFT_VDSL": true,
+ "IFT_VIRTUALIPADDRESS": true,
+ "IFT_VIRTUALTG": true,
+ "IFT_VOICEDID": true,
+ "IFT_VOICEEM": true,
+ "IFT_VOICEEMFGD": true,
+ "IFT_VOICEENCAP": true,
+ "IFT_VOICEFGDEANA": true,
+ "IFT_VOICEFXO": true,
+ "IFT_VOICEFXS": true,
+ "IFT_VOICEOVERATM": true,
+ "IFT_VOICEOVERCABLE": true,
+ "IFT_VOICEOVERFRAMERELAY": true,
+ "IFT_VOICEOVERIP": true,
+ "IFT_X213": true,
+ "IFT_X25": true,
+ "IFT_X25DDN": true,
+ "IFT_X25HUNTGROUP": true,
+ "IFT_X25MLP": true,
+ "IFT_X25PLE": true,
+ "IFT_XETHER": true,
+ "IGNBRK": true,
+ "IGNCR": true,
+ "IGNORE": true,
+ "IGNPAR": true,
+ "IMAXBEL": true,
+ "INFINITE": true,
+ "INLCR": true,
+ "INPCK": true,
+ "INVALID_FILE_ATTRIBUTES": true,
+ "IN_ACCESS": true,
+ "IN_ALL_EVENTS": true,
+ "IN_ATTRIB": true,
+ "IN_CLASSA_HOST": true,
+ "IN_CLASSA_MAX": true,
+ "IN_CLASSA_NET": true,
+ "IN_CLASSA_NSHIFT": true,
+ "IN_CLASSB_HOST": true,
+ "IN_CLASSB_MAX": true,
+ "IN_CLASSB_NET": true,
+ "IN_CLASSB_NSHIFT": true,
+ "IN_CLASSC_HOST": true,
+ "IN_CLASSC_NET": true,
+ "IN_CLASSC_NSHIFT": true,
+ "IN_CLASSD_HOST": true,
+ "IN_CLASSD_NET": true,
+ "IN_CLASSD_NSHIFT": true,
+ "IN_CLOEXEC": true,
+ "IN_CLOSE": true,
+ "IN_CLOSE_NOWRITE": true,
+ "IN_CLOSE_WRITE": true,
+ "IN_CREATE": true,
+ "IN_DELETE": true,
+ "IN_DELETE_SELF": true,
+ "IN_DONT_FOLLOW": true,
+ "IN_EXCL_UNLINK": true,
+ "IN_IGNORED": true,
+ "IN_ISDIR": true,
+ "IN_LINKLOCALNETNUM": true,
+ "IN_LOOPBACKNET": true,
+ "IN_MASK_ADD": true,
+ "IN_MODIFY": true,
+ "IN_MOVE": true,
+ "IN_MOVED_FROM": true,
+ "IN_MOVED_TO": true,
+ "IN_MOVE_SELF": true,
+ "IN_NONBLOCK": true,
+ "IN_ONESHOT": true,
+ "IN_ONLYDIR": true,
+ "IN_OPEN": true,
+ "IN_Q_OVERFLOW": true,
+ "IN_RFC3021_HOST": true,
+ "IN_RFC3021_MASK": true,
+ "IN_RFC3021_NET": true,
+ "IN_RFC3021_NSHIFT": true,
+ "IN_UNMOUNT": true,
+ "IOC_IN": true,
+ "IOC_INOUT": true,
+ "IOC_OUT": true,
+ "IOC_VENDOR": true,
+ "IOC_WS2": true,
+ "IO_REPARSE_TAG_SYMLINK": true,
+ "IPMreq": true,
+ "IPMreqn": true,
+ "IPPROTO_3PC": true,
+ "IPPROTO_ADFS": true,
+ "IPPROTO_AH": true,
+ "IPPROTO_AHIP": true,
+ "IPPROTO_APES": true,
+ "IPPROTO_ARGUS": true,
+ "IPPROTO_AX25": true,
+ "IPPROTO_BHA": true,
+ "IPPROTO_BLT": true,
+ "IPPROTO_BRSATMON": true,
+ "IPPROTO_CARP": true,
+ "IPPROTO_CFTP": true,
+ "IPPROTO_CHAOS": true,
+ "IPPROTO_CMTP": true,
+ "IPPROTO_COMP": true,
+ "IPPROTO_CPHB": true,
+ "IPPROTO_CPNX": true,
+ "IPPROTO_DCCP": true,
+ "IPPROTO_DDP": true,
+ "IPPROTO_DGP": true,
+ "IPPROTO_DIVERT": true,
+ "IPPROTO_DIVERT_INIT": true,
+ "IPPROTO_DIVERT_RESP": true,
+ "IPPROTO_DONE": true,
+ "IPPROTO_DSTOPTS": true,
+ "IPPROTO_EGP": true,
+ "IPPROTO_EMCON": true,
+ "IPPROTO_ENCAP": true,
+ "IPPROTO_EON": true,
+ "IPPROTO_ESP": true,
+ "IPPROTO_ETHERIP": true,
+ "IPPROTO_FRAGMENT": true,
+ "IPPROTO_GGP": true,
+ "IPPROTO_GMTP": true,
+ "IPPROTO_GRE": true,
+ "IPPROTO_HELLO": true,
+ "IPPROTO_HMP": true,
+ "IPPROTO_HOPOPTS": true,
+ "IPPROTO_ICMP": true,
+ "IPPROTO_ICMPV6": true,
+ "IPPROTO_IDP": true,
+ "IPPROTO_IDPR": true,
+ "IPPROTO_IDRP": true,
+ "IPPROTO_IGMP": true,
+ "IPPROTO_IGP": true,
+ "IPPROTO_IGRP": true,
+ "IPPROTO_IL": true,
+ "IPPROTO_INLSP": true,
+ "IPPROTO_INP": true,
+ "IPPROTO_IP": true,
+ "IPPROTO_IPCOMP": true,
+ "IPPROTO_IPCV": true,
+ "IPPROTO_IPEIP": true,
+ "IPPROTO_IPIP": true,
+ "IPPROTO_IPPC": true,
+ "IPPROTO_IPV4": true,
+ "IPPROTO_IPV6": true,
+ "IPPROTO_IPV6_ICMP": true,
+ "IPPROTO_IRTP": true,
+ "IPPROTO_KRYPTOLAN": true,
+ "IPPROTO_LARP": true,
+ "IPPROTO_LEAF1": true,
+ "IPPROTO_LEAF2": true,
+ "IPPROTO_MAX": true,
+ "IPPROTO_MAXID": true,
+ "IPPROTO_MEAS": true,
+ "IPPROTO_MH": true,
+ "IPPROTO_MHRP": true,
+ "IPPROTO_MICP": true,
+ "IPPROTO_MOBILE": true,
+ "IPPROTO_MPLS": true,
+ "IPPROTO_MTP": true,
+ "IPPROTO_MUX": true,
+ "IPPROTO_ND": true,
+ "IPPROTO_NHRP": true,
+ "IPPROTO_NONE": true,
+ "IPPROTO_NSP": true,
+ "IPPROTO_NVPII": true,
+ "IPPROTO_OLD_DIVERT": true,
+ "IPPROTO_OSPFIGP": true,
+ "IPPROTO_PFSYNC": true,
+ "IPPROTO_PGM": true,
+ "IPPROTO_PIGP": true,
+ "IPPROTO_PIM": true,
+ "IPPROTO_PRM": true,
+ "IPPROTO_PUP": true,
+ "IPPROTO_PVP": true,
+ "IPPROTO_RAW": true,
+ "IPPROTO_RCCMON": true,
+ "IPPROTO_RDP": true,
+ "IPPROTO_ROUTING": true,
+ "IPPROTO_RSVP": true,
+ "IPPROTO_RVD": true,
+ "IPPROTO_SATEXPAK": true,
+ "IPPROTO_SATMON": true,
+ "IPPROTO_SCCSP": true,
+ "IPPROTO_SCTP": true,
+ "IPPROTO_SDRP": true,
+ "IPPROTO_SEND": true,
+ "IPPROTO_SEP": true,
+ "IPPROTO_SKIP": true,
+ "IPPROTO_SPACER": true,
+ "IPPROTO_SRPC": true,
+ "IPPROTO_ST": true,
+ "IPPROTO_SVMTP": true,
+ "IPPROTO_SWIPE": true,
+ "IPPROTO_TCF": true,
+ "IPPROTO_TCP": true,
+ "IPPROTO_TLSP": true,
+ "IPPROTO_TP": true,
+ "IPPROTO_TPXX": true,
+ "IPPROTO_TRUNK1": true,
+ "IPPROTO_TRUNK2": true,
+ "IPPROTO_TTP": true,
+ "IPPROTO_UDP": true,
+ "IPPROTO_UDPLITE": true,
+ "IPPROTO_VINES": true,
+ "IPPROTO_VISA": true,
+ "IPPROTO_VMTP": true,
+ "IPPROTO_VRRP": true,
+ "IPPROTO_WBEXPAK": true,
+ "IPPROTO_WBMON": true,
+ "IPPROTO_WSN": true,
+ "IPPROTO_XNET": true,
+ "IPPROTO_XTP": true,
+ "IPV6_2292DSTOPTS": true,
+ "IPV6_2292HOPLIMIT": true,
+ "IPV6_2292HOPOPTS": true,
+ "IPV6_2292NEXTHOP": true,
+ "IPV6_2292PKTINFO": true,
+ "IPV6_2292PKTOPTIONS": true,
+ "IPV6_2292RTHDR": true,
+ "IPV6_ADDRFORM": true,
+ "IPV6_ADD_MEMBERSHIP": true,
+ "IPV6_AUTHHDR": true,
+ "IPV6_AUTH_LEVEL": true,
+ "IPV6_AUTOFLOWLABEL": true,
+ "IPV6_BINDANY": true,
+ "IPV6_BINDV6ONLY": true,
+ "IPV6_BOUND_IF": true,
+ "IPV6_CHECKSUM": true,
+ "IPV6_DEFAULT_MULTICAST_HOPS": true,
+ "IPV6_DEFAULT_MULTICAST_LOOP": true,
+ "IPV6_DEFHLIM": true,
+ "IPV6_DONTFRAG": true,
+ "IPV6_DROP_MEMBERSHIP": true,
+ "IPV6_DSTOPTS": true,
+ "IPV6_ESP_NETWORK_LEVEL": true,
+ "IPV6_ESP_TRANS_LEVEL": true,
+ "IPV6_FAITH": true,
+ "IPV6_FLOWINFO_MASK": true,
+ "IPV6_FLOWLABEL_MASK": true,
+ "IPV6_FRAGTTL": true,
+ "IPV6_FW_ADD": true,
+ "IPV6_FW_DEL": true,
+ "IPV6_FW_FLUSH": true,
+ "IPV6_FW_GET": true,
+ "IPV6_FW_ZERO": true,
+ "IPV6_HLIMDEC": true,
+ "IPV6_HOPLIMIT": true,
+ "IPV6_HOPOPTS": true,
+ "IPV6_IPCOMP_LEVEL": true,
+ "IPV6_IPSEC_POLICY": true,
+ "IPV6_JOIN_ANYCAST": true,
+ "IPV6_JOIN_GROUP": true,
+ "IPV6_LEAVE_ANYCAST": true,
+ "IPV6_LEAVE_GROUP": true,
+ "IPV6_MAXHLIM": true,
+ "IPV6_MAXOPTHDR": true,
+ "IPV6_MAXPACKET": true,
+ "IPV6_MAX_GROUP_SRC_FILTER": true,
+ "IPV6_MAX_MEMBERSHIPS": true,
+ "IPV6_MAX_SOCK_SRC_FILTER": true,
+ "IPV6_MIN_MEMBERSHIPS": true,
+ "IPV6_MMTU": true,
+ "IPV6_MSFILTER": true,
+ "IPV6_MTU": true,
+ "IPV6_MTU_DISCOVER": true,
+ "IPV6_MULTICAST_HOPS": true,
+ "IPV6_MULTICAST_IF": true,
+ "IPV6_MULTICAST_LOOP": true,
+ "IPV6_NEXTHOP": true,
+ "IPV6_OPTIONS": true,
+ "IPV6_PATHMTU": true,
+ "IPV6_PIPEX": true,
+ "IPV6_PKTINFO": true,
+ "IPV6_PMTUDISC_DO": true,
+ "IPV6_PMTUDISC_DONT": true,
+ "IPV6_PMTUDISC_PROBE": true,
+ "IPV6_PMTUDISC_WANT": true,
+ "IPV6_PORTRANGE": true,
+ "IPV6_PORTRANGE_DEFAULT": true,
+ "IPV6_PORTRANGE_HIGH": true,
+ "IPV6_PORTRANGE_LOW": true,
+ "IPV6_PREFER_TEMPADDR": true,
+ "IPV6_RECVDSTOPTS": true,
+ "IPV6_RECVDSTPORT": true,
+ "IPV6_RECVERR": true,
+ "IPV6_RECVHOPLIMIT": true,
+ "IPV6_RECVHOPOPTS": true,
+ "IPV6_RECVPATHMTU": true,
+ "IPV6_RECVPKTINFO": true,
+ "IPV6_RECVRTHDR": true,
+ "IPV6_RECVTCLASS": true,
+ "IPV6_ROUTER_ALERT": true,
+ "IPV6_RTABLE": true,
+ "IPV6_RTHDR": true,
+ "IPV6_RTHDRDSTOPTS": true,
+ "IPV6_RTHDR_LOOSE": true,
+ "IPV6_RTHDR_STRICT": true,
+ "IPV6_RTHDR_TYPE_0": true,
+ "IPV6_RXDSTOPTS": true,
+ "IPV6_RXHOPOPTS": true,
+ "IPV6_SOCKOPT_RESERVED1": true,
+ "IPV6_TCLASS": true,
+ "IPV6_UNICAST_HOPS": true,
+ "IPV6_USE_MIN_MTU": true,
+ "IPV6_V6ONLY": true,
+ "IPV6_VERSION": true,
+ "IPV6_VERSION_MASK": true,
+ "IPV6_XFRM_POLICY": true,
+ "IP_ADD_MEMBERSHIP": true,
+ "IP_ADD_SOURCE_MEMBERSHIP": true,
+ "IP_AUTH_LEVEL": true,
+ "IP_BINDANY": true,
+ "IP_BLOCK_SOURCE": true,
+ "IP_BOUND_IF": true,
+ "IP_DEFAULT_MULTICAST_LOOP": true,
+ "IP_DEFAULT_MULTICAST_TTL": true,
+ "IP_DF": true,
+ "IP_DIVERTFL": true,
+ "IP_DONTFRAG": true,
+ "IP_DROP_MEMBERSHIP": true,
+ "IP_DROP_SOURCE_MEMBERSHIP": true,
+ "IP_DUMMYNET3": true,
+ "IP_DUMMYNET_CONFIGURE": true,
+ "IP_DUMMYNET_DEL": true,
+ "IP_DUMMYNET_FLUSH": true,
+ "IP_DUMMYNET_GET": true,
+ "IP_EF": true,
+ "IP_ERRORMTU": true,
+ "IP_ESP_NETWORK_LEVEL": true,
+ "IP_ESP_TRANS_LEVEL": true,
+ "IP_FAITH": true,
+ "IP_FREEBIND": true,
+ "IP_FW3": true,
+ "IP_FW_ADD": true,
+ "IP_FW_DEL": true,
+ "IP_FW_FLUSH": true,
+ "IP_FW_GET": true,
+ "IP_FW_NAT_CFG": true,
+ "IP_FW_NAT_DEL": true,
+ "IP_FW_NAT_GET_CONFIG": true,
+ "IP_FW_NAT_GET_LOG": true,
+ "IP_FW_RESETLOG": true,
+ "IP_FW_TABLE_ADD": true,
+ "IP_FW_TABLE_DEL": true,
+ "IP_FW_TABLE_FLUSH": true,
+ "IP_FW_TABLE_GETSIZE": true,
+ "IP_FW_TABLE_LIST": true,
+ "IP_FW_ZERO": true,
+ "IP_HDRINCL": true,
+ "IP_IPCOMP_LEVEL": true,
+ "IP_IPSECFLOWINFO": true,
+ "IP_IPSEC_LOCAL_AUTH": true,
+ "IP_IPSEC_LOCAL_CRED": true,
+ "IP_IPSEC_LOCAL_ID": true,
+ "IP_IPSEC_POLICY": true,
+ "IP_IPSEC_REMOTE_AUTH": true,
+ "IP_IPSEC_REMOTE_CRED": true,
+ "IP_IPSEC_REMOTE_ID": true,
+ "IP_MAXPACKET": true,
+ "IP_MAX_GROUP_SRC_FILTER": true,
+ "IP_MAX_MEMBERSHIPS": true,
+ "IP_MAX_SOCK_MUTE_FILTER": true,
+ "IP_MAX_SOCK_SRC_FILTER": true,
+ "IP_MAX_SOURCE_FILTER": true,
+ "IP_MF": true,
+ "IP_MINFRAGSIZE": true,
+ "IP_MINTTL": true,
+ "IP_MIN_MEMBERSHIPS": true,
+ "IP_MSFILTER": true,
+ "IP_MSS": true,
+ "IP_MTU": true,
+ "IP_MTU_DISCOVER": true,
+ "IP_MULTICAST_IF": true,
+ "IP_MULTICAST_IFINDEX": true,
+ "IP_MULTICAST_LOOP": true,
+ "IP_MULTICAST_TTL": true,
+ "IP_MULTICAST_VIF": true,
+ "IP_NAT__XXX": true,
+ "IP_OFFMASK": true,
+ "IP_OLD_FW_ADD": true,
+ "IP_OLD_FW_DEL": true,
+ "IP_OLD_FW_FLUSH": true,
+ "IP_OLD_FW_GET": true,
+ "IP_OLD_FW_RESETLOG": true,
+ "IP_OLD_FW_ZERO": true,
+ "IP_ONESBCAST": true,
+ "IP_OPTIONS": true,
+ "IP_ORIGDSTADDR": true,
+ "IP_PASSSEC": true,
+ "IP_PIPEX": true,
+ "IP_PKTINFO": true,
+ "IP_PKTOPTIONS": true,
+ "IP_PMTUDISC": true,
+ "IP_PMTUDISC_DO": true,
+ "IP_PMTUDISC_DONT": true,
+ "IP_PMTUDISC_PROBE": true,
+ "IP_PMTUDISC_WANT": true,
+ "IP_PORTRANGE": true,
+ "IP_PORTRANGE_DEFAULT": true,
+ "IP_PORTRANGE_HIGH": true,
+ "IP_PORTRANGE_LOW": true,
+ "IP_RECVDSTADDR": true,
+ "IP_RECVDSTPORT": true,
+ "IP_RECVERR": true,
+ "IP_RECVIF": true,
+ "IP_RECVOPTS": true,
+ "IP_RECVORIGDSTADDR": true,
+ "IP_RECVPKTINFO": true,
+ "IP_RECVRETOPTS": true,
+ "IP_RECVRTABLE": true,
+ "IP_RECVTOS": true,
+ "IP_RECVTTL": true,
+ "IP_RETOPTS": true,
+ "IP_RF": true,
+ "IP_ROUTER_ALERT": true,
+ "IP_RSVP_OFF": true,
+ "IP_RSVP_ON": true,
+ "IP_RSVP_VIF_OFF": true,
+ "IP_RSVP_VIF_ON": true,
+ "IP_RTABLE": true,
+ "IP_SENDSRCADDR": true,
+ "IP_STRIPHDR": true,
+ "IP_TOS": true,
+ "IP_TRAFFIC_MGT_BACKGROUND": true,
+ "IP_TRANSPARENT": true,
+ "IP_TTL": true,
+ "IP_UNBLOCK_SOURCE": true,
+ "IP_XFRM_POLICY": true,
+ "IPv6MTUInfo": true,
+ "IPv6Mreq": true,
+ "ISIG": true,
+ "ISTRIP": true,
+ "IUCLC": true,
+ "IUTF8": true,
+ "IXANY": true,
+ "IXOFF": true,
+ "IXON": true,
+ "IfAddrmsg": true,
+ "IfAnnounceMsghdr": true,
+ "IfData": true,
+ "IfInfomsg": true,
+ "IfMsghdr": true,
+ "IfaMsghdr": true,
+ "IfmaMsghdr": true,
+ "IfmaMsghdr2": true,
+ "ImplementsGetwd": true,
+ "Inet4Pktinfo": true,
+ "Inet6Pktinfo": true,
+ "InotifyAddWatch": true,
+ "InotifyEvent": true,
+ "InotifyInit": true,
+ "InotifyInit1": true,
+ "InotifyRmWatch": true,
+ "InterfaceAddrMessage": true,
+ "InterfaceAnnounceMessage": true,
+ "InterfaceInfo": true,
+ "InterfaceMessage": true,
+ "InterfaceMulticastAddrMessage": true,
+ "InvalidHandle": true,
+ "Ioperm": true,
+ "Iopl": true,
+ "Iovec": true,
+ "IpAdapterInfo": true,
+ "IpAddrString": true,
+ "IpAddressString": true,
+ "IpMaskString": true,
+ "Issetugid": true,
+ "KEY_ALL_ACCESS": true,
+ "KEY_CREATE_LINK": true,
+ "KEY_CREATE_SUB_KEY": true,
+ "KEY_ENUMERATE_SUB_KEYS": true,
+ "KEY_EXECUTE": true,
+ "KEY_NOTIFY": true,
+ "KEY_QUERY_VALUE": true,
+ "KEY_READ": true,
+ "KEY_SET_VALUE": true,
+ "KEY_WOW64_32KEY": true,
+ "KEY_WOW64_64KEY": true,
+ "KEY_WRITE": true,
+ "Kevent": true,
+ "Kevent_t": true,
+ "Kill": true,
+ "Klogctl": true,
+ "Kqueue": true,
+ "LANG_ENGLISH": true,
+ "LAYERED_PROTOCOL": true,
+ "LCNT_OVERLOAD_FLUSH": true,
+ "LINUX_REBOOT_CMD_CAD_OFF": true,
+ "LINUX_REBOOT_CMD_CAD_ON": true,
+ "LINUX_REBOOT_CMD_HALT": true,
+ "LINUX_REBOOT_CMD_KEXEC": true,
+ "LINUX_REBOOT_CMD_POWER_OFF": true,
+ "LINUX_REBOOT_CMD_RESTART": true,
+ "LINUX_REBOOT_CMD_RESTART2": true,
+ "LINUX_REBOOT_CMD_SW_SUSPEND": true,
+ "LINUX_REBOOT_MAGIC1": true,
+ "LINUX_REBOOT_MAGIC2": true,
+ "LOCK_EX": true,
+ "LOCK_NB": true,
+ "LOCK_SH": true,
+ "LOCK_UN": true,
+ "LazyDLL": true,
+ "LazyProc": true,
+ "Lchown": true,
+ "Linger": true,
+ "Link": true,
+ "Listen": true,
+ "Listxattr": true,
+ "LoadCancelIoEx": true,
+ "LoadConnectEx": true,
+ "LoadCreateSymbolicLink": true,
+ "LoadDLL": true,
+ "LoadGetAddrInfo": true,
+ "LoadLibrary": true,
+ "LoadSetFileCompletionNotificationModes": true,
+ "LocalFree": true,
+ "Log2phys_t": true,
+ "LookupAccountName": true,
+ "LookupAccountSid": true,
+ "LookupSID": true,
+ "LsfJump": true,
+ "LsfSocket": true,
+ "LsfStmt": true,
+ "Lstat": true,
+ "MADV_AUTOSYNC": true,
+ "MADV_CAN_REUSE": true,
+ "MADV_CORE": true,
+ "MADV_DOFORK": true,
+ "MADV_DONTFORK": true,
+ "MADV_DONTNEED": true,
+ "MADV_FREE": true,
+ "MADV_FREE_REUSABLE": true,
+ "MADV_FREE_REUSE": true,
+ "MADV_HUGEPAGE": true,
+ "MADV_HWPOISON": true,
+ "MADV_MERGEABLE": true,
+ "MADV_NOCORE": true,
+ "MADV_NOHUGEPAGE": true,
+ "MADV_NORMAL": true,
+ "MADV_NOSYNC": true,
+ "MADV_PROTECT": true,
+ "MADV_RANDOM": true,
+ "MADV_REMOVE": true,
+ "MADV_SEQUENTIAL": true,
+ "MADV_SPACEAVAIL": true,
+ "MADV_UNMERGEABLE": true,
+ "MADV_WILLNEED": true,
+ "MADV_ZERO_WIRED_PAGES": true,
+ "MAP_32BIT": true,
+ "MAP_ALIGNED_SUPER": true,
+ "MAP_ALIGNMENT_16MB": true,
+ "MAP_ALIGNMENT_1TB": true,
+ "MAP_ALIGNMENT_256TB": true,
+ "MAP_ALIGNMENT_4GB": true,
+ "MAP_ALIGNMENT_64KB": true,
+ "MAP_ALIGNMENT_64PB": true,
+ "MAP_ALIGNMENT_MASK": true,
+ "MAP_ALIGNMENT_SHIFT": true,
+ "MAP_ANON": true,
+ "MAP_ANONYMOUS": true,
+ "MAP_COPY": true,
+ "MAP_DENYWRITE": true,
+ "MAP_EXECUTABLE": true,
+ "MAP_FILE": true,
+ "MAP_FIXED": true,
+ "MAP_FLAGMASK": true,
+ "MAP_GROWSDOWN": true,
+ "MAP_HASSEMAPHORE": true,
+ "MAP_HUGETLB": true,
+ "MAP_INHERIT": true,
+ "MAP_INHERIT_COPY": true,
+ "MAP_INHERIT_DEFAULT": true,
+ "MAP_INHERIT_DONATE_COPY": true,
+ "MAP_INHERIT_NONE": true,
+ "MAP_INHERIT_SHARE": true,
+ "MAP_JIT": true,
+ "MAP_LOCKED": true,
+ "MAP_NOCACHE": true,
+ "MAP_NOCORE": true,
+ "MAP_NOEXTEND": true,
+ "MAP_NONBLOCK": true,
+ "MAP_NORESERVE": true,
+ "MAP_NOSYNC": true,
+ "MAP_POPULATE": true,
+ "MAP_PREFAULT_READ": true,
+ "MAP_PRIVATE": true,
+ "MAP_RENAME": true,
+ "MAP_RESERVED0080": true,
+ "MAP_RESERVED0100": true,
+ "MAP_SHARED": true,
+ "MAP_STACK": true,
+ "MAP_TRYFIXED": true,
+ "MAP_TYPE": true,
+ "MAP_WIRED": true,
+ "MAXIMUM_REPARSE_DATA_BUFFER_SIZE": true,
+ "MAXLEN_IFDESCR": true,
+ "MAXLEN_PHYSADDR": true,
+ "MAX_ADAPTER_ADDRESS_LENGTH": true,
+ "MAX_ADAPTER_DESCRIPTION_LENGTH": true,
+ "MAX_ADAPTER_NAME_LENGTH": true,
+ "MAX_COMPUTERNAME_LENGTH": true,
+ "MAX_INTERFACE_NAME_LEN": true,
+ "MAX_LONG_PATH": true,
+ "MAX_PATH": true,
+ "MAX_PROTOCOL_CHAIN": true,
+ "MCL_CURRENT": true,
+ "MCL_FUTURE": true,
+ "MNT_DETACH": true,
+ "MNT_EXPIRE": true,
+ "MNT_FORCE": true,
+ "MSG_BCAST": true,
+ "MSG_CMSG_CLOEXEC": true,
+ "MSG_COMPAT": true,
+ "MSG_CONFIRM": true,
+ "MSG_CONTROLMBUF": true,
+ "MSG_CTRUNC": true,
+ "MSG_DONTROUTE": true,
+ "MSG_DONTWAIT": true,
+ "MSG_EOF": true,
+ "MSG_EOR": true,
+ "MSG_ERRQUEUE": true,
+ "MSG_FASTOPEN": true,
+ "MSG_FIN": true,
+ "MSG_FLUSH": true,
+ "MSG_HAVEMORE": true,
+ "MSG_HOLD": true,
+ "MSG_IOVUSRSPACE": true,
+ "MSG_LENUSRSPACE": true,
+ "MSG_MCAST": true,
+ "MSG_MORE": true,
+ "MSG_NAMEMBUF": true,
+ "MSG_NBIO": true,
+ "MSG_NEEDSA": true,
+ "MSG_NOSIGNAL": true,
+ "MSG_NOTIFICATION": true,
+ "MSG_OOB": true,
+ "MSG_PEEK": true,
+ "MSG_PROXY": true,
+ "MSG_RCVMORE": true,
+ "MSG_RST": true,
+ "MSG_SEND": true,
+ "MSG_SYN": true,
+ "MSG_TRUNC": true,
+ "MSG_TRYHARD": true,
+ "MSG_USERFLAGS": true,
+ "MSG_WAITALL": true,
+ "MSG_WAITFORONE": true,
+ "MSG_WAITSTREAM": true,
+ "MS_ACTIVE": true,
+ "MS_ASYNC": true,
+ "MS_BIND": true,
+ "MS_DEACTIVATE": true,
+ "MS_DIRSYNC": true,
+ "MS_INVALIDATE": true,
+ "MS_I_VERSION": true,
+ "MS_KERNMOUNT": true,
+ "MS_KILLPAGES": true,
+ "MS_MANDLOCK": true,
+ "MS_MGC_MSK": true,
+ "MS_MGC_VAL": true,
+ "MS_MOVE": true,
+ "MS_NOATIME": true,
+ "MS_NODEV": true,
+ "MS_NODIRATIME": true,
+ "MS_NOEXEC": true,
+ "MS_NOSUID": true,
+ "MS_NOUSER": true,
+ "MS_POSIXACL": true,
+ "MS_PRIVATE": true,
+ "MS_RDONLY": true,
+ "MS_REC": true,
+ "MS_RELATIME": true,
+ "MS_REMOUNT": true,
+ "MS_RMT_MASK": true,
+ "MS_SHARED": true,
+ "MS_SILENT": true,
+ "MS_SLAVE": true,
+ "MS_STRICTATIME": true,
+ "MS_SYNC": true,
+ "MS_SYNCHRONOUS": true,
+ "MS_UNBINDABLE": true,
+ "Madvise": true,
+ "MapViewOfFile": true,
+ "MaxTokenInfoClass": true,
+ "Mclpool": true,
+ "MibIfRow": true,
+ "Mkdir": true,
+ "Mkdirat": true,
+ "Mkfifo": true,
+ "Mknod": true,
+ "Mknodat": true,
+ "Mlock": true,
+ "Mlockall": true,
+ "Mmap": true,
+ "Mount": true,
+ "MoveFile": true,
+ "Mprotect": true,
+ "Msghdr": true,
+ "Munlock": true,
+ "Munlockall": true,
+ "Munmap": true,
+ "MustLoadDLL": true,
+ "NAME_MAX": true,
+ "NETLINK_ADD_MEMBERSHIP": true,
+ "NETLINK_AUDIT": true,
+ "NETLINK_BROADCAST_ERROR": true,
+ "NETLINK_CONNECTOR": true,
+ "NETLINK_DNRTMSG": true,
+ "NETLINK_DROP_MEMBERSHIP": true,
+ "NETLINK_ECRYPTFS": true,
+ "NETLINK_FIB_LOOKUP": true,
+ "NETLINK_FIREWALL": true,
+ "NETLINK_GENERIC": true,
+ "NETLINK_INET_DIAG": true,
+ "NETLINK_IP6_FW": true,
+ "NETLINK_ISCSI": true,
+ "NETLINK_KOBJECT_UEVENT": true,
+ "NETLINK_NETFILTER": true,
+ "NETLINK_NFLOG": true,
+ "NETLINK_NO_ENOBUFS": true,
+ "NETLINK_PKTINFO": true,
+ "NETLINK_RDMA": true,
+ "NETLINK_ROUTE": true,
+ "NETLINK_SCSITRANSPORT": true,
+ "NETLINK_SELINUX": true,
+ "NETLINK_UNUSED": true,
+ "NETLINK_USERSOCK": true,
+ "NETLINK_XFRM": true,
+ "NET_RT_DUMP": true,
+ "NET_RT_DUMP2": true,
+ "NET_RT_FLAGS": true,
+ "NET_RT_IFLIST": true,
+ "NET_RT_IFLIST2": true,
+ "NET_RT_IFLISTL": true,
+ "NET_RT_IFMALIST": true,
+ "NET_RT_MAXID": true,
+ "NET_RT_OIFLIST": true,
+ "NET_RT_OOIFLIST": true,
+ "NET_RT_STAT": true,
+ "NET_RT_STATS": true,
+ "NET_RT_TABLE": true,
+ "NET_RT_TRASH": true,
+ "NLA_ALIGNTO": true,
+ "NLA_F_NESTED": true,
+ "NLA_F_NET_BYTEORDER": true,
+ "NLA_HDRLEN": true,
+ "NLMSG_ALIGNTO": true,
+ "NLMSG_DONE": true,
+ "NLMSG_ERROR": true,
+ "NLMSG_HDRLEN": true,
+ "NLMSG_MIN_TYPE": true,
+ "NLMSG_NOOP": true,
+ "NLMSG_OVERRUN": true,
+ "NLM_F_ACK": true,
+ "NLM_F_APPEND": true,
+ "NLM_F_ATOMIC": true,
+ "NLM_F_CREATE": true,
+ "NLM_F_DUMP": true,
+ "NLM_F_ECHO": true,
+ "NLM_F_EXCL": true,
+ "NLM_F_MATCH": true,
+ "NLM_F_MULTI": true,
+ "NLM_F_REPLACE": true,
+ "NLM_F_REQUEST": true,
+ "NLM_F_ROOT": true,
+ "NOFLSH": true,
+ "NOTE_ABSOLUTE": true,
+ "NOTE_ATTRIB": true,
+ "NOTE_CHILD": true,
+ "NOTE_DELETE": true,
+ "NOTE_EOF": true,
+ "NOTE_EXEC": true,
+ "NOTE_EXIT": true,
+ "NOTE_EXITSTATUS": true,
+ "NOTE_EXTEND": true,
+ "NOTE_FFAND": true,
+ "NOTE_FFCOPY": true,
+ "NOTE_FFCTRLMASK": true,
+ "NOTE_FFLAGSMASK": true,
+ "NOTE_FFNOP": true,
+ "NOTE_FFOR": true,
+ "NOTE_FORK": true,
+ "NOTE_LINK": true,
+ "NOTE_LOWAT": true,
+ "NOTE_NONE": true,
+ "NOTE_NSECONDS": true,
+ "NOTE_PCTRLMASK": true,
+ "NOTE_PDATAMASK": true,
+ "NOTE_REAP": true,
+ "NOTE_RENAME": true,
+ "NOTE_RESOURCEEND": true,
+ "NOTE_REVOKE": true,
+ "NOTE_SECONDS": true,
+ "NOTE_SIGNAL": true,
+ "NOTE_TRACK": true,
+ "NOTE_TRACKERR": true,
+ "NOTE_TRIGGER": true,
+ "NOTE_TRUNCATE": true,
+ "NOTE_USECONDS": true,
+ "NOTE_VM_ERROR": true,
+ "NOTE_VM_PRESSURE": true,
+ "NOTE_VM_PRESSURE_SUDDEN_TERMINATE": true,
+ "NOTE_VM_PRESSURE_TERMINATE": true,
+ "NOTE_WRITE": true,
+ "NameCanonical": true,
+ "NameCanonicalEx": true,
+ "NameDisplay": true,
+ "NameDnsDomain": true,
+ "NameFullyQualifiedDN": true,
+ "NameSamCompatible": true,
+ "NameServicePrincipal": true,
+ "NameUniqueId": true,
+ "NameUnknown": true,
+ "NameUserPrincipal": true,
+ "Nanosleep": true,
+ "NetApiBufferFree": true,
+ "NetGetJoinInformation": true,
+ "NetSetupDomainName": true,
+ "NetSetupUnjoined": true,
+ "NetSetupUnknownStatus": true,
+ "NetSetupWorkgroupName": true,
+ "NetUserGetInfo": true,
+ "NetlinkMessage": true,
+ "NetlinkRIB": true,
+ "NetlinkRouteAttr": true,
+ "NetlinkRouteRequest": true,
+ "NewCallback": true,
+ "NewCallbackCDecl": true,
+ "NewLazyDLL": true,
+ "NlAttr": true,
+ "NlMsgerr": true,
+ "NlMsghdr": true,
+ "NsecToFiletime": true,
+ "NsecToTimespec": true,
+ "NsecToTimeval": true,
+ "Ntohs": true,
+ "OCRNL": true,
+ "OFDEL": true,
+ "OFILL": true,
+ "OFIOGETBMAP": true,
+ "OID_PKIX_KP_SERVER_AUTH": true,
+ "OID_SERVER_GATED_CRYPTO": true,
+ "OID_SGC_NETSCAPE": true,
+ "OLCUC": true,
+ "ONLCR": true,
+ "ONLRET": true,
+ "ONOCR": true,
+ "ONOEOT": true,
+ "OPEN_ALWAYS": true,
+ "OPEN_EXISTING": true,
+ "OPOST": true,
+ "O_ACCMODE": true,
+ "O_ALERT": true,
+ "O_ALT_IO": true,
+ "O_APPEND": true,
+ "O_ASYNC": true,
+ "O_CLOEXEC": true,
+ "O_CREAT": true,
+ "O_DIRECT": true,
+ "O_DIRECTORY": true,
+ "O_DSYNC": true,
+ "O_EVTONLY": true,
+ "O_EXCL": true,
+ "O_EXEC": true,
+ "O_EXLOCK": true,
+ "O_FSYNC": true,
+ "O_LARGEFILE": true,
+ "O_NDELAY": true,
+ "O_NOATIME": true,
+ "O_NOCTTY": true,
+ "O_NOFOLLOW": true,
+ "O_NONBLOCK": true,
+ "O_NOSIGPIPE": true,
+ "O_POPUP": true,
+ "O_RDONLY": true,
+ "O_RDWR": true,
+ "O_RSYNC": true,
+ "O_SHLOCK": true,
+ "O_SYMLINK": true,
+ "O_SYNC": true,
+ "O_TRUNC": true,
+ "O_TTY_INIT": true,
+ "O_WRONLY": true,
+ "Open": true,
+ "OpenCurrentProcessToken": true,
+ "OpenProcess": true,
+ "OpenProcessToken": true,
+ "Openat": true,
+ "Overlapped": true,
+ "PACKET_ADD_MEMBERSHIP": true,
+ "PACKET_BROADCAST": true,
+ "PACKET_DROP_MEMBERSHIP": true,
+ "PACKET_FASTROUTE": true,
+ "PACKET_HOST": true,
+ "PACKET_LOOPBACK": true,
+ "PACKET_MR_ALLMULTI": true,
+ "PACKET_MR_MULTICAST": true,
+ "PACKET_MR_PROMISC": true,
+ "PACKET_MULTICAST": true,
+ "PACKET_OTHERHOST": true,
+ "PACKET_OUTGOING": true,
+ "PACKET_RECV_OUTPUT": true,
+ "PACKET_RX_RING": true,
+ "PACKET_STATISTICS": true,
+ "PAGE_EXECUTE_READ": true,
+ "PAGE_EXECUTE_READWRITE": true,
+ "PAGE_EXECUTE_WRITECOPY": true,
+ "PAGE_READONLY": true,
+ "PAGE_READWRITE": true,
+ "PAGE_WRITECOPY": true,
+ "PARENB": true,
+ "PARMRK": true,
+ "PARODD": true,
+ "PENDIN": true,
+ "PFL_HIDDEN": true,
+ "PFL_MATCHES_PROTOCOL_ZERO": true,
+ "PFL_MULTIPLE_PROTO_ENTRIES": true,
+ "PFL_NETWORKDIRECT_PROVIDER": true,
+ "PFL_RECOMMENDED_PROTO_ENTRY": true,
+ "PF_FLUSH": true,
+ "PKCS_7_ASN_ENCODING": true,
+ "PMC5_PIPELINE_FLUSH": true,
+ "PRIO_PGRP": true,
+ "PRIO_PROCESS": true,
+ "PRIO_USER": true,
+ "PRI_IOFLUSH": true,
+ "PROCESS_QUERY_INFORMATION": true,
+ "PROCESS_TERMINATE": true,
+ "PROT_EXEC": true,
+ "PROT_GROWSDOWN": true,
+ "PROT_GROWSUP": true,
+ "PROT_NONE": true,
+ "PROT_READ": true,
+ "PROT_WRITE": true,
+ "PROV_DH_SCHANNEL": true,
+ "PROV_DSS": true,
+ "PROV_DSS_DH": true,
+ "PROV_EC_ECDSA_FULL": true,
+ "PROV_EC_ECDSA_SIG": true,
+ "PROV_EC_ECNRA_FULL": true,
+ "PROV_EC_ECNRA_SIG": true,
+ "PROV_FORTEZZA": true,
+ "PROV_INTEL_SEC": true,
+ "PROV_MS_EXCHANGE": true,
+ "PROV_REPLACE_OWF": true,
+ "PROV_RNG": true,
+ "PROV_RSA_AES": true,
+ "PROV_RSA_FULL": true,
+ "PROV_RSA_SCHANNEL": true,
+ "PROV_RSA_SIG": true,
+ "PROV_SPYRUS_LYNKS": true,
+ "PROV_SSL": true,
+ "PR_CAPBSET_DROP": true,
+ "PR_CAPBSET_READ": true,
+ "PR_CLEAR_SECCOMP_FILTER": true,
+ "PR_ENDIAN_BIG": true,
+ "PR_ENDIAN_LITTLE": true,
+ "PR_ENDIAN_PPC_LITTLE": true,
+ "PR_FPEMU_NOPRINT": true,
+ "PR_FPEMU_SIGFPE": true,
+ "PR_FP_EXC_ASYNC": true,
+ "PR_FP_EXC_DISABLED": true,
+ "PR_FP_EXC_DIV": true,
+ "PR_FP_EXC_INV": true,
+ "PR_FP_EXC_NONRECOV": true,
+ "PR_FP_EXC_OVF": true,
+ "PR_FP_EXC_PRECISE": true,
+ "PR_FP_EXC_RES": true,
+ "PR_FP_EXC_SW_ENABLE": true,
+ "PR_FP_EXC_UND": true,
+ "PR_GET_DUMPABLE": true,
+ "PR_GET_ENDIAN": true,
+ "PR_GET_FPEMU": true,
+ "PR_GET_FPEXC": true,
+ "PR_GET_KEEPCAPS": true,
+ "PR_GET_NAME": true,
+ "PR_GET_PDEATHSIG": true,
+ "PR_GET_SECCOMP": true,
+ "PR_GET_SECCOMP_FILTER": true,
+ "PR_GET_SECUREBITS": true,
+ "PR_GET_TIMERSLACK": true,
+ "PR_GET_TIMING": true,
+ "PR_GET_TSC": true,
+ "PR_GET_UNALIGN": true,
+ "PR_MCE_KILL": true,
+ "PR_MCE_KILL_CLEAR": true,
+ "PR_MCE_KILL_DEFAULT": true,
+ "PR_MCE_KILL_EARLY": true,
+ "PR_MCE_KILL_GET": true,
+ "PR_MCE_KILL_LATE": true,
+ "PR_MCE_KILL_SET": true,
+ "PR_SECCOMP_FILTER_EVENT": true,
+ "PR_SECCOMP_FILTER_SYSCALL": true,
+ "PR_SET_DUMPABLE": true,
+ "PR_SET_ENDIAN": true,
+ "PR_SET_FPEMU": true,
+ "PR_SET_FPEXC": true,
+ "PR_SET_KEEPCAPS": true,
+ "PR_SET_NAME": true,
+ "PR_SET_PDEATHSIG": true,
+ "PR_SET_PTRACER": true,
+ "PR_SET_SECCOMP": true,
+ "PR_SET_SECCOMP_FILTER": true,
+ "PR_SET_SECUREBITS": true,
+ "PR_SET_TIMERSLACK": true,
+ "PR_SET_TIMING": true,
+ "PR_SET_TSC": true,
+ "PR_SET_UNALIGN": true,
+ "PR_TASK_PERF_EVENTS_DISABLE": true,
+ "PR_TASK_PERF_EVENTS_ENABLE": true,
+ "PR_TIMING_STATISTICAL": true,
+ "PR_TIMING_TIMESTAMP": true,
+ "PR_TSC_ENABLE": true,
+ "PR_TSC_SIGSEGV": true,
+ "PR_UNALIGN_NOPRINT": true,
+ "PR_UNALIGN_SIGBUS": true,
+ "PTRACE_ARCH_PRCTL": true,
+ "PTRACE_ATTACH": true,
+ "PTRACE_CONT": true,
+ "PTRACE_DETACH": true,
+ "PTRACE_EVENT_CLONE": true,
+ "PTRACE_EVENT_EXEC": true,
+ "PTRACE_EVENT_EXIT": true,
+ "PTRACE_EVENT_FORK": true,
+ "PTRACE_EVENT_VFORK": true,
+ "PTRACE_EVENT_VFORK_DONE": true,
+ "PTRACE_GETCRUNCHREGS": true,
+ "PTRACE_GETEVENTMSG": true,
+ "PTRACE_GETFPREGS": true,
+ "PTRACE_GETFPXREGS": true,
+ "PTRACE_GETHBPREGS": true,
+ "PTRACE_GETREGS": true,
+ "PTRACE_GETREGSET": true,
+ "PTRACE_GETSIGINFO": true,
+ "PTRACE_GETVFPREGS": true,
+ "PTRACE_GETWMMXREGS": true,
+ "PTRACE_GET_THREAD_AREA": true,
+ "PTRACE_KILL": true,
+ "PTRACE_OLDSETOPTIONS": true,
+ "PTRACE_O_MASK": true,
+ "PTRACE_O_TRACECLONE": true,
+ "PTRACE_O_TRACEEXEC": true,
+ "PTRACE_O_TRACEEXIT": true,
+ "PTRACE_O_TRACEFORK": true,
+ "PTRACE_O_TRACESYSGOOD": true,
+ "PTRACE_O_TRACEVFORK": true,
+ "PTRACE_O_TRACEVFORKDONE": true,
+ "PTRACE_PEEKDATA": true,
+ "PTRACE_PEEKTEXT": true,
+ "PTRACE_PEEKUSR": true,
+ "PTRACE_POKEDATA": true,
+ "PTRACE_POKETEXT": true,
+ "PTRACE_POKEUSR": true,
+ "PTRACE_SETCRUNCHREGS": true,
+ "PTRACE_SETFPREGS": true,
+ "PTRACE_SETFPXREGS": true,
+ "PTRACE_SETHBPREGS": true,
+ "PTRACE_SETOPTIONS": true,
+ "PTRACE_SETREGS": true,
+ "PTRACE_SETREGSET": true,
+ "PTRACE_SETSIGINFO": true,
+ "PTRACE_SETVFPREGS": true,
+ "PTRACE_SETWMMXREGS": true,
+ "PTRACE_SET_SYSCALL": true,
+ "PTRACE_SET_THREAD_AREA": true,
+ "PTRACE_SINGLEBLOCK": true,
+ "PTRACE_SINGLESTEP": true,
+ "PTRACE_SYSCALL": true,
+ "PTRACE_SYSEMU": true,
+ "PTRACE_SYSEMU_SINGLESTEP": true,
+ "PTRACE_TRACEME": true,
+ "PT_ATTACH": true,
+ "PT_ATTACHEXC": true,
+ "PT_CONTINUE": true,
+ "PT_DATA_ADDR": true,
+ "PT_DENY_ATTACH": true,
+ "PT_DETACH": true,
+ "PT_FIRSTMACH": true,
+ "PT_FORCEQUOTA": true,
+ "PT_KILL": true,
+ "PT_MASK": true,
+ "PT_READ_D": true,
+ "PT_READ_I": true,
+ "PT_READ_U": true,
+ "PT_SIGEXC": true,
+ "PT_STEP": true,
+ "PT_TEXT_ADDR": true,
+ "PT_TEXT_END_ADDR": true,
+ "PT_THUPDATE": true,
+ "PT_TRACE_ME": true,
+ "PT_WRITE_D": true,
+ "PT_WRITE_I": true,
+ "PT_WRITE_U": true,
+ "ParseDirent": true,
+ "ParseNetlinkMessage": true,
+ "ParseNetlinkRouteAttr": true,
+ "ParseRoutingMessage": true,
+ "ParseRoutingSockaddr": true,
+ "ParseSocketControlMessage": true,
+ "ParseUnixCredentials": true,
+ "ParseUnixRights": true,
+ "PathMax": true,
+ "Pathconf": true,
+ "Pause": true,
+ "Pipe": true,
+ "Pipe2": true,
+ "PivotRoot": true,
+ "Pointer": true,
+ "PostQueuedCompletionStatus": true,
+ "Pread": true,
+ "Proc": true,
+ "ProcAttr": true,
+ "Process32First": true,
+ "Process32Next": true,
+ "ProcessEntry32": true,
+ "ProcessInformation": true,
+ "Protoent": true,
+ "PtraceAttach": true,
+ "PtraceCont": true,
+ "PtraceDetach": true,
+ "PtraceGetEventMsg": true,
+ "PtraceGetRegs": true,
+ "PtracePeekData": true,
+ "PtracePeekText": true,
+ "PtracePokeData": true,
+ "PtracePokeText": true,
+ "PtraceRegs": true,
+ "PtraceSetOptions": true,
+ "PtraceSetRegs": true,
+ "PtraceSingleStep": true,
+ "PtraceSyscall": true,
+ "Pwrite": true,
+ "REG_BINARY": true,
+ "REG_DWORD": true,
+ "REG_DWORD_BIG_ENDIAN": true,
+ "REG_DWORD_LITTLE_ENDIAN": true,
+ "REG_EXPAND_SZ": true,
+ "REG_FULL_RESOURCE_DESCRIPTOR": true,
+ "REG_LINK": true,
+ "REG_MULTI_SZ": true,
+ "REG_NONE": true,
+ "REG_QWORD": true,
+ "REG_QWORD_LITTLE_ENDIAN": true,
+ "REG_RESOURCE_LIST": true,
+ "REG_RESOURCE_REQUIREMENTS_LIST": true,
+ "REG_SZ": true,
+ "RLIMIT_AS": true,
+ "RLIMIT_CORE": true,
+ "RLIMIT_CPU": true,
+ "RLIMIT_DATA": true,
+ "RLIMIT_FSIZE": true,
+ "RLIMIT_NOFILE": true,
+ "RLIMIT_STACK": true,
+ "RLIM_INFINITY": true,
+ "RTAX_ADVMSS": true,
+ "RTAX_AUTHOR": true,
+ "RTAX_BRD": true,
+ "RTAX_CWND": true,
+ "RTAX_DST": true,
+ "RTAX_FEATURES": true,
+ "RTAX_FEATURE_ALLFRAG": true,
+ "RTAX_FEATURE_ECN": true,
+ "RTAX_FEATURE_SACK": true,
+ "RTAX_FEATURE_TIMESTAMP": true,
+ "RTAX_GATEWAY": true,
+ "RTAX_GENMASK": true,
+ "RTAX_HOPLIMIT": true,
+ "RTAX_IFA": true,
+ "RTAX_IFP": true,
+ "RTAX_INITCWND": true,
+ "RTAX_INITRWND": true,
+ "RTAX_LABEL": true,
+ "RTAX_LOCK": true,
+ "RTAX_MAX": true,
+ "RTAX_MTU": true,
+ "RTAX_NETMASK": true,
+ "RTAX_REORDERING": true,
+ "RTAX_RTO_MIN": true,
+ "RTAX_RTT": true,
+ "RTAX_RTTVAR": true,
+ "RTAX_SRC": true,
+ "RTAX_SRCMASK": true,
+ "RTAX_SSTHRESH": true,
+ "RTAX_TAG": true,
+ "RTAX_UNSPEC": true,
+ "RTAX_WINDOW": true,
+ "RTA_ALIGNTO": true,
+ "RTA_AUTHOR": true,
+ "RTA_BRD": true,
+ "RTA_CACHEINFO": true,
+ "RTA_DST": true,
+ "RTA_FLOW": true,
+ "RTA_GATEWAY": true,
+ "RTA_GENMASK": true,
+ "RTA_IFA": true,
+ "RTA_IFP": true,
+ "RTA_IIF": true,
+ "RTA_LABEL": true,
+ "RTA_MAX": true,
+ "RTA_METRICS": true,
+ "RTA_MULTIPATH": true,
+ "RTA_NETMASK": true,
+ "RTA_OIF": true,
+ "RTA_PREFSRC": true,
+ "RTA_PRIORITY": true,
+ "RTA_SRC": true,
+ "RTA_SRCMASK": true,
+ "RTA_TABLE": true,
+ "RTA_TAG": true,
+ "RTA_UNSPEC": true,
+ "RTCF_DIRECTSRC": true,
+ "RTCF_DOREDIRECT": true,
+ "RTCF_LOG": true,
+ "RTCF_MASQ": true,
+ "RTCF_NAT": true,
+ "RTCF_VALVE": true,
+ "RTF_ADDRCLASSMASK": true,
+ "RTF_ADDRCONF": true,
+ "RTF_ALLONLINK": true,
+ "RTF_ANNOUNCE": true,
+ "RTF_BLACKHOLE": true,
+ "RTF_BROADCAST": true,
+ "RTF_CACHE": true,
+ "RTF_CLONED": true,
+ "RTF_CLONING": true,
+ "RTF_CONDEMNED": true,
+ "RTF_DEFAULT": true,
+ "RTF_DELCLONE": true,
+ "RTF_DONE": true,
+ "RTF_DYNAMIC": true,
+ "RTF_FLOW": true,
+ "RTF_FMASK": true,
+ "RTF_GATEWAY": true,
+ "RTF_GWFLAG_COMPAT": true,
+ "RTF_HOST": true,
+ "RTF_IFREF": true,
+ "RTF_IFSCOPE": true,
+ "RTF_INTERFACE": true,
+ "RTF_IRTT": true,
+ "RTF_LINKRT": true,
+ "RTF_LLDATA": true,
+ "RTF_LLINFO": true,
+ "RTF_LOCAL": true,
+ "RTF_MASK": true,
+ "RTF_MODIFIED": true,
+ "RTF_MPATH": true,
+ "RTF_MPLS": true,
+ "RTF_MSS": true,
+ "RTF_MTU": true,
+ "RTF_MULTICAST": true,
+ "RTF_NAT": true,
+ "RTF_NOFORWARD": true,
+ "RTF_NONEXTHOP": true,
+ "RTF_NOPMTUDISC": true,
+ "RTF_PERMANENT_ARP": true,
+ "RTF_PINNED": true,
+ "RTF_POLICY": true,
+ "RTF_PRCLONING": true,
+ "RTF_PROTO1": true,
+ "RTF_PROTO2": true,
+ "RTF_PROTO3": true,
+ "RTF_REINSTATE": true,
+ "RTF_REJECT": true,
+ "RTF_RNH_LOCKED": true,
+ "RTF_SOURCE": true,
+ "RTF_SRC": true,
+ "RTF_STATIC": true,
+ "RTF_STICKY": true,
+ "RTF_THROW": true,
+ "RTF_TUNNEL": true,
+ "RTF_UP": true,
+ "RTF_USETRAILERS": true,
+ "RTF_WASCLONED": true,
+ "RTF_WINDOW": true,
+ "RTF_XRESOLVE": true,
+ "RTM_ADD": true,
+ "RTM_BASE": true,
+ "RTM_CHANGE": true,
+ "RTM_CHGADDR": true,
+ "RTM_DELACTION": true,
+ "RTM_DELADDR": true,
+ "RTM_DELADDRLABEL": true,
+ "RTM_DELETE": true,
+ "RTM_DELLINK": true,
+ "RTM_DELMADDR": true,
+ "RTM_DELNEIGH": true,
+ "RTM_DELQDISC": true,
+ "RTM_DELROUTE": true,
+ "RTM_DELRULE": true,
+ "RTM_DELTCLASS": true,
+ "RTM_DELTFILTER": true,
+ "RTM_DESYNC": true,
+ "RTM_F_CLONED": true,
+ "RTM_F_EQUALIZE": true,
+ "RTM_F_NOTIFY": true,
+ "RTM_F_PREFIX": true,
+ "RTM_GET": true,
+ "RTM_GET2": true,
+ "RTM_GETACTION": true,
+ "RTM_GETADDR": true,
+ "RTM_GETADDRLABEL": true,
+ "RTM_GETANYCAST": true,
+ "RTM_GETDCB": true,
+ "RTM_GETLINK": true,
+ "RTM_GETMULTICAST": true,
+ "RTM_GETNEIGH": true,
+ "RTM_GETNEIGHTBL": true,
+ "RTM_GETQDISC": true,
+ "RTM_GETROUTE": true,
+ "RTM_GETRULE": true,
+ "RTM_GETTCLASS": true,
+ "RTM_GETTFILTER": true,
+ "RTM_IEEE80211": true,
+ "RTM_IFANNOUNCE": true,
+ "RTM_IFINFO": true,
+ "RTM_IFINFO2": true,
+ "RTM_LLINFO_UPD": true,
+ "RTM_LOCK": true,
+ "RTM_LOSING": true,
+ "RTM_MAX": true,
+ "RTM_MAXSIZE": true,
+ "RTM_MISS": true,
+ "RTM_NEWACTION": true,
+ "RTM_NEWADDR": true,
+ "RTM_NEWADDRLABEL": true,
+ "RTM_NEWLINK": true,
+ "RTM_NEWMADDR": true,
+ "RTM_NEWMADDR2": true,
+ "RTM_NEWNDUSEROPT": true,
+ "RTM_NEWNEIGH": true,
+ "RTM_NEWNEIGHTBL": true,
+ "RTM_NEWPREFIX": true,
+ "RTM_NEWQDISC": true,
+ "RTM_NEWROUTE": true,
+ "RTM_NEWRULE": true,
+ "RTM_NEWTCLASS": true,
+ "RTM_NEWTFILTER": true,
+ "RTM_NR_FAMILIES": true,
+ "RTM_NR_MSGTYPES": true,
+ "RTM_OIFINFO": true,
+ "RTM_OLDADD": true,
+ "RTM_OLDDEL": true,
+ "RTM_OOIFINFO": true,
+ "RTM_REDIRECT": true,
+ "RTM_RESOLVE": true,
+ "RTM_RTTUNIT": true,
+ "RTM_SETDCB": true,
+ "RTM_SETGATE": true,
+ "RTM_SETLINK": true,
+ "RTM_SETNEIGHTBL": true,
+ "RTM_VERSION": true,
+ "RTNH_ALIGNTO": true,
+ "RTNH_F_DEAD": true,
+ "RTNH_F_ONLINK": true,
+ "RTNH_F_PERVASIVE": true,
+ "RTNLGRP_IPV4_IFADDR": true,
+ "RTNLGRP_IPV4_MROUTE": true,
+ "RTNLGRP_IPV4_ROUTE": true,
+ "RTNLGRP_IPV4_RULE": true,
+ "RTNLGRP_IPV6_IFADDR": true,
+ "RTNLGRP_IPV6_IFINFO": true,
+ "RTNLGRP_IPV6_MROUTE": true,
+ "RTNLGRP_IPV6_PREFIX": true,
+ "RTNLGRP_IPV6_ROUTE": true,
+ "RTNLGRP_IPV6_RULE": true,
+ "RTNLGRP_LINK": true,
+ "RTNLGRP_ND_USEROPT": true,
+ "RTNLGRP_NEIGH": true,
+ "RTNLGRP_NONE": true,
+ "RTNLGRP_NOTIFY": true,
+ "RTNLGRP_TC": true,
+ "RTN_ANYCAST": true,
+ "RTN_BLACKHOLE": true,
+ "RTN_BROADCAST": true,
+ "RTN_LOCAL": true,
+ "RTN_MAX": true,
+ "RTN_MULTICAST": true,
+ "RTN_NAT": true,
+ "RTN_PROHIBIT": true,
+ "RTN_THROW": true,
+ "RTN_UNICAST": true,
+ "RTN_UNREACHABLE": true,
+ "RTN_UNSPEC": true,
+ "RTN_XRESOLVE": true,
+ "RTPROT_BIRD": true,
+ "RTPROT_BOOT": true,
+ "RTPROT_DHCP": true,
+ "RTPROT_DNROUTED": true,
+ "RTPROT_GATED": true,
+ "RTPROT_KERNEL": true,
+ "RTPROT_MRT": true,
+ "RTPROT_NTK": true,
+ "RTPROT_RA": true,
+ "RTPROT_REDIRECT": true,
+ "RTPROT_STATIC": true,
+ "RTPROT_UNSPEC": true,
+ "RTPROT_XORP": true,
+ "RTPROT_ZEBRA": true,
+ "RTV_EXPIRE": true,
+ "RTV_HOPCOUNT": true,
+ "RTV_MTU": true,
+ "RTV_RPIPE": true,
+ "RTV_RTT": true,
+ "RTV_RTTVAR": true,
+ "RTV_SPIPE": true,
+ "RTV_SSTHRESH": true,
+ "RTV_WEIGHT": true,
+ "RT_CACHING_CONTEXT": true,
+ "RT_CLASS_DEFAULT": true,
+ "RT_CLASS_LOCAL": true,
+ "RT_CLASS_MAIN": true,
+ "RT_CLASS_MAX": true,
+ "RT_CLASS_UNSPEC": true,
+ "RT_DEFAULT_FIB": true,
+ "RT_NORTREF": true,
+ "RT_SCOPE_HOST": true,
+ "RT_SCOPE_LINK": true,
+ "RT_SCOPE_NOWHERE": true,
+ "RT_SCOPE_SITE": true,
+ "RT_SCOPE_UNIVERSE": true,
+ "RT_TABLEID_MAX": true,
+ "RT_TABLE_COMPAT": true,
+ "RT_TABLE_DEFAULT": true,
+ "RT_TABLE_LOCAL": true,
+ "RT_TABLE_MAIN": true,
+ "RT_TABLE_MAX": true,
+ "RT_TABLE_UNSPEC": true,
+ "RUSAGE_CHILDREN": true,
+ "RUSAGE_SELF": true,
+ "RUSAGE_THREAD": true,
+ "Radvisory_t": true,
+ "RawConn": true,
+ "RawSockaddr": true,
+ "RawSockaddrAny": true,
+ "RawSockaddrDatalink": true,
+ "RawSockaddrInet4": true,
+ "RawSockaddrInet6": true,
+ "RawSockaddrLinklayer": true,
+ "RawSockaddrNetlink": true,
+ "RawSockaddrUnix": true,
+ "RawSyscall": true,
+ "RawSyscall6": true,
+ "Read": true,
+ "ReadConsole": true,
+ "ReadDirectoryChanges": true,
+ "ReadDirent": true,
+ "ReadFile": true,
+ "Readlink": true,
+ "Reboot": true,
+ "Recvfrom": true,
+ "Recvmsg": true,
+ "RegCloseKey": true,
+ "RegEnumKeyEx": true,
+ "RegOpenKeyEx": true,
+ "RegQueryInfoKey": true,
+ "RegQueryValueEx": true,
+ "RemoveDirectory": true,
+ "Removexattr": true,
+ "Rename": true,
+ "Renameat": true,
+ "Revoke": true,
+ "Rlimit": true,
+ "Rmdir": true,
+ "RouteMessage": true,
+ "RouteRIB": true,
+ "RtAttr": true,
+ "RtGenmsg": true,
+ "RtMetrics": true,
+ "RtMsg": true,
+ "RtMsghdr": true,
+ "RtNexthop": true,
+ "Rusage": true,
+ "SCM_BINTIME": true,
+ "SCM_CREDENTIALS": true,
+ "SCM_CREDS": true,
+ "SCM_RIGHTS": true,
+ "SCM_TIMESTAMP": true,
+ "SCM_TIMESTAMPING": true,
+ "SCM_TIMESTAMPNS": true,
+ "SCM_TIMESTAMP_MONOTONIC": true,
+ "SHUT_RD": true,
+ "SHUT_RDWR": true,
+ "SHUT_WR": true,
+ "SID": true,
+ "SIDAndAttributes": true,
+ "SIGABRT": true,
+ "SIGALRM": true,
+ "SIGBUS": true,
+ "SIGCHLD": true,
+ "SIGCLD": true,
+ "SIGCONT": true,
+ "SIGEMT": true,
+ "SIGFPE": true,
+ "SIGHUP": true,
+ "SIGILL": true,
+ "SIGINFO": true,
+ "SIGINT": true,
+ "SIGIO": true,
+ "SIGIOT": true,
+ "SIGKILL": true,
+ "SIGLIBRT": true,
+ "SIGLWP": true,
+ "SIGPIPE": true,
+ "SIGPOLL": true,
+ "SIGPROF": true,
+ "SIGPWR": true,
+ "SIGQUIT": true,
+ "SIGSEGV": true,
+ "SIGSTKFLT": true,
+ "SIGSTOP": true,
+ "SIGSYS": true,
+ "SIGTERM": true,
+ "SIGTHR": true,
+ "SIGTRAP": true,
+ "SIGTSTP": true,
+ "SIGTTIN": true,
+ "SIGTTOU": true,
+ "SIGUNUSED": true,
+ "SIGURG": true,
+ "SIGUSR1": true,
+ "SIGUSR2": true,
+ "SIGVTALRM": true,
+ "SIGWINCH": true,
+ "SIGXCPU": true,
+ "SIGXFSZ": true,
+ "SIOCADDDLCI": true,
+ "SIOCADDMULTI": true,
+ "SIOCADDRT": true,
+ "SIOCAIFADDR": true,
+ "SIOCAIFGROUP": true,
+ "SIOCALIFADDR": true,
+ "SIOCARPIPLL": true,
+ "SIOCATMARK": true,
+ "SIOCAUTOADDR": true,
+ "SIOCAUTONETMASK": true,
+ "SIOCBRDGADD": true,
+ "SIOCBRDGADDS": true,
+ "SIOCBRDGARL": true,
+ "SIOCBRDGDADDR": true,
+ "SIOCBRDGDEL": true,
+ "SIOCBRDGDELS": true,
+ "SIOCBRDGFLUSH": true,
+ "SIOCBRDGFRL": true,
+ "SIOCBRDGGCACHE": true,
+ "SIOCBRDGGFD": true,
+ "SIOCBRDGGHT": true,
+ "SIOCBRDGGIFFLGS": true,
+ "SIOCBRDGGMA": true,
+ "SIOCBRDGGPARAM": true,
+ "SIOCBRDGGPRI": true,
+ "SIOCBRDGGRL": true,
+ "SIOCBRDGGSIFS": true,
+ "SIOCBRDGGTO": true,
+ "SIOCBRDGIFS": true,
+ "SIOCBRDGRTS": true,
+ "SIOCBRDGSADDR": true,
+ "SIOCBRDGSCACHE": true,
+ "SIOCBRDGSFD": true,
+ "SIOCBRDGSHT": true,
+ "SIOCBRDGSIFCOST": true,
+ "SIOCBRDGSIFFLGS": true,
+ "SIOCBRDGSIFPRIO": true,
+ "SIOCBRDGSMA": true,
+ "SIOCBRDGSPRI": true,
+ "SIOCBRDGSPROTO": true,
+ "SIOCBRDGSTO": true,
+ "SIOCBRDGSTXHC": true,
+ "SIOCDARP": true,
+ "SIOCDELDLCI": true,
+ "SIOCDELMULTI": true,
+ "SIOCDELRT": true,
+ "SIOCDEVPRIVATE": true,
+ "SIOCDIFADDR": true,
+ "SIOCDIFGROUP": true,
+ "SIOCDIFPHYADDR": true,
+ "SIOCDLIFADDR": true,
+ "SIOCDRARP": true,
+ "SIOCGARP": true,
+ "SIOCGDRVSPEC": true,
+ "SIOCGETKALIVE": true,
+ "SIOCGETLABEL": true,
+ "SIOCGETPFLOW": true,
+ "SIOCGETPFSYNC": true,
+ "SIOCGETSGCNT": true,
+ "SIOCGETVIFCNT": true,
+ "SIOCGETVLAN": true,
+ "SIOCGHIWAT": true,
+ "SIOCGIFADDR": true,
+ "SIOCGIFADDRPREF": true,
+ "SIOCGIFALIAS": true,
+ "SIOCGIFALTMTU": true,
+ "SIOCGIFASYNCMAP": true,
+ "SIOCGIFBOND": true,
+ "SIOCGIFBR": true,
+ "SIOCGIFBRDADDR": true,
+ "SIOCGIFCAP": true,
+ "SIOCGIFCONF": true,
+ "SIOCGIFCOUNT": true,
+ "SIOCGIFDATA": true,
+ "SIOCGIFDESCR": true,
+ "SIOCGIFDEVMTU": true,
+ "SIOCGIFDLT": true,
+ "SIOCGIFDSTADDR": true,
+ "SIOCGIFENCAP": true,
+ "SIOCGIFFIB": true,
+ "SIOCGIFFLAGS": true,
+ "SIOCGIFGATTR": true,
+ "SIOCGIFGENERIC": true,
+ "SIOCGIFGMEMB": true,
+ "SIOCGIFGROUP": true,
+ "SIOCGIFHARDMTU": true,
+ "SIOCGIFHWADDR": true,
+ "SIOCGIFINDEX": true,
+ "SIOCGIFKPI": true,
+ "SIOCGIFMAC": true,
+ "SIOCGIFMAP": true,
+ "SIOCGIFMEDIA": true,
+ "SIOCGIFMEM": true,
+ "SIOCGIFMETRIC": true,
+ "SIOCGIFMTU": true,
+ "SIOCGIFNAME": true,
+ "SIOCGIFNETMASK": true,
+ "SIOCGIFPDSTADDR": true,
+ "SIOCGIFPFLAGS": true,
+ "SIOCGIFPHYS": true,
+ "SIOCGIFPRIORITY": true,
+ "SIOCGIFPSRCADDR": true,
+ "SIOCGIFRDOMAIN": true,
+ "SIOCGIFRTLABEL": true,
+ "SIOCGIFSLAVE": true,
+ "SIOCGIFSTATUS": true,
+ "SIOCGIFTIMESLOT": true,
+ "SIOCGIFTXQLEN": true,
+ "SIOCGIFVLAN": true,
+ "SIOCGIFWAKEFLAGS": true,
+ "SIOCGIFXFLAGS": true,
+ "SIOCGLIFADDR": true,
+ "SIOCGLIFPHYADDR": true,
+ "SIOCGLIFPHYRTABLE": true,
+ "SIOCGLIFPHYTTL": true,
+ "SIOCGLINKSTR": true,
+ "SIOCGLOWAT": true,
+ "SIOCGPGRP": true,
+ "SIOCGPRIVATE_0": true,
+ "SIOCGPRIVATE_1": true,
+ "SIOCGRARP": true,
+ "SIOCGSPPPPARAMS": true,
+ "SIOCGSTAMP": true,
+ "SIOCGSTAMPNS": true,
+ "SIOCGVH": true,
+ "SIOCGVNETID": true,
+ "SIOCIFCREATE": true,
+ "SIOCIFCREATE2": true,
+ "SIOCIFDESTROY": true,
+ "SIOCIFGCLONERS": true,
+ "SIOCINITIFADDR": true,
+ "SIOCPROTOPRIVATE": true,
+ "SIOCRSLVMULTI": true,
+ "SIOCRTMSG": true,
+ "SIOCSARP": true,
+ "SIOCSDRVSPEC": true,
+ "SIOCSETKALIVE": true,
+ "SIOCSETLABEL": true,
+ "SIOCSETPFLOW": true,
+ "SIOCSETPFSYNC": true,
+ "SIOCSETVLAN": true,
+ "SIOCSHIWAT": true,
+ "SIOCSIFADDR": true,
+ "SIOCSIFADDRPREF": true,
+ "SIOCSIFALTMTU": true,
+ "SIOCSIFASYNCMAP": true,
+ "SIOCSIFBOND": true,
+ "SIOCSIFBR": true,
+ "SIOCSIFBRDADDR": true,
+ "SIOCSIFCAP": true,
+ "SIOCSIFDESCR": true,
+ "SIOCSIFDSTADDR": true,
+ "SIOCSIFENCAP": true,
+ "SIOCSIFFIB": true,
+ "SIOCSIFFLAGS": true,
+ "SIOCSIFGATTR": true,
+ "SIOCSIFGENERIC": true,
+ "SIOCSIFHWADDR": true,
+ "SIOCSIFHWBROADCAST": true,
+ "SIOCSIFKPI": true,
+ "SIOCSIFLINK": true,
+ "SIOCSIFLLADDR": true,
+ "SIOCSIFMAC": true,
+ "SIOCSIFMAP": true,
+ "SIOCSIFMEDIA": true,
+ "SIOCSIFMEM": true,
+ "SIOCSIFMETRIC": true,
+ "SIOCSIFMTU": true,
+ "SIOCSIFNAME": true,
+ "SIOCSIFNETMASK": true,
+ "SIOCSIFPFLAGS": true,
+ "SIOCSIFPHYADDR": true,
+ "SIOCSIFPHYS": true,
+ "SIOCSIFPRIORITY": true,
+ "SIOCSIFRDOMAIN": true,
+ "SIOCSIFRTLABEL": true,
+ "SIOCSIFRVNET": true,
+ "SIOCSIFSLAVE": true,
+ "SIOCSIFTIMESLOT": true,
+ "SIOCSIFTXQLEN": true,
+ "SIOCSIFVLAN": true,
+ "SIOCSIFVNET": true,
+ "SIOCSIFXFLAGS": true,
+ "SIOCSLIFPHYADDR": true,
+ "SIOCSLIFPHYRTABLE": true,
+ "SIOCSLIFPHYTTL": true,
+ "SIOCSLINKSTR": true,
+ "SIOCSLOWAT": true,
+ "SIOCSPGRP": true,
+ "SIOCSRARP": true,
+ "SIOCSSPPPPARAMS": true,
+ "SIOCSVH": true,
+ "SIOCSVNETID": true,
+ "SIOCZIFDATA": true,
+ "SIO_GET_EXTENSION_FUNCTION_POINTER": true,
+ "SIO_GET_INTERFACE_LIST": true,
+ "SIO_KEEPALIVE_VALS": true,
+ "SIO_UDP_CONNRESET": true,
+ "SOCK_CLOEXEC": true,
+ "SOCK_DCCP": true,
+ "SOCK_DGRAM": true,
+ "SOCK_FLAGS_MASK": true,
+ "SOCK_MAXADDRLEN": true,
+ "SOCK_NONBLOCK": true,
+ "SOCK_NOSIGPIPE": true,
+ "SOCK_PACKET": true,
+ "SOCK_RAW": true,
+ "SOCK_RDM": true,
+ "SOCK_SEQPACKET": true,
+ "SOCK_STREAM": true,
+ "SOL_AAL": true,
+ "SOL_ATM": true,
+ "SOL_DECNET": true,
+ "SOL_ICMPV6": true,
+ "SOL_IP": true,
+ "SOL_IPV6": true,
+ "SOL_IRDA": true,
+ "SOL_PACKET": true,
+ "SOL_RAW": true,
+ "SOL_SOCKET": true,
+ "SOL_TCP": true,
+ "SOL_X25": true,
+ "SOMAXCONN": true,
+ "SO_ACCEPTCONN": true,
+ "SO_ACCEPTFILTER": true,
+ "SO_ATTACH_FILTER": true,
+ "SO_BINDANY": true,
+ "SO_BINDTODEVICE": true,
+ "SO_BINTIME": true,
+ "SO_BROADCAST": true,
+ "SO_BSDCOMPAT": true,
+ "SO_DEBUG": true,
+ "SO_DETACH_FILTER": true,
+ "SO_DOMAIN": true,
+ "SO_DONTROUTE": true,
+ "SO_DONTTRUNC": true,
+ "SO_ERROR": true,
+ "SO_KEEPALIVE": true,
+ "SO_LABEL": true,
+ "SO_LINGER": true,
+ "SO_LINGER_SEC": true,
+ "SO_LISTENINCQLEN": true,
+ "SO_LISTENQLEN": true,
+ "SO_LISTENQLIMIT": true,
+ "SO_MARK": true,
+ "SO_NETPROC": true,
+ "SO_NKE": true,
+ "SO_NOADDRERR": true,
+ "SO_NOHEADER": true,
+ "SO_NOSIGPIPE": true,
+ "SO_NOTIFYCONFLICT": true,
+ "SO_NO_CHECK": true,
+ "SO_NO_DDP": true,
+ "SO_NO_OFFLOAD": true,
+ "SO_NP_EXTENSIONS": true,
+ "SO_NREAD": true,
+ "SO_NWRITE": true,
+ "SO_OOBINLINE": true,
+ "SO_OVERFLOWED": true,
+ "SO_PASSCRED": true,
+ "SO_PASSSEC": true,
+ "SO_PEERCRED": true,
+ "SO_PEERLABEL": true,
+ "SO_PEERNAME": true,
+ "SO_PEERSEC": true,
+ "SO_PRIORITY": true,
+ "SO_PROTOCOL": true,
+ "SO_PROTOTYPE": true,
+ "SO_RANDOMPORT": true,
+ "SO_RCVBUF": true,
+ "SO_RCVBUFFORCE": true,
+ "SO_RCVLOWAT": true,
+ "SO_RCVTIMEO": true,
+ "SO_RESTRICTIONS": true,
+ "SO_RESTRICT_DENYIN": true,
+ "SO_RESTRICT_DENYOUT": true,
+ "SO_RESTRICT_DENYSET": true,
+ "SO_REUSEADDR": true,
+ "SO_REUSEPORT": true,
+ "SO_REUSESHAREUID": true,
+ "SO_RTABLE": true,
+ "SO_RXQ_OVFL": true,
+ "SO_SECURITY_AUTHENTICATION": true,
+ "SO_SECURITY_ENCRYPTION_NETWORK": true,
+ "SO_SECURITY_ENCRYPTION_TRANSPORT": true,
+ "SO_SETFIB": true,
+ "SO_SNDBUF": true,
+ "SO_SNDBUFFORCE": true,
+ "SO_SNDLOWAT": true,
+ "SO_SNDTIMEO": true,
+ "SO_SPLICE": true,
+ "SO_TIMESTAMP": true,
+ "SO_TIMESTAMPING": true,
+ "SO_TIMESTAMPNS": true,
+ "SO_TIMESTAMP_MONOTONIC": true,
+ "SO_TYPE": true,
+ "SO_UPCALLCLOSEWAIT": true,
+ "SO_UPDATE_ACCEPT_CONTEXT": true,
+ "SO_UPDATE_CONNECT_CONTEXT": true,
+ "SO_USELOOPBACK": true,
+ "SO_USER_COOKIE": true,
+ "SO_VENDOR": true,
+ "SO_WANTMORE": true,
+ "SO_WANTOOBFLAG": true,
+ "SSLExtraCertChainPolicyPara": true,
+ "STANDARD_RIGHTS_ALL": true,
+ "STANDARD_RIGHTS_EXECUTE": true,
+ "STANDARD_RIGHTS_READ": true,
+ "STANDARD_RIGHTS_REQUIRED": true,
+ "STANDARD_RIGHTS_WRITE": true,
+ "STARTF_USESHOWWINDOW": true,
+ "STARTF_USESTDHANDLES": true,
+ "STD_ERROR_HANDLE": true,
+ "STD_INPUT_HANDLE": true,
+ "STD_OUTPUT_HANDLE": true,
+ "SUBLANG_ENGLISH_US": true,
+ "SW_FORCEMINIMIZE": true,
+ "SW_HIDE": true,
+ "SW_MAXIMIZE": true,
+ "SW_MINIMIZE": true,
+ "SW_NORMAL": true,
+ "SW_RESTORE": true,
+ "SW_SHOW": true,
+ "SW_SHOWDEFAULT": true,
+ "SW_SHOWMAXIMIZED": true,
+ "SW_SHOWMINIMIZED": true,
+ "SW_SHOWMINNOACTIVE": true,
+ "SW_SHOWNA": true,
+ "SW_SHOWNOACTIVATE": true,
+ "SW_SHOWNORMAL": true,
+ "SYMBOLIC_LINK_FLAG_DIRECTORY": true,
+ "SYNCHRONIZE": true,
+ "SYSCTL_VERSION": true,
+ "SYSCTL_VERS_0": true,
+ "SYSCTL_VERS_1": true,
+ "SYSCTL_VERS_MASK": true,
+ "SYS_ABORT2": true,
+ "SYS_ACCEPT": true,
+ "SYS_ACCEPT4": true,
+ "SYS_ACCEPT_NOCANCEL": true,
+ "SYS_ACCESS": true,
+ "SYS_ACCESS_EXTENDED": true,
+ "SYS_ACCT": true,
+ "SYS_ADD_KEY": true,
+ "SYS_ADD_PROFIL": true,
+ "SYS_ADJFREQ": true,
+ "SYS_ADJTIME": true,
+ "SYS_ADJTIMEX": true,
+ "SYS_AFS_SYSCALL": true,
+ "SYS_AIO_CANCEL": true,
+ "SYS_AIO_ERROR": true,
+ "SYS_AIO_FSYNC": true,
+ "SYS_AIO_READ": true,
+ "SYS_AIO_RETURN": true,
+ "SYS_AIO_SUSPEND": true,
+ "SYS_AIO_SUSPEND_NOCANCEL": true,
+ "SYS_AIO_WRITE": true,
+ "SYS_ALARM": true,
+ "SYS_ARCH_PRCTL": true,
+ "SYS_ARM_FADVISE64_64": true,
+ "SYS_ARM_SYNC_FILE_RANGE": true,
+ "SYS_ATGETMSG": true,
+ "SYS_ATPGETREQ": true,
+ "SYS_ATPGETRSP": true,
+ "SYS_ATPSNDREQ": true,
+ "SYS_ATPSNDRSP": true,
+ "SYS_ATPUTMSG": true,
+ "SYS_ATSOCKET": true,
+ "SYS_AUDIT": true,
+ "SYS_AUDITCTL": true,
+ "SYS_AUDITON": true,
+ "SYS_AUDIT_SESSION_JOIN": true,
+ "SYS_AUDIT_SESSION_PORT": true,
+ "SYS_AUDIT_SESSION_SELF": true,
+ "SYS_BDFLUSH": true,
+ "SYS_BIND": true,
+ "SYS_BINDAT": true,
+ "SYS_BREAK": true,
+ "SYS_BRK": true,
+ "SYS_BSDTHREAD_CREATE": true,
+ "SYS_BSDTHREAD_REGISTER": true,
+ "SYS_BSDTHREAD_TERMINATE": true,
+ "SYS_CAPGET": true,
+ "SYS_CAPSET": true,
+ "SYS_CAP_ENTER": true,
+ "SYS_CAP_FCNTLS_GET": true,
+ "SYS_CAP_FCNTLS_LIMIT": true,
+ "SYS_CAP_GETMODE": true,
+ "SYS_CAP_GETRIGHTS": true,
+ "SYS_CAP_IOCTLS_GET": true,
+ "SYS_CAP_IOCTLS_LIMIT": true,
+ "SYS_CAP_NEW": true,
+ "SYS_CAP_RIGHTS_GET": true,
+ "SYS_CAP_RIGHTS_LIMIT": true,
+ "SYS_CHDIR": true,
+ "SYS_CHFLAGS": true,
+ "SYS_CHFLAGSAT": true,
+ "SYS_CHMOD": true,
+ "SYS_CHMOD_EXTENDED": true,
+ "SYS_CHOWN": true,
+ "SYS_CHOWN32": true,
+ "SYS_CHROOT": true,
+ "SYS_CHUD": true,
+ "SYS_CLOCK_ADJTIME": true,
+ "SYS_CLOCK_GETCPUCLOCKID2": true,
+ "SYS_CLOCK_GETRES": true,
+ "SYS_CLOCK_GETTIME": true,
+ "SYS_CLOCK_NANOSLEEP": true,
+ "SYS_CLOCK_SETTIME": true,
+ "SYS_CLONE": true,
+ "SYS_CLOSE": true,
+ "SYS_CLOSEFROM": true,
+ "SYS_CLOSE_NOCANCEL": true,
+ "SYS_CONNECT": true,
+ "SYS_CONNECTAT": true,
+ "SYS_CONNECT_NOCANCEL": true,
+ "SYS_COPYFILE": true,
+ "SYS_CPUSET": true,
+ "SYS_CPUSET_GETAFFINITY": true,
+ "SYS_CPUSET_GETID": true,
+ "SYS_CPUSET_SETAFFINITY": true,
+ "SYS_CPUSET_SETID": true,
+ "SYS_CREAT": true,
+ "SYS_CREATE_MODULE": true,
+ "SYS_CSOPS": true,
+ "SYS_DELETE": true,
+ "SYS_DELETE_MODULE": true,
+ "SYS_DUP": true,
+ "SYS_DUP2": true,
+ "SYS_DUP3": true,
+ "SYS_EACCESS": true,
+ "SYS_EPOLL_CREATE": true,
+ "SYS_EPOLL_CREATE1": true,
+ "SYS_EPOLL_CTL": true,
+ "SYS_EPOLL_CTL_OLD": true,
+ "SYS_EPOLL_PWAIT": true,
+ "SYS_EPOLL_WAIT": true,
+ "SYS_EPOLL_WAIT_OLD": true,
+ "SYS_EVENTFD": true,
+ "SYS_EVENTFD2": true,
+ "SYS_EXCHANGEDATA": true,
+ "SYS_EXECVE": true,
+ "SYS_EXIT": true,
+ "SYS_EXIT_GROUP": true,
+ "SYS_EXTATTRCTL": true,
+ "SYS_EXTATTR_DELETE_FD": true,
+ "SYS_EXTATTR_DELETE_FILE": true,
+ "SYS_EXTATTR_DELETE_LINK": true,
+ "SYS_EXTATTR_GET_FD": true,
+ "SYS_EXTATTR_GET_FILE": true,
+ "SYS_EXTATTR_GET_LINK": true,
+ "SYS_EXTATTR_LIST_FD": true,
+ "SYS_EXTATTR_LIST_FILE": true,
+ "SYS_EXTATTR_LIST_LINK": true,
+ "SYS_EXTATTR_SET_FD": true,
+ "SYS_EXTATTR_SET_FILE": true,
+ "SYS_EXTATTR_SET_LINK": true,
+ "SYS_FACCESSAT": true,
+ "SYS_FADVISE64": true,
+ "SYS_FADVISE64_64": true,
+ "SYS_FALLOCATE": true,
+ "SYS_FANOTIFY_INIT": true,
+ "SYS_FANOTIFY_MARK": true,
+ "SYS_FCHDIR": true,
+ "SYS_FCHFLAGS": true,
+ "SYS_FCHMOD": true,
+ "SYS_FCHMODAT": true,
+ "SYS_FCHMOD_EXTENDED": true,
+ "SYS_FCHOWN": true,
+ "SYS_FCHOWN32": true,
+ "SYS_FCHOWNAT": true,
+ "SYS_FCHROOT": true,
+ "SYS_FCNTL": true,
+ "SYS_FCNTL64": true,
+ "SYS_FCNTL_NOCANCEL": true,
+ "SYS_FDATASYNC": true,
+ "SYS_FEXECVE": true,
+ "SYS_FFCLOCK_GETCOUNTER": true,
+ "SYS_FFCLOCK_GETESTIMATE": true,
+ "SYS_FFCLOCK_SETESTIMATE": true,
+ "SYS_FFSCTL": true,
+ "SYS_FGETATTRLIST": true,
+ "SYS_FGETXATTR": true,
+ "SYS_FHOPEN": true,
+ "SYS_FHSTAT": true,
+ "SYS_FHSTATFS": true,
+ "SYS_FILEPORT_MAKEFD": true,
+ "SYS_FILEPORT_MAKEPORT": true,
+ "SYS_FKTRACE": true,
+ "SYS_FLISTXATTR": true,
+ "SYS_FLOCK": true,
+ "SYS_FORK": true,
+ "SYS_FPATHCONF": true,
+ "SYS_FREEBSD6_FTRUNCATE": true,
+ "SYS_FREEBSD6_LSEEK": true,
+ "SYS_FREEBSD6_MMAP": true,
+ "SYS_FREEBSD6_PREAD": true,
+ "SYS_FREEBSD6_PWRITE": true,
+ "SYS_FREEBSD6_TRUNCATE": true,
+ "SYS_FREMOVEXATTR": true,
+ "SYS_FSCTL": true,
+ "SYS_FSETATTRLIST": true,
+ "SYS_FSETXATTR": true,
+ "SYS_FSGETPATH": true,
+ "SYS_FSTAT": true,
+ "SYS_FSTAT64": true,
+ "SYS_FSTAT64_EXTENDED": true,
+ "SYS_FSTATAT": true,
+ "SYS_FSTATAT64": true,
+ "SYS_FSTATFS": true,
+ "SYS_FSTATFS64": true,
+ "SYS_FSTATV": true,
+ "SYS_FSTATVFS1": true,
+ "SYS_FSTAT_EXTENDED": true,
+ "SYS_FSYNC": true,
+ "SYS_FSYNC_NOCANCEL": true,
+ "SYS_FSYNC_RANGE": true,
+ "SYS_FTIME": true,
+ "SYS_FTRUNCATE": true,
+ "SYS_FTRUNCATE64": true,
+ "SYS_FUTEX": true,
+ "SYS_FUTIMENS": true,
+ "SYS_FUTIMES": true,
+ "SYS_FUTIMESAT": true,
+ "SYS_GETATTRLIST": true,
+ "SYS_GETAUDIT": true,
+ "SYS_GETAUDIT_ADDR": true,
+ "SYS_GETAUID": true,
+ "SYS_GETCONTEXT": true,
+ "SYS_GETCPU": true,
+ "SYS_GETCWD": true,
+ "SYS_GETDENTS": true,
+ "SYS_GETDENTS64": true,
+ "SYS_GETDIRENTRIES": true,
+ "SYS_GETDIRENTRIES64": true,
+ "SYS_GETDIRENTRIESATTR": true,
+ "SYS_GETDTABLECOUNT": true,
+ "SYS_GETDTABLESIZE": true,
+ "SYS_GETEGID": true,
+ "SYS_GETEGID32": true,
+ "SYS_GETEUID": true,
+ "SYS_GETEUID32": true,
+ "SYS_GETFH": true,
+ "SYS_GETFSSTAT": true,
+ "SYS_GETFSSTAT64": true,
+ "SYS_GETGID": true,
+ "SYS_GETGID32": true,
+ "SYS_GETGROUPS": true,
+ "SYS_GETGROUPS32": true,
+ "SYS_GETHOSTUUID": true,
+ "SYS_GETITIMER": true,
+ "SYS_GETLCID": true,
+ "SYS_GETLOGIN": true,
+ "SYS_GETLOGINCLASS": true,
+ "SYS_GETPEERNAME": true,
+ "SYS_GETPGID": true,
+ "SYS_GETPGRP": true,
+ "SYS_GETPID": true,
+ "SYS_GETPMSG": true,
+ "SYS_GETPPID": true,
+ "SYS_GETPRIORITY": true,
+ "SYS_GETRESGID": true,
+ "SYS_GETRESGID32": true,
+ "SYS_GETRESUID": true,
+ "SYS_GETRESUID32": true,
+ "SYS_GETRLIMIT": true,
+ "SYS_GETRTABLE": true,
+ "SYS_GETRUSAGE": true,
+ "SYS_GETSGROUPS": true,
+ "SYS_GETSID": true,
+ "SYS_GETSOCKNAME": true,
+ "SYS_GETSOCKOPT": true,
+ "SYS_GETTHRID": true,
+ "SYS_GETTID": true,
+ "SYS_GETTIMEOFDAY": true,
+ "SYS_GETUID": true,
+ "SYS_GETUID32": true,
+ "SYS_GETVFSSTAT": true,
+ "SYS_GETWGROUPS": true,
+ "SYS_GETXATTR": true,
+ "SYS_GET_KERNEL_SYMS": true,
+ "SYS_GET_MEMPOLICY": true,
+ "SYS_GET_ROBUST_LIST": true,
+ "SYS_GET_THREAD_AREA": true,
+ "SYS_GTTY": true,
+ "SYS_IDENTITYSVC": true,
+ "SYS_IDLE": true,
+ "SYS_INITGROUPS": true,
+ "SYS_INIT_MODULE": true,
+ "SYS_INOTIFY_ADD_WATCH": true,
+ "SYS_INOTIFY_INIT": true,
+ "SYS_INOTIFY_INIT1": true,
+ "SYS_INOTIFY_RM_WATCH": true,
+ "SYS_IOCTL": true,
+ "SYS_IOPERM": true,
+ "SYS_IOPL": true,
+ "SYS_IOPOLICYSYS": true,
+ "SYS_IOPRIO_GET": true,
+ "SYS_IOPRIO_SET": true,
+ "SYS_IO_CANCEL": true,
+ "SYS_IO_DESTROY": true,
+ "SYS_IO_GETEVENTS": true,
+ "SYS_IO_SETUP": true,
+ "SYS_IO_SUBMIT": true,
+ "SYS_IPC": true,
+ "SYS_ISSETUGID": true,
+ "SYS_JAIL": true,
+ "SYS_JAIL_ATTACH": true,
+ "SYS_JAIL_GET": true,
+ "SYS_JAIL_REMOVE": true,
+ "SYS_JAIL_SET": true,
+ "SYS_KDEBUG_TRACE": true,
+ "SYS_KENV": true,
+ "SYS_KEVENT": true,
+ "SYS_KEVENT64": true,
+ "SYS_KEXEC_LOAD": true,
+ "SYS_KEYCTL": true,
+ "SYS_KILL": true,
+ "SYS_KLDFIND": true,
+ "SYS_KLDFIRSTMOD": true,
+ "SYS_KLDLOAD": true,
+ "SYS_KLDNEXT": true,
+ "SYS_KLDSTAT": true,
+ "SYS_KLDSYM": true,
+ "SYS_KLDUNLOAD": true,
+ "SYS_KLDUNLOADF": true,
+ "SYS_KQUEUE": true,
+ "SYS_KQUEUE1": true,
+ "SYS_KTIMER_CREATE": true,
+ "SYS_KTIMER_DELETE": true,
+ "SYS_KTIMER_GETOVERRUN": true,
+ "SYS_KTIMER_GETTIME": true,
+ "SYS_KTIMER_SETTIME": true,
+ "SYS_KTRACE": true,
+ "SYS_LCHFLAGS": true,
+ "SYS_LCHMOD": true,
+ "SYS_LCHOWN": true,
+ "SYS_LCHOWN32": true,
+ "SYS_LGETFH": true,
+ "SYS_LGETXATTR": true,
+ "SYS_LINK": true,
+ "SYS_LINKAT": true,
+ "SYS_LIO_LISTIO": true,
+ "SYS_LISTEN": true,
+ "SYS_LISTXATTR": true,
+ "SYS_LLISTXATTR": true,
+ "SYS_LOCK": true,
+ "SYS_LOOKUP_DCOOKIE": true,
+ "SYS_LPATHCONF": true,
+ "SYS_LREMOVEXATTR": true,
+ "SYS_LSEEK": true,
+ "SYS_LSETXATTR": true,
+ "SYS_LSTAT": true,
+ "SYS_LSTAT64": true,
+ "SYS_LSTAT64_EXTENDED": true,
+ "SYS_LSTATV": true,
+ "SYS_LSTAT_EXTENDED": true,
+ "SYS_LUTIMES": true,
+ "SYS_MAC_SYSCALL": true,
+ "SYS_MADVISE": true,
+ "SYS_MADVISE1": true,
+ "SYS_MAXSYSCALL": true,
+ "SYS_MBIND": true,
+ "SYS_MIGRATE_PAGES": true,
+ "SYS_MINCORE": true,
+ "SYS_MINHERIT": true,
+ "SYS_MKCOMPLEX": true,
+ "SYS_MKDIR": true,
+ "SYS_MKDIRAT": true,
+ "SYS_MKDIR_EXTENDED": true,
+ "SYS_MKFIFO": true,
+ "SYS_MKFIFOAT": true,
+ "SYS_MKFIFO_EXTENDED": true,
+ "SYS_MKNOD": true,
+ "SYS_MKNODAT": true,
+ "SYS_MLOCK": true,
+ "SYS_MLOCKALL": true,
+ "SYS_MMAP": true,
+ "SYS_MMAP2": true,
+ "SYS_MODCTL": true,
+ "SYS_MODFIND": true,
+ "SYS_MODFNEXT": true,
+ "SYS_MODIFY_LDT": true,
+ "SYS_MODNEXT": true,
+ "SYS_MODSTAT": true,
+ "SYS_MODWATCH": true,
+ "SYS_MOUNT": true,
+ "SYS_MOVE_PAGES": true,
+ "SYS_MPROTECT": true,
+ "SYS_MPX": true,
+ "SYS_MQUERY": true,
+ "SYS_MQ_GETSETATTR": true,
+ "SYS_MQ_NOTIFY": true,
+ "SYS_MQ_OPEN": true,
+ "SYS_MQ_TIMEDRECEIVE": true,
+ "SYS_MQ_TIMEDSEND": true,
+ "SYS_MQ_UNLINK": true,
+ "SYS_MREMAP": true,
+ "SYS_MSGCTL": true,
+ "SYS_MSGGET": true,
+ "SYS_MSGRCV": true,
+ "SYS_MSGRCV_NOCANCEL": true,
+ "SYS_MSGSND": true,
+ "SYS_MSGSND_NOCANCEL": true,
+ "SYS_MSGSYS": true,
+ "SYS_MSYNC": true,
+ "SYS_MSYNC_NOCANCEL": true,
+ "SYS_MUNLOCK": true,
+ "SYS_MUNLOCKALL": true,
+ "SYS_MUNMAP": true,
+ "SYS_NAME_TO_HANDLE_AT": true,
+ "SYS_NANOSLEEP": true,
+ "SYS_NEWFSTATAT": true,
+ "SYS_NFSCLNT": true,
+ "SYS_NFSSERVCTL": true,
+ "SYS_NFSSVC": true,
+ "SYS_NFSTAT": true,
+ "SYS_NICE": true,
+ "SYS_NLSTAT": true,
+ "SYS_NMOUNT": true,
+ "SYS_NSTAT": true,
+ "SYS_NTP_ADJTIME": true,
+ "SYS_NTP_GETTIME": true,
+ "SYS_OABI_SYSCALL_BASE": true,
+ "SYS_OBREAK": true,
+ "SYS_OLDFSTAT": true,
+ "SYS_OLDLSTAT": true,
+ "SYS_OLDOLDUNAME": true,
+ "SYS_OLDSTAT": true,
+ "SYS_OLDUNAME": true,
+ "SYS_OPEN": true,
+ "SYS_OPENAT": true,
+ "SYS_OPENBSD_POLL": true,
+ "SYS_OPEN_BY_HANDLE_AT": true,
+ "SYS_OPEN_EXTENDED": true,
+ "SYS_OPEN_NOCANCEL": true,
+ "SYS_OVADVISE": true,
+ "SYS_PACCEPT": true,
+ "SYS_PATHCONF": true,
+ "SYS_PAUSE": true,
+ "SYS_PCICONFIG_IOBASE": true,
+ "SYS_PCICONFIG_READ": true,
+ "SYS_PCICONFIG_WRITE": true,
+ "SYS_PDFORK": true,
+ "SYS_PDGETPID": true,
+ "SYS_PDKILL": true,
+ "SYS_PERF_EVENT_OPEN": true,
+ "SYS_PERSONALITY": true,
+ "SYS_PID_HIBERNATE": true,
+ "SYS_PID_RESUME": true,
+ "SYS_PID_SHUTDOWN_SOCKETS": true,
+ "SYS_PID_SUSPEND": true,
+ "SYS_PIPE": true,
+ "SYS_PIPE2": true,
+ "SYS_PIVOT_ROOT": true,
+ "SYS_PMC_CONTROL": true,
+ "SYS_PMC_GET_INFO": true,
+ "SYS_POLL": true,
+ "SYS_POLLTS": true,
+ "SYS_POLL_NOCANCEL": true,
+ "SYS_POSIX_FADVISE": true,
+ "SYS_POSIX_FALLOCATE": true,
+ "SYS_POSIX_OPENPT": true,
+ "SYS_POSIX_SPAWN": true,
+ "SYS_PPOLL": true,
+ "SYS_PRCTL": true,
+ "SYS_PREAD": true,
+ "SYS_PREAD64": true,
+ "SYS_PREADV": true,
+ "SYS_PREAD_NOCANCEL": true,
+ "SYS_PRLIMIT64": true,
+ "SYS_PROCCTL": true,
+ "SYS_PROCESS_POLICY": true,
+ "SYS_PROCESS_VM_READV": true,
+ "SYS_PROCESS_VM_WRITEV": true,
+ "SYS_PROC_INFO": true,
+ "SYS_PROF": true,
+ "SYS_PROFIL": true,
+ "SYS_PSELECT": true,
+ "SYS_PSELECT6": true,
+ "SYS_PSET_ASSIGN": true,
+ "SYS_PSET_CREATE": true,
+ "SYS_PSET_DESTROY": true,
+ "SYS_PSYNCH_CVBROAD": true,
+ "SYS_PSYNCH_CVCLRPREPOST": true,
+ "SYS_PSYNCH_CVSIGNAL": true,
+ "SYS_PSYNCH_CVWAIT": true,
+ "SYS_PSYNCH_MUTEXDROP": true,
+ "SYS_PSYNCH_MUTEXWAIT": true,
+ "SYS_PSYNCH_RW_DOWNGRADE": true,
+ "SYS_PSYNCH_RW_LONGRDLOCK": true,
+ "SYS_PSYNCH_RW_RDLOCK": true,
+ "SYS_PSYNCH_RW_UNLOCK": true,
+ "SYS_PSYNCH_RW_UNLOCK2": true,
+ "SYS_PSYNCH_RW_UPGRADE": true,
+ "SYS_PSYNCH_RW_WRLOCK": true,
+ "SYS_PSYNCH_RW_YIELDWRLOCK": true,
+ "SYS_PTRACE": true,
+ "SYS_PUTPMSG": true,
+ "SYS_PWRITE": true,
+ "SYS_PWRITE64": true,
+ "SYS_PWRITEV": true,
+ "SYS_PWRITE_NOCANCEL": true,
+ "SYS_QUERY_MODULE": true,
+ "SYS_QUOTACTL": true,
+ "SYS_RASCTL": true,
+ "SYS_RCTL_ADD_RULE": true,
+ "SYS_RCTL_GET_LIMITS": true,
+ "SYS_RCTL_GET_RACCT": true,
+ "SYS_RCTL_GET_RULES": true,
+ "SYS_RCTL_REMOVE_RULE": true,
+ "SYS_READ": true,
+ "SYS_READAHEAD": true,
+ "SYS_READDIR": true,
+ "SYS_READLINK": true,
+ "SYS_READLINKAT": true,
+ "SYS_READV": true,
+ "SYS_READV_NOCANCEL": true,
+ "SYS_READ_NOCANCEL": true,
+ "SYS_REBOOT": true,
+ "SYS_RECV": true,
+ "SYS_RECVFROM": true,
+ "SYS_RECVFROM_NOCANCEL": true,
+ "SYS_RECVMMSG": true,
+ "SYS_RECVMSG": true,
+ "SYS_RECVMSG_NOCANCEL": true,
+ "SYS_REMAP_FILE_PAGES": true,
+ "SYS_REMOVEXATTR": true,
+ "SYS_RENAME": true,
+ "SYS_RENAMEAT": true,
+ "SYS_REQUEST_KEY": true,
+ "SYS_RESTART_SYSCALL": true,
+ "SYS_REVOKE": true,
+ "SYS_RFORK": true,
+ "SYS_RMDIR": true,
+ "SYS_RTPRIO": true,
+ "SYS_RTPRIO_THREAD": true,
+ "SYS_RT_SIGACTION": true,
+ "SYS_RT_SIGPENDING": true,
+ "SYS_RT_SIGPROCMASK": true,
+ "SYS_RT_SIGQUEUEINFO": true,
+ "SYS_RT_SIGRETURN": true,
+ "SYS_RT_SIGSUSPEND": true,
+ "SYS_RT_SIGTIMEDWAIT": true,
+ "SYS_RT_TGSIGQUEUEINFO": true,
+ "SYS_SBRK": true,
+ "SYS_SCHED_GETAFFINITY": true,
+ "SYS_SCHED_GETPARAM": true,
+ "SYS_SCHED_GETSCHEDULER": true,
+ "SYS_SCHED_GET_PRIORITY_MAX": true,
+ "SYS_SCHED_GET_PRIORITY_MIN": true,
+ "SYS_SCHED_RR_GET_INTERVAL": true,
+ "SYS_SCHED_SETAFFINITY": true,
+ "SYS_SCHED_SETPARAM": true,
+ "SYS_SCHED_SETSCHEDULER": true,
+ "SYS_SCHED_YIELD": true,
+ "SYS_SCTP_GENERIC_RECVMSG": true,
+ "SYS_SCTP_GENERIC_SENDMSG": true,
+ "SYS_SCTP_GENERIC_SENDMSG_IOV": true,
+ "SYS_SCTP_PEELOFF": true,
+ "SYS_SEARCHFS": true,
+ "SYS_SECURITY": true,
+ "SYS_SELECT": true,
+ "SYS_SELECT_NOCANCEL": true,
+ "SYS_SEMCONFIG": true,
+ "SYS_SEMCTL": true,
+ "SYS_SEMGET": true,
+ "SYS_SEMOP": true,
+ "SYS_SEMSYS": true,
+ "SYS_SEMTIMEDOP": true,
+ "SYS_SEM_CLOSE": true,
+ "SYS_SEM_DESTROY": true,
+ "SYS_SEM_GETVALUE": true,
+ "SYS_SEM_INIT": true,
+ "SYS_SEM_OPEN": true,
+ "SYS_SEM_POST": true,
+ "SYS_SEM_TRYWAIT": true,
+ "SYS_SEM_UNLINK": true,
+ "SYS_SEM_WAIT": true,
+ "SYS_SEM_WAIT_NOCANCEL": true,
+ "SYS_SEND": true,
+ "SYS_SENDFILE": true,
+ "SYS_SENDFILE64": true,
+ "SYS_SENDMMSG": true,
+ "SYS_SENDMSG": true,
+ "SYS_SENDMSG_NOCANCEL": true,
+ "SYS_SENDTO": true,
+ "SYS_SENDTO_NOCANCEL": true,
+ "SYS_SETATTRLIST": true,
+ "SYS_SETAUDIT": true,
+ "SYS_SETAUDIT_ADDR": true,
+ "SYS_SETAUID": true,
+ "SYS_SETCONTEXT": true,
+ "SYS_SETDOMAINNAME": true,
+ "SYS_SETEGID": true,
+ "SYS_SETEUID": true,
+ "SYS_SETFIB": true,
+ "SYS_SETFSGID": true,
+ "SYS_SETFSGID32": true,
+ "SYS_SETFSUID": true,
+ "SYS_SETFSUID32": true,
+ "SYS_SETGID": true,
+ "SYS_SETGID32": true,
+ "SYS_SETGROUPS": true,
+ "SYS_SETGROUPS32": true,
+ "SYS_SETHOSTNAME": true,
+ "SYS_SETITIMER": true,
+ "SYS_SETLCID": true,
+ "SYS_SETLOGIN": true,
+ "SYS_SETLOGINCLASS": true,
+ "SYS_SETNS": true,
+ "SYS_SETPGID": true,
+ "SYS_SETPRIORITY": true,
+ "SYS_SETPRIVEXEC": true,
+ "SYS_SETREGID": true,
+ "SYS_SETREGID32": true,
+ "SYS_SETRESGID": true,
+ "SYS_SETRESGID32": true,
+ "SYS_SETRESUID": true,
+ "SYS_SETRESUID32": true,
+ "SYS_SETREUID": true,
+ "SYS_SETREUID32": true,
+ "SYS_SETRLIMIT": true,
+ "SYS_SETRTABLE": true,
+ "SYS_SETSGROUPS": true,
+ "SYS_SETSID": true,
+ "SYS_SETSOCKOPT": true,
+ "SYS_SETTID": true,
+ "SYS_SETTID_WITH_PID": true,
+ "SYS_SETTIMEOFDAY": true,
+ "SYS_SETUID": true,
+ "SYS_SETUID32": true,
+ "SYS_SETWGROUPS": true,
+ "SYS_SETXATTR": true,
+ "SYS_SET_MEMPOLICY": true,
+ "SYS_SET_ROBUST_LIST": true,
+ "SYS_SET_THREAD_AREA": true,
+ "SYS_SET_TID_ADDRESS": true,
+ "SYS_SGETMASK": true,
+ "SYS_SHARED_REGION_CHECK_NP": true,
+ "SYS_SHARED_REGION_MAP_AND_SLIDE_NP": true,
+ "SYS_SHMAT": true,
+ "SYS_SHMCTL": true,
+ "SYS_SHMDT": true,
+ "SYS_SHMGET": true,
+ "SYS_SHMSYS": true,
+ "SYS_SHM_OPEN": true,
+ "SYS_SHM_UNLINK": true,
+ "SYS_SHUTDOWN": true,
+ "SYS_SIGACTION": true,
+ "SYS_SIGALTSTACK": true,
+ "SYS_SIGNAL": true,
+ "SYS_SIGNALFD": true,
+ "SYS_SIGNALFD4": true,
+ "SYS_SIGPENDING": true,
+ "SYS_SIGPROCMASK": true,
+ "SYS_SIGQUEUE": true,
+ "SYS_SIGQUEUEINFO": true,
+ "SYS_SIGRETURN": true,
+ "SYS_SIGSUSPEND": true,
+ "SYS_SIGSUSPEND_NOCANCEL": true,
+ "SYS_SIGTIMEDWAIT": true,
+ "SYS_SIGWAIT": true,
+ "SYS_SIGWAITINFO": true,
+ "SYS_SOCKET": true,
+ "SYS_SOCKETCALL": true,
+ "SYS_SOCKETPAIR": true,
+ "SYS_SPLICE": true,
+ "SYS_SSETMASK": true,
+ "SYS_SSTK": true,
+ "SYS_STACK_SNAPSHOT": true,
+ "SYS_STAT": true,
+ "SYS_STAT64": true,
+ "SYS_STAT64_EXTENDED": true,
+ "SYS_STATFS": true,
+ "SYS_STATFS64": true,
+ "SYS_STATV": true,
+ "SYS_STATVFS1": true,
+ "SYS_STAT_EXTENDED": true,
+ "SYS_STIME": true,
+ "SYS_STTY": true,
+ "SYS_SWAPCONTEXT": true,
+ "SYS_SWAPCTL": true,
+ "SYS_SWAPOFF": true,
+ "SYS_SWAPON": true,
+ "SYS_SYMLINK": true,
+ "SYS_SYMLINKAT": true,
+ "SYS_SYNC": true,
+ "SYS_SYNCFS": true,
+ "SYS_SYNC_FILE_RANGE": true,
+ "SYS_SYSARCH": true,
+ "SYS_SYSCALL": true,
+ "SYS_SYSCALL_BASE": true,
+ "SYS_SYSFS": true,
+ "SYS_SYSINFO": true,
+ "SYS_SYSLOG": true,
+ "SYS_TEE": true,
+ "SYS_TGKILL": true,
+ "SYS_THREAD_SELFID": true,
+ "SYS_THR_CREATE": true,
+ "SYS_THR_EXIT": true,
+ "SYS_THR_KILL": true,
+ "SYS_THR_KILL2": true,
+ "SYS_THR_NEW": true,
+ "SYS_THR_SELF": true,
+ "SYS_THR_SET_NAME": true,
+ "SYS_THR_SUSPEND": true,
+ "SYS_THR_WAKE": true,
+ "SYS_TIME": true,
+ "SYS_TIMERFD_CREATE": true,
+ "SYS_TIMERFD_GETTIME": true,
+ "SYS_TIMERFD_SETTIME": true,
+ "SYS_TIMER_CREATE": true,
+ "SYS_TIMER_DELETE": true,
+ "SYS_TIMER_GETOVERRUN": true,
+ "SYS_TIMER_GETTIME": true,
+ "SYS_TIMER_SETTIME": true,
+ "SYS_TIMES": true,
+ "SYS_TKILL": true,
+ "SYS_TRUNCATE": true,
+ "SYS_TRUNCATE64": true,
+ "SYS_TUXCALL": true,
+ "SYS_UGETRLIMIT": true,
+ "SYS_ULIMIT": true,
+ "SYS_UMASK": true,
+ "SYS_UMASK_EXTENDED": true,
+ "SYS_UMOUNT": true,
+ "SYS_UMOUNT2": true,
+ "SYS_UNAME": true,
+ "SYS_UNDELETE": true,
+ "SYS_UNLINK": true,
+ "SYS_UNLINKAT": true,
+ "SYS_UNMOUNT": true,
+ "SYS_UNSHARE": true,
+ "SYS_USELIB": true,
+ "SYS_USTAT": true,
+ "SYS_UTIME": true,
+ "SYS_UTIMENSAT": true,
+ "SYS_UTIMES": true,
+ "SYS_UTRACE": true,
+ "SYS_UUIDGEN": true,
+ "SYS_VADVISE": true,
+ "SYS_VFORK": true,
+ "SYS_VHANGUP": true,
+ "SYS_VM86": true,
+ "SYS_VM86OLD": true,
+ "SYS_VMSPLICE": true,
+ "SYS_VM_PRESSURE_MONITOR": true,
+ "SYS_VSERVER": true,
+ "SYS_WAIT4": true,
+ "SYS_WAIT4_NOCANCEL": true,
+ "SYS_WAIT6": true,
+ "SYS_WAITEVENT": true,
+ "SYS_WAITID": true,
+ "SYS_WAITID_NOCANCEL": true,
+ "SYS_WAITPID": true,
+ "SYS_WATCHEVENT": true,
+ "SYS_WORKQ_KERNRETURN": true,
+ "SYS_WORKQ_OPEN": true,
+ "SYS_WRITE": true,
+ "SYS_WRITEV": true,
+ "SYS_WRITEV_NOCANCEL": true,
+ "SYS_WRITE_NOCANCEL": true,
+ "SYS_YIELD": true,
+ "SYS__LLSEEK": true,
+ "SYS__LWP_CONTINUE": true,
+ "SYS__LWP_CREATE": true,
+ "SYS__LWP_CTL": true,
+ "SYS__LWP_DETACH": true,
+ "SYS__LWP_EXIT": true,
+ "SYS__LWP_GETNAME": true,
+ "SYS__LWP_GETPRIVATE": true,
+ "SYS__LWP_KILL": true,
+ "SYS__LWP_PARK": true,
+ "SYS__LWP_SELF": true,
+ "SYS__LWP_SETNAME": true,
+ "SYS__LWP_SETPRIVATE": true,
+ "SYS__LWP_SUSPEND": true,
+ "SYS__LWP_UNPARK": true,
+ "SYS__LWP_UNPARK_ALL": true,
+ "SYS__LWP_WAIT": true,
+ "SYS__LWP_WAKEUP": true,
+ "SYS__NEWSELECT": true,
+ "SYS__PSET_BIND": true,
+ "SYS__SCHED_GETAFFINITY": true,
+ "SYS__SCHED_GETPARAM": true,
+ "SYS__SCHED_SETAFFINITY": true,
+ "SYS__SCHED_SETPARAM": true,
+ "SYS__SYSCTL": true,
+ "SYS__UMTX_LOCK": true,
+ "SYS__UMTX_OP": true,
+ "SYS__UMTX_UNLOCK": true,
+ "SYS___ACL_ACLCHECK_FD": true,
+ "SYS___ACL_ACLCHECK_FILE": true,
+ "SYS___ACL_ACLCHECK_LINK": true,
+ "SYS___ACL_DELETE_FD": true,
+ "SYS___ACL_DELETE_FILE": true,
+ "SYS___ACL_DELETE_LINK": true,
+ "SYS___ACL_GET_FD": true,
+ "SYS___ACL_GET_FILE": true,
+ "SYS___ACL_GET_LINK": true,
+ "SYS___ACL_SET_FD": true,
+ "SYS___ACL_SET_FILE": true,
+ "SYS___ACL_SET_LINK": true,
+ "SYS___CLONE": true,
+ "SYS___DISABLE_THREADSIGNAL": true,
+ "SYS___GETCWD": true,
+ "SYS___GETLOGIN": true,
+ "SYS___GET_TCB": true,
+ "SYS___MAC_EXECVE": true,
+ "SYS___MAC_GETFSSTAT": true,
+ "SYS___MAC_GET_FD": true,
+ "SYS___MAC_GET_FILE": true,
+ "SYS___MAC_GET_LCID": true,
+ "SYS___MAC_GET_LCTX": true,
+ "SYS___MAC_GET_LINK": true,
+ "SYS___MAC_GET_MOUNT": true,
+ "SYS___MAC_GET_PID": true,
+ "SYS___MAC_GET_PROC": true,
+ "SYS___MAC_MOUNT": true,
+ "SYS___MAC_SET_FD": true,
+ "SYS___MAC_SET_FILE": true,
+ "SYS___MAC_SET_LCTX": true,
+ "SYS___MAC_SET_LINK": true,
+ "SYS___MAC_SET_PROC": true,
+ "SYS___MAC_SYSCALL": true,
+ "SYS___OLD_SEMWAIT_SIGNAL": true,
+ "SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL": true,
+ "SYS___POSIX_CHOWN": true,
+ "SYS___POSIX_FCHOWN": true,
+ "SYS___POSIX_LCHOWN": true,
+ "SYS___POSIX_RENAME": true,
+ "SYS___PTHREAD_CANCELED": true,
+ "SYS___PTHREAD_CHDIR": true,
+ "SYS___PTHREAD_FCHDIR": true,
+ "SYS___PTHREAD_KILL": true,
+ "SYS___PTHREAD_MARKCANCEL": true,
+ "SYS___PTHREAD_SIGMASK": true,
+ "SYS___QUOTACTL": true,
+ "SYS___SEMCTL": true,
+ "SYS___SEMWAIT_SIGNAL": true,
+ "SYS___SEMWAIT_SIGNAL_NOCANCEL": true,
+ "SYS___SETLOGIN": true,
+ "SYS___SETUGID": true,
+ "SYS___SET_TCB": true,
+ "SYS___SIGACTION_SIGTRAMP": true,
+ "SYS___SIGTIMEDWAIT": true,
+ "SYS___SIGWAIT": true,
+ "SYS___SIGWAIT_NOCANCEL": true,
+ "SYS___SYSCTL": true,
+ "SYS___TFORK": true,
+ "SYS___THREXIT": true,
+ "SYS___THRSIGDIVERT": true,
+ "SYS___THRSLEEP": true,
+ "SYS___THRWAKEUP": true,
+ "S_ARCH1": true,
+ "S_ARCH2": true,
+ "S_BLKSIZE": true,
+ "S_IEXEC": true,
+ "S_IFBLK": true,
+ "S_IFCHR": true,
+ "S_IFDIR": true,
+ "S_IFIFO": true,
+ "S_IFLNK": true,
+ "S_IFMT": true,
+ "S_IFREG": true,
+ "S_IFSOCK": true,
+ "S_IFWHT": true,
+ "S_IREAD": true,
+ "S_IRGRP": true,
+ "S_IROTH": true,
+ "S_IRUSR": true,
+ "S_IRWXG": true,
+ "S_IRWXO": true,
+ "S_IRWXU": true,
+ "S_ISGID": true,
+ "S_ISTXT": true,
+ "S_ISUID": true,
+ "S_ISVTX": true,
+ "S_IWGRP": true,
+ "S_IWOTH": true,
+ "S_IWRITE": true,
+ "S_IWUSR": true,
+ "S_IXGRP": true,
+ "S_IXOTH": true,
+ "S_IXUSR": true,
+ "S_LOGIN_SET": true,
+ "SecurityAttributes": true,
+ "Seek": true,
+ "Select": true,
+ "Sendfile": true,
+ "Sendmsg": true,
+ "SendmsgN": true,
+ "Sendto": true,
+ "Servent": true,
+ "SetBpf": true,
+ "SetBpfBuflen": true,
+ "SetBpfDatalink": true,
+ "SetBpfHeadercmpl": true,
+ "SetBpfImmediate": true,
+ "SetBpfInterface": true,
+ "SetBpfPromisc": true,
+ "SetBpfTimeout": true,
+ "SetCurrentDirectory": true,
+ "SetEndOfFile": true,
+ "SetEnvironmentVariable": true,
+ "SetFileAttributes": true,
+ "SetFileCompletionNotificationModes": true,
+ "SetFilePointer": true,
+ "SetFileTime": true,
+ "SetHandleInformation": true,
+ "SetKevent": true,
+ "SetLsfPromisc": true,
+ "SetNonblock": true,
+ "Setdomainname": true,
+ "Setegid": true,
+ "Setenv": true,
+ "Seteuid": true,
+ "Setfsgid": true,
+ "Setfsuid": true,
+ "Setgid": true,
+ "Setgroups": true,
+ "Sethostname": true,
+ "Setlogin": true,
+ "Setpgid": true,
+ "Setpriority": true,
+ "Setprivexec": true,
+ "Setregid": true,
+ "Setresgid": true,
+ "Setresuid": true,
+ "Setreuid": true,
+ "Setrlimit": true,
+ "Setsid": true,
+ "Setsockopt": true,
+ "SetsockoptByte": true,
+ "SetsockoptICMPv6Filter": true,
+ "SetsockoptIPMreq": true,
+ "SetsockoptIPMreqn": true,
+ "SetsockoptIPv6Mreq": true,
+ "SetsockoptInet4Addr": true,
+ "SetsockoptInt": true,
+ "SetsockoptLinger": true,
+ "SetsockoptString": true,
+ "SetsockoptTimeval": true,
+ "Settimeofday": true,
+ "Setuid": true,
+ "Setxattr": true,
+ "Shutdown": true,
+ "SidTypeAlias": true,
+ "SidTypeComputer": true,
+ "SidTypeDeletedAccount": true,
+ "SidTypeDomain": true,
+ "SidTypeGroup": true,
+ "SidTypeInvalid": true,
+ "SidTypeLabel": true,
+ "SidTypeUnknown": true,
+ "SidTypeUser": true,
+ "SidTypeWellKnownGroup": true,
+ "Signal": true,
+ "SizeofBpfHdr": true,
+ "SizeofBpfInsn": true,
+ "SizeofBpfProgram": true,
+ "SizeofBpfStat": true,
+ "SizeofBpfVersion": true,
+ "SizeofBpfZbuf": true,
+ "SizeofBpfZbufHeader": true,
+ "SizeofCmsghdr": true,
+ "SizeofICMPv6Filter": true,
+ "SizeofIPMreq": true,
+ "SizeofIPMreqn": true,
+ "SizeofIPv6MTUInfo": true,
+ "SizeofIPv6Mreq": true,
+ "SizeofIfAddrmsg": true,
+ "SizeofIfAnnounceMsghdr": true,
+ "SizeofIfData": true,
+ "SizeofIfInfomsg": true,
+ "SizeofIfMsghdr": true,
+ "SizeofIfaMsghdr": true,
+ "SizeofIfmaMsghdr": true,
+ "SizeofIfmaMsghdr2": true,
+ "SizeofInet4Pktinfo": true,
+ "SizeofInet6Pktinfo": true,
+ "SizeofInotifyEvent": true,
+ "SizeofLinger": true,
+ "SizeofMsghdr": true,
+ "SizeofNlAttr": true,
+ "SizeofNlMsgerr": true,
+ "SizeofNlMsghdr": true,
+ "SizeofRtAttr": true,
+ "SizeofRtGenmsg": true,
+ "SizeofRtMetrics": true,
+ "SizeofRtMsg": true,
+ "SizeofRtMsghdr": true,
+ "SizeofRtNexthop": true,
+ "SizeofSockFilter": true,
+ "SizeofSockFprog": true,
+ "SizeofSockaddrAny": true,
+ "SizeofSockaddrDatalink": true,
+ "SizeofSockaddrInet4": true,
+ "SizeofSockaddrInet6": true,
+ "SizeofSockaddrLinklayer": true,
+ "SizeofSockaddrNetlink": true,
+ "SizeofSockaddrUnix": true,
+ "SizeofTCPInfo": true,
+ "SizeofUcred": true,
+ "SlicePtrFromStrings": true,
+ "SockFilter": true,
+ "SockFprog": true,
+ "SockaddrDatalink": true,
+ "SockaddrGen": true,
+ "SockaddrInet4": true,
+ "SockaddrInet6": true,
+ "SockaddrLinklayer": true,
+ "SockaddrNetlink": true,
+ "SockaddrUnix": true,
+ "Socket": true,
+ "SocketControlMessage": true,
+ "SocketDisableIPv6": true,
+ "Socketpair": true,
+ "Splice": true,
+ "StartProcess": true,
+ "StartupInfo": true,
+ "Stat": true,
+ "Stat_t": true,
+ "Statfs": true,
+ "Statfs_t": true,
+ "Stderr": true,
+ "Stdin": true,
+ "Stdout": true,
+ "StringBytePtr": true,
+ "StringByteSlice": true,
+ "StringSlicePtr": true,
+ "StringToSid": true,
+ "StringToUTF16": true,
+ "StringToUTF16Ptr": true,
+ "Symlink": true,
+ "Sync": true,
+ "SyncFileRange": true,
+ "SysProcAttr": true,
+ "SysProcIDMap": true,
+ "Syscall": true,
+ "Syscall12": true,
+ "Syscall15": true,
+ "Syscall6": true,
+ "Syscall9": true,
+ "Sysctl": true,
+ "SysctlUint32": true,
+ "Sysctlnode": true,
+ "Sysinfo": true,
+ "Sysinfo_t": true,
+ "Systemtime": true,
+ "TCGETS": true,
+ "TCIFLUSH": true,
+ "TCIOFLUSH": true,
+ "TCOFLUSH": true,
+ "TCPInfo": true,
+ "TCPKeepalive": true,
+ "TCP_CA_NAME_MAX": true,
+ "TCP_CONGCTL": true,
+ "TCP_CONGESTION": true,
+ "TCP_CONNECTIONTIMEOUT": true,
+ "TCP_CORK": true,
+ "TCP_DEFER_ACCEPT": true,
+ "TCP_INFO": true,
+ "TCP_KEEPALIVE": true,
+ "TCP_KEEPCNT": true,
+ "TCP_KEEPIDLE": true,
+ "TCP_KEEPINIT": true,
+ "TCP_KEEPINTVL": true,
+ "TCP_LINGER2": true,
+ "TCP_MAXBURST": true,
+ "TCP_MAXHLEN": true,
+ "TCP_MAXOLEN": true,
+ "TCP_MAXSEG": true,
+ "TCP_MAXWIN": true,
+ "TCP_MAX_SACK": true,
+ "TCP_MAX_WINSHIFT": true,
+ "TCP_MD5SIG": true,
+ "TCP_MD5SIG_MAXKEYLEN": true,
+ "TCP_MINMSS": true,
+ "TCP_MINMSSOVERLOAD": true,
+ "TCP_MSS": true,
+ "TCP_NODELAY": true,
+ "TCP_NOOPT": true,
+ "TCP_NOPUSH": true,
+ "TCP_NSTATES": true,
+ "TCP_QUICKACK": true,
+ "TCP_RXT_CONNDROPTIME": true,
+ "TCP_RXT_FINDROP": true,
+ "TCP_SACK_ENABLE": true,
+ "TCP_SYNCNT": true,
+ "TCP_VENDOR": true,
+ "TCP_WINDOW_CLAMP": true,
+ "TCSAFLUSH": true,
+ "TCSETS": true,
+ "TF_DISCONNECT": true,
+ "TF_REUSE_SOCKET": true,
+ "TF_USE_DEFAULT_WORKER": true,
+ "TF_USE_KERNEL_APC": true,
+ "TF_USE_SYSTEM_THREAD": true,
+ "TF_WRITE_BEHIND": true,
+ "TH32CS_INHERIT": true,
+ "TH32CS_SNAPALL": true,
+ "TH32CS_SNAPHEAPLIST": true,
+ "TH32CS_SNAPMODULE": true,
+ "TH32CS_SNAPMODULE32": true,
+ "TH32CS_SNAPPROCESS": true,
+ "TH32CS_SNAPTHREAD": true,
+ "TIME_ZONE_ID_DAYLIGHT": true,
+ "TIME_ZONE_ID_STANDARD": true,
+ "TIME_ZONE_ID_UNKNOWN": true,
+ "TIOCCBRK": true,
+ "TIOCCDTR": true,
+ "TIOCCONS": true,
+ "TIOCDCDTIMESTAMP": true,
+ "TIOCDRAIN": true,
+ "TIOCDSIMICROCODE": true,
+ "TIOCEXCL": true,
+ "TIOCEXT": true,
+ "TIOCFLAG_CDTRCTS": true,
+ "TIOCFLAG_CLOCAL": true,
+ "TIOCFLAG_CRTSCTS": true,
+ "TIOCFLAG_MDMBUF": true,
+ "TIOCFLAG_PPS": true,
+ "TIOCFLAG_SOFTCAR": true,
+ "TIOCFLUSH": true,
+ "TIOCGDEV": true,
+ "TIOCGDRAINWAIT": true,
+ "TIOCGETA": true,
+ "TIOCGETD": true,
+ "TIOCGFLAGS": true,
+ "TIOCGICOUNT": true,
+ "TIOCGLCKTRMIOS": true,
+ "TIOCGLINED": true,
+ "TIOCGPGRP": true,
+ "TIOCGPTN": true,
+ "TIOCGQSIZE": true,
+ "TIOCGRANTPT": true,
+ "TIOCGRS485": true,
+ "TIOCGSERIAL": true,
+ "TIOCGSID": true,
+ "TIOCGSIZE": true,
+ "TIOCGSOFTCAR": true,
+ "TIOCGTSTAMP": true,
+ "TIOCGWINSZ": true,
+ "TIOCINQ": true,
+ "TIOCIXOFF": true,
+ "TIOCIXON": true,
+ "TIOCLINUX": true,
+ "TIOCMBIC": true,
+ "TIOCMBIS": true,
+ "TIOCMGDTRWAIT": true,
+ "TIOCMGET": true,
+ "TIOCMIWAIT": true,
+ "TIOCMODG": true,
+ "TIOCMODS": true,
+ "TIOCMSDTRWAIT": true,
+ "TIOCMSET": true,
+ "TIOCM_CAR": true,
+ "TIOCM_CD": true,
+ "TIOCM_CTS": true,
+ "TIOCM_DCD": true,
+ "TIOCM_DSR": true,
+ "TIOCM_DTR": true,
+ "TIOCM_LE": true,
+ "TIOCM_RI": true,
+ "TIOCM_RNG": true,
+ "TIOCM_RTS": true,
+ "TIOCM_SR": true,
+ "TIOCM_ST": true,
+ "TIOCNOTTY": true,
+ "TIOCNXCL": true,
+ "TIOCOUTQ": true,
+ "TIOCPKT": true,
+ "TIOCPKT_DATA": true,
+ "TIOCPKT_DOSTOP": true,
+ "TIOCPKT_FLUSHREAD": true,
+ "TIOCPKT_FLUSHWRITE": true,
+ "TIOCPKT_IOCTL": true,
+ "TIOCPKT_NOSTOP": true,
+ "TIOCPKT_START": true,
+ "TIOCPKT_STOP": true,
+ "TIOCPTMASTER": true,
+ "TIOCPTMGET": true,
+ "TIOCPTSNAME": true,
+ "TIOCPTYGNAME": true,
+ "TIOCPTYGRANT": true,
+ "TIOCPTYUNLK": true,
+ "TIOCRCVFRAME": true,
+ "TIOCREMOTE": true,
+ "TIOCSBRK": true,
+ "TIOCSCONS": true,
+ "TIOCSCTTY": true,
+ "TIOCSDRAINWAIT": true,
+ "TIOCSDTR": true,
+ "TIOCSERCONFIG": true,
+ "TIOCSERGETLSR": true,
+ "TIOCSERGETMULTI": true,
+ "TIOCSERGSTRUCT": true,
+ "TIOCSERGWILD": true,
+ "TIOCSERSETMULTI": true,
+ "TIOCSERSWILD": true,
+ "TIOCSER_TEMT": true,
+ "TIOCSETA": true,
+ "TIOCSETAF": true,
+ "TIOCSETAW": true,
+ "TIOCSETD": true,
+ "TIOCSFLAGS": true,
+ "TIOCSIG": true,
+ "TIOCSLCKTRMIOS": true,
+ "TIOCSLINED": true,
+ "TIOCSPGRP": true,
+ "TIOCSPTLCK": true,
+ "TIOCSQSIZE": true,
+ "TIOCSRS485": true,
+ "TIOCSSERIAL": true,
+ "TIOCSSIZE": true,
+ "TIOCSSOFTCAR": true,
+ "TIOCSTART": true,
+ "TIOCSTAT": true,
+ "TIOCSTI": true,
+ "TIOCSTOP": true,
+ "TIOCSTSTAMP": true,
+ "TIOCSWINSZ": true,
+ "TIOCTIMESTAMP": true,
+ "TIOCUCNTL": true,
+ "TIOCVHANGUP": true,
+ "TIOCXMTFRAME": true,
+ "TOKEN_ADJUST_DEFAULT": true,
+ "TOKEN_ADJUST_GROUPS": true,
+ "TOKEN_ADJUST_PRIVILEGES": true,
+ "TOKEN_ADJUST_SESSIONID": true,
+ "TOKEN_ALL_ACCESS": true,
+ "TOKEN_ASSIGN_PRIMARY": true,
+ "TOKEN_DUPLICATE": true,
+ "TOKEN_EXECUTE": true,
+ "TOKEN_IMPERSONATE": true,
+ "TOKEN_QUERY": true,
+ "TOKEN_QUERY_SOURCE": true,
+ "TOKEN_READ": true,
+ "TOKEN_WRITE": true,
+ "TOSTOP": true,
+ "TRUNCATE_EXISTING": true,
+ "TUNATTACHFILTER": true,
+ "TUNDETACHFILTER": true,
+ "TUNGETFEATURES": true,
+ "TUNGETIFF": true,
+ "TUNGETSNDBUF": true,
+ "TUNGETVNETHDRSZ": true,
+ "TUNSETDEBUG": true,
+ "TUNSETGROUP": true,
+ "TUNSETIFF": true,
+ "TUNSETLINK": true,
+ "TUNSETNOCSUM": true,
+ "TUNSETOFFLOAD": true,
+ "TUNSETOWNER": true,
+ "TUNSETPERSIST": true,
+ "TUNSETSNDBUF": true,
+ "TUNSETTXFILTER": true,
+ "TUNSETVNETHDRSZ": true,
+ "Tee": true,
+ "TerminateProcess": true,
+ "Termios": true,
+ "Tgkill": true,
+ "Time": true,
+ "Time_t": true,
+ "Times": true,
+ "Timespec": true,
+ "TimespecToNsec": true,
+ "Timeval": true,
+ "Timeval32": true,
+ "TimevalToNsec": true,
+ "Timex": true,
+ "Timezoneinformation": true,
+ "Tms": true,
+ "Token": true,
+ "TokenAccessInformation": true,
+ "TokenAuditPolicy": true,
+ "TokenDefaultDacl": true,
+ "TokenElevation": true,
+ "TokenElevationType": true,
+ "TokenGroups": true,
+ "TokenGroupsAndPrivileges": true,
+ "TokenHasRestrictions": true,
+ "TokenImpersonationLevel": true,
+ "TokenIntegrityLevel": true,
+ "TokenLinkedToken": true,
+ "TokenLogonSid": true,
+ "TokenMandatoryPolicy": true,
+ "TokenOrigin": true,
+ "TokenOwner": true,
+ "TokenPrimaryGroup": true,
+ "TokenPrivileges": true,
+ "TokenRestrictedSids": true,
+ "TokenSandBoxInert": true,
+ "TokenSessionId": true,
+ "TokenSessionReference": true,
+ "TokenSource": true,
+ "TokenStatistics": true,
+ "TokenType": true,
+ "TokenUIAccess": true,
+ "TokenUser": true,
+ "TokenVirtualizationAllowed": true,
+ "TokenVirtualizationEnabled": true,
+ "Tokenprimarygroup": true,
+ "Tokenuser": true,
+ "TranslateAccountName": true,
+ "TranslateName": true,
+ "TransmitFile": true,
+ "TransmitFileBuffers": true,
+ "Truncate": true,
+ "USAGE_MATCH_TYPE_AND": true,
+ "USAGE_MATCH_TYPE_OR": true,
+ "UTF16FromString": true,
+ "UTF16PtrFromString": true,
+ "UTF16ToString": true,
+ "Ucred": true,
+ "Umask": true,
+ "Uname": true,
+ "Undelete": true,
+ "UnixCredentials": true,
+ "UnixRights": true,
+ "Unlink": true,
+ "Unlinkat": true,
+ "UnmapViewOfFile": true,
+ "Unmount": true,
+ "Unsetenv": true,
+ "Unshare": true,
+ "UserInfo10": true,
+ "Ustat": true,
+ "Ustat_t": true,
+ "Utimbuf": true,
+ "Utime": true,
+ "Utimes": true,
+ "UtimesNano": true,
+ "Utsname": true,
+ "VDISCARD": true,
+ "VDSUSP": true,
+ "VEOF": true,
+ "VEOL": true,
+ "VEOL2": true,
+ "VERASE": true,
+ "VERASE2": true,
+ "VINTR": true,
+ "VKILL": true,
+ "VLNEXT": true,
+ "VMIN": true,
+ "VQUIT": true,
+ "VREPRINT": true,
+ "VSTART": true,
+ "VSTATUS": true,
+ "VSTOP": true,
+ "VSUSP": true,
+ "VSWTC": true,
+ "VT0": true,
+ "VT1": true,
+ "VTDLY": true,
+ "VTIME": true,
+ "VWERASE": true,
+ "VirtualLock": true,
+ "VirtualUnlock": true,
+ "WAIT_ABANDONED": true,
+ "WAIT_FAILED": true,
+ "WAIT_OBJECT_0": true,
+ "WAIT_TIMEOUT": true,
+ "WALL": true,
+ "WALLSIG": true,
+ "WALTSIG": true,
+ "WCLONE": true,
+ "WCONTINUED": true,
+ "WCOREFLAG": true,
+ "WEXITED": true,
+ "WLINUXCLONE": true,
+ "WNOHANG": true,
+ "WNOTHREAD": true,
+ "WNOWAIT": true,
+ "WNOZOMBIE": true,
+ "WOPTSCHECKED": true,
+ "WORDSIZE": true,
+ "WSABuf": true,
+ "WSACleanup": true,
+ "WSADESCRIPTION_LEN": true,
+ "WSAData": true,
+ "WSAEACCES": true,
+ "WSAECONNABORTED": true,
+ "WSAECONNRESET": true,
+ "WSAEnumProtocols": true,
+ "WSAID_CONNECTEX": true,
+ "WSAIoctl": true,
+ "WSAPROTOCOL_LEN": true,
+ "WSAProtocolChain": true,
+ "WSAProtocolInfo": true,
+ "WSARecv": true,
+ "WSARecvFrom": true,
+ "WSASYS_STATUS_LEN": true,
+ "WSASend": true,
+ "WSASendTo": true,
+ "WSASendto": true,
+ "WSAStartup": true,
+ "WSTOPPED": true,
+ "WTRAPPED": true,
+ "WUNTRACED": true,
+ "Wait4": true,
+ "WaitForSingleObject": true,
+ "WaitStatus": true,
+ "Win32FileAttributeData": true,
+ "Win32finddata": true,
+ "Write": true,
+ "WriteConsole": true,
+ "WriteFile": true,
+ "X509_ASN_ENCODING": true,
+ "XCASE": true,
+ "XP1_CONNECTIONLESS": true,
+ "XP1_CONNECT_DATA": true,
+ "XP1_DISCONNECT_DATA": true,
+ "XP1_EXPEDITED_DATA": true,
+ "XP1_GRACEFUL_CLOSE": true,
+ "XP1_GUARANTEED_DELIVERY": true,
+ "XP1_GUARANTEED_ORDER": true,
+ "XP1_IFS_HANDLES": true,
+ "XP1_MESSAGE_ORIENTED": true,
+ "XP1_MULTIPOINT_CONTROL_PLANE": true,
+ "XP1_MULTIPOINT_DATA_PLANE": true,
+ "XP1_PARTIAL_MESSAGE": true,
+ "XP1_PSEUDO_STREAM": true,
+ "XP1_QOS_SUPPORTED": true,
+ "XP1_SAN_SUPPORT_SDP": true,
+ "XP1_SUPPORT_BROADCAST": true,
+ "XP1_SUPPORT_MULTIPOINT": true,
+ "XP1_UNI_RECV": true,
+ "XP1_UNI_SEND": true,
+ },
+ "testing": map[string]bool{
+ "AllocsPerRun": true,
+ "B": true,
+ "Benchmark": true,
+ "BenchmarkResult": true,
+ "Cover": true,
+ "CoverBlock": true,
+ "CoverMode": true,
+ "Coverage": true,
+ "InternalBenchmark": true,
+ "InternalExample": true,
+ "InternalTest": true,
+ "M": true,
+ "Main": true,
+ "MainStart": true,
+ "PB": true,
+ "RegisterCover": true,
+ "RunBenchmarks": true,
+ "RunExamples": true,
+ "RunTests": true,
+ "Short": true,
+ "T": true,
+ "Verbose": true,
+ },
+ "testing/iotest": map[string]bool{
+ "DataErrReader": true,
+ "ErrTimeout": true,
+ "HalfReader": true,
+ "NewReadLogger": true,
+ "NewWriteLogger": true,
+ "OneByteReader": true,
+ "TimeoutReader": true,
+ "TruncateWriter": true,
+ },
+ "testing/quick": map[string]bool{
+ "Check": true,
+ "CheckEqual": true,
+ "CheckEqualError": true,
+ "CheckError": true,
+ "Config": true,
+ "Generator": true,
+ "SetupError": true,
+ "Value": true,
+ },
+ "text/scanner": map[string]bool{
+ "Char": true,
+ "Comment": true,
+ "EOF": true,
+ "Float": true,
+ "GoTokens": true,
+ "GoWhitespace": true,
+ "Ident": true,
+ "Int": true,
+ "Position": true,
+ "RawString": true,
+ "ScanChars": true,
+ "ScanComments": true,
+ "ScanFloats": true,
+ "ScanIdents": true,
+ "ScanInts": true,
+ "ScanRawStrings": true,
+ "ScanStrings": true,
+ "Scanner": true,
+ "SkipComments": true,
+ "String": true,
+ "TokenString": true,
+ },
+ "text/tabwriter": map[string]bool{
+ "AlignRight": true,
+ "Debug": true,
+ "DiscardEmptyColumns": true,
+ "Escape": true,
+ "FilterHTML": true,
+ "NewWriter": true,
+ "StripEscape": true,
+ "TabIndent": true,
+ "Writer": true,
+ },
+ "text/template": map[string]bool{
+ "ExecError": true,
+ "FuncMap": true,
+ "HTMLEscape": true,
+ "HTMLEscapeString": true,
+ "HTMLEscaper": true,
+ "IsTrue": true,
+ "JSEscape": true,
+ "JSEscapeString": true,
+ "JSEscaper": true,
+ "Must": true,
+ "New": true,
+ "ParseFiles": true,
+ "ParseGlob": true,
+ "Template": true,
+ "URLQueryEscaper": true,
+ },
+ "text/template/parse": map[string]bool{
+ "ActionNode": true,
+ "BoolNode": true,
+ "BranchNode": true,
+ "ChainNode": true,
+ "CommandNode": true,
+ "DotNode": true,
+ "FieldNode": true,
+ "IdentifierNode": true,
+ "IfNode": true,
+ "IsEmptyTree": true,
+ "ListNode": true,
+ "New": true,
+ "NewIdentifier": true,
+ "NilNode": true,
+ "Node": true,
+ "NodeAction": true,
+ "NodeBool": true,
+ "NodeChain": true,
+ "NodeCommand": true,
+ "NodeDot": true,
+ "NodeField": true,
+ "NodeIdentifier": true,
+ "NodeIf": true,
+ "NodeList": true,
+ "NodeNil": true,
+ "NodeNumber": true,
+ "NodePipe": true,
+ "NodeRange": true,
+ "NodeString": true,
+ "NodeTemplate": true,
+ "NodeText": true,
+ "NodeType": true,
+ "NodeVariable": true,
+ "NodeWith": true,
+ "NumberNode": true,
+ "Parse": true,
+ "PipeNode": true,
+ "Pos": true,
+ "RangeNode": true,
+ "StringNode": true,
+ "TemplateNode": true,
+ "TextNode": true,
+ "Tree": true,
+ "VariableNode": true,
+ "WithNode": true,
+ },
+ "time": map[string]bool{
+ "ANSIC": true,
+ "After": true,
+ "AfterFunc": true,
+ "April": true,
+ "August": true,
+ "Date": true,
+ "December": true,
+ "Duration": true,
+ "February": true,
+ "FixedZone": true,
+ "Friday": true,
+ "Hour": true,
+ "January": true,
+ "July": true,
+ "June": true,
+ "Kitchen": true,
+ "LoadLocation": true,
+ "LoadLocationFromTZData": true,
+ "Local": true,
+ "Location": true,
+ "March": true,
+ "May": true,
+ "Microsecond": true,
+ "Millisecond": true,
+ "Minute": true,
+ "Monday": true,
+ "Month": true,
+ "Nanosecond": true,
+ "NewTicker": true,
+ "NewTimer": true,
+ "November": true,
+ "Now": true,
+ "October": true,
+ "Parse": true,
+ "ParseDuration": true,
+ "ParseError": true,
+ "ParseInLocation": true,
+ "RFC1123": true,
+ "RFC1123Z": true,
+ "RFC3339": true,
+ "RFC3339Nano": true,
+ "RFC822": true,
+ "RFC822Z": true,
+ "RFC850": true,
+ "RubyDate": true,
+ "Saturday": true,
+ "Second": true,
+ "September": true,
+ "Since": true,
+ "Sleep": true,
+ "Stamp": true,
+ "StampMicro": true,
+ "StampMilli": true,
+ "StampNano": true,
+ "Sunday": true,
+ "Thursday": true,
+ "Tick": true,
+ "Ticker": true,
+ "Time": true,
+ "Timer": true,
+ "Tuesday": true,
+ "UTC": true,
+ "Unix": true,
+ "UnixDate": true,
+ "Until": true,
+ "Wednesday": true,
+ "Weekday": true,
+ },
+ "unicode": map[string]bool{
+ "ASCII_Hex_Digit": true,
+ "Adlam": true,
+ "Ahom": true,
+ "Anatolian_Hieroglyphs": true,
+ "Arabic": true,
+ "Armenian": true,
+ "Avestan": true,
+ "AzeriCase": true,
+ "Balinese": true,
+ "Bamum": true,
+ "Bassa_Vah": true,
+ "Batak": true,
+ "Bengali": true,
+ "Bhaiksuki": true,
+ "Bidi_Control": true,
+ "Bopomofo": true,
+ "Brahmi": true,
+ "Braille": true,
+ "Buginese": true,
+ "Buhid": true,
+ "C": true,
+ "Canadian_Aboriginal": true,
+ "Carian": true,
+ "CaseRange": true,
+ "CaseRanges": true,
+ "Categories": true,
+ "Caucasian_Albanian": true,
+ "Cc": true,
+ "Cf": true,
+ "Chakma": true,
+ "Cham": true,
+ "Cherokee": true,
+ "Co": true,
+ "Common": true,
+ "Coptic": true,
+ "Cs": true,
+ "Cuneiform": true,
+ "Cypriot": true,
+ "Cyrillic": true,
+ "Dash": true,
+ "Deprecated": true,
+ "Deseret": true,
+ "Devanagari": true,
+ "Diacritic": true,
+ "Digit": true,
+ "Duployan": true,
+ "Egyptian_Hieroglyphs": true,
+ "Elbasan": true,
+ "Ethiopic": true,
+ "Extender": true,
+ "FoldCategory": true,
+ "FoldScript": true,
+ "Georgian": true,
+ "Glagolitic": true,
+ "Gothic": true,
+ "Grantha": true,
+ "GraphicRanges": true,
+ "Greek": true,
+ "Gujarati": true,
+ "Gurmukhi": true,
+ "Han": true,
+ "Hangul": true,
+ "Hanunoo": true,
+ "Hatran": true,
+ "Hebrew": true,
+ "Hex_Digit": true,
+ "Hiragana": true,
+ "Hyphen": true,
+ "IDS_Binary_Operator": true,
+ "IDS_Trinary_Operator": true,
+ "Ideographic": true,
+ "Imperial_Aramaic": true,
+ "In": true,
+ "Inherited": true,
+ "Inscriptional_Pahlavi": true,
+ "Inscriptional_Parthian": true,
+ "Is": true,
+ "IsControl": true,
+ "IsDigit": true,
+ "IsGraphic": true,
+ "IsLetter": true,
+ "IsLower": true,
+ "IsMark": true,
+ "IsNumber": true,
+ "IsOneOf": true,
+ "IsPrint": true,
+ "IsPunct": true,
+ "IsSpace": true,
+ "IsSymbol": true,
+ "IsTitle": true,
+ "IsUpper": true,
+ "Javanese": true,
+ "Join_Control": true,
+ "Kaithi": true,
+ "Kannada": true,
+ "Katakana": true,
+ "Kayah_Li": true,
+ "Kharoshthi": true,
+ "Khmer": true,
+ "Khojki": true,
+ "Khudawadi": true,
+ "L": true,
+ "Lao": true,
+ "Latin": true,
+ "Lepcha": true,
+ "Letter": true,
+ "Limbu": true,
+ "Linear_A": true,
+ "Linear_B": true,
+ "Lisu": true,
+ "Ll": true,
+ "Lm": true,
+ "Lo": true,
+ "Logical_Order_Exception": true,
+ "Lower": true,
+ "LowerCase": true,
+ "Lt": true,
+ "Lu": true,
+ "Lycian": true,
+ "Lydian": true,
+ "M": true,
+ "Mahajani": true,
+ "Malayalam": true,
+ "Mandaic": true,
+ "Manichaean": true,
+ "Marchen": true,
+ "Mark": true,
+ "Masaram_Gondi": true,
+ "MaxASCII": true,
+ "MaxCase": true,
+ "MaxLatin1": true,
+ "MaxRune": true,
+ "Mc": true,
+ "Me": true,
+ "Meetei_Mayek": true,
+ "Mende_Kikakui": true,
+ "Meroitic_Cursive": true,
+ "Meroitic_Hieroglyphs": true,
+ "Miao": true,
+ "Mn": true,
+ "Modi": true,
+ "Mongolian": true,
+ "Mro": true,
+ "Multani": true,
+ "Myanmar": true,
+ "N": true,
+ "Nabataean": true,
+ "Nd": true,
+ "New_Tai_Lue": true,
+ "Newa": true,
+ "Nko": true,
+ "Nl": true,
+ "No": true,
+ "Noncharacter_Code_Point": true,
+ "Number": true,
+ "Nushu": true,
+ "Ogham": true,
+ "Ol_Chiki": true,
+ "Old_Hungarian": true,
+ "Old_Italic": true,
+ "Old_North_Arabian": true,
+ "Old_Permic": true,
+ "Old_Persian": true,
+ "Old_South_Arabian": true,
+ "Old_Turkic": true,
+ "Oriya": true,
+ "Osage": true,
+ "Osmanya": true,
+ "Other": true,
+ "Other_Alphabetic": true,
+ "Other_Default_Ignorable_Code_Point": true,
+ "Other_Grapheme_Extend": true,
+ "Other_ID_Continue": true,
+ "Other_ID_Start": true,
+ "Other_Lowercase": true,
+ "Other_Math": true,
+ "Other_Uppercase": true,
+ "P": true,
+ "Pahawh_Hmong": true,
+ "Palmyrene": true,
+ "Pattern_Syntax": true,
+ "Pattern_White_Space": true,
+ "Pau_Cin_Hau": true,
+ "Pc": true,
+ "Pd": true,
+ "Pe": true,
+ "Pf": true,
+ "Phags_Pa": true,
+ "Phoenician": true,
+ "Pi": true,
+ "Po": true,
+ "Prepended_Concatenation_Mark": true,
+ "PrintRanges": true,
+ "Properties": true,
+ "Ps": true,
+ "Psalter_Pahlavi": true,
+ "Punct": true,
+ "Quotation_Mark": true,
+ "Radical": true,
+ "Range16": true,
+ "Range32": true,
+ "RangeTable": true,
+ "Regional_Indicator": true,
+ "Rejang": true,
+ "ReplacementChar": true,
+ "Runic": true,
+ "S": true,
+ "STerm": true,
+ "Samaritan": true,
+ "Saurashtra": true,
+ "Sc": true,
+ "Scripts": true,
+ "Sentence_Terminal": true,
+ "Sharada": true,
+ "Shavian": true,
+ "Siddham": true,
+ "SignWriting": true,
+ "SimpleFold": true,
+ "Sinhala": true,
+ "Sk": true,
+ "Sm": true,
+ "So": true,
+ "Soft_Dotted": true,
+ "Sora_Sompeng": true,
+ "Soyombo": true,
+ "Space": true,
+ "SpecialCase": true,
+ "Sundanese": true,
+ "Syloti_Nagri": true,
+ "Symbol": true,
+ "Syriac": true,
+ "Tagalog": true,
+ "Tagbanwa": true,
+ "Tai_Le": true,
+ "Tai_Tham": true,
+ "Tai_Viet": true,
+ "Takri": true,
+ "Tamil": true,
+ "Tangut": true,
+ "Telugu": true,
+ "Terminal_Punctuation": true,
+ "Thaana": true,
+ "Thai": true,
+ "Tibetan": true,
+ "Tifinagh": true,
+ "Tirhuta": true,
+ "Title": true,
+ "TitleCase": true,
+ "To": true,
+ "ToLower": true,
+ "ToTitle": true,
+ "ToUpper": true,
+ "TurkishCase": true,
+ "Ugaritic": true,
+ "Unified_Ideograph": true,
+ "Upper": true,
+ "UpperCase": true,
+ "UpperLower": true,
+ "Vai": true,
+ "Variation_Selector": true,
+ "Version": true,
+ "Warang_Citi": true,
+ "White_Space": true,
+ "Yi": true,
+ "Z": true,
+ "Zanabazar_Square": true,
+ "Zl": true,
+ "Zp": true,
+ "Zs": true,
+ },
+ "unicode/utf16": map[string]bool{
+ "Decode": true,
+ "DecodeRune": true,
+ "Encode": true,
+ "EncodeRune": true,
+ "IsSurrogate": true,
+ },
+ "unicode/utf8": map[string]bool{
+ "DecodeLastRune": true,
+ "DecodeLastRuneInString": true,
+ "DecodeRune": true,
+ "DecodeRuneInString": true,
+ "EncodeRune": true,
+ "FullRune": true,
+ "FullRuneInString": true,
+ "MaxRune": true,
+ "RuneCount": true,
+ "RuneCountInString": true,
+ "RuneError": true,
+ "RuneLen": true,
+ "RuneSelf": true,
+ "RuneStart": true,
+ "UTFMax": true,
+ "Valid": true,
+ "ValidRune": true,
+ "ValidString": true,
+ },
+ "unsafe": map[string]bool{
+ "Alignof": true,
+ "ArbitraryType": true,
+ "Offsetof": true,
+ "Pointer": true,
+ "Sizeof": true,
+ },
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go
new file mode 100644
index 000000000..7219c8e9f
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go
@@ -0,0 +1,196 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package fastwalk provides a faster version of filepath.Walk for file system
+// scanning tools.
+package fastwalk
+
+import (
+ "errors"
+ "os"
+ "path/filepath"
+ "runtime"
+ "sync"
+)
+
+// TraverseLink is used as a return value from WalkFuncs to indicate that the
+// symlink named in the call may be traversed.
+var TraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory")
+
+// SkipFiles is used as a return value from WalkFuncs to indicate that the
+// callback should not be called for any other files in the current directory.
+// Child directories will still be traversed.
+var SkipFiles = errors.New("fastwalk: skip remaining files in directory")
+
+// Walk is a faster implementation of filepath.Walk.
+//
+// filepath.Walk's design necessarily calls os.Lstat on each file,
+// even if the caller needs less info.
+// Many tools need only the type of each file.
+// On some platforms, this information is provided directly by the readdir
+// system call, avoiding the need to stat each file individually.
+// fastwalk_unix.go contains a fork of the syscall routines.
+//
+// See golang.org/issue/16399
+//
+// Walk walks the file tree rooted at root, calling walkFn for
+// each file or directory in the tree, including root.
+//
+// If fastWalk returns filepath.SkipDir, the directory is skipped.
+//
+// Unlike filepath.Walk:
+// * file stat calls must be done by the user.
+// The only provided metadata is the file type, which does not include
+// any permission bits.
+// * multiple goroutines stat the filesystem concurrently. The provided
+// walkFn must be safe for concurrent use.
+// * fastWalk can follow symlinks if walkFn returns the TraverseLink
+// sentinel error. It is the walkFn's responsibility to prevent
+// fastWalk from going into symlink cycles.
+func Walk(root string, walkFn func(path string, typ os.FileMode) error) error {
+ // TODO(bradfitz): make numWorkers configurable? We used a
+ // minimum of 4 to give the kernel more info about multiple
+ // things we want, in hopes its I/O scheduling can take
+ // advantage of that. Hopefully most are in cache. Maybe 4 is
+ // even too low of a minimum. Profile more.
+ numWorkers := 4
+ if n := runtime.NumCPU(); n > numWorkers {
+ numWorkers = n
+ }
+
+ // Make sure to wait for all workers to finish, otherwise
+ // walkFn could still be called after returning. This Wait call
+ // runs after close(e.donec) below.
+ var wg sync.WaitGroup
+ defer wg.Wait()
+
+ w := &walker{
+ fn: walkFn,
+ enqueuec: make(chan walkItem, numWorkers), // buffered for performance
+ workc: make(chan walkItem, numWorkers), // buffered for performance
+ donec: make(chan struct{}),
+
+ // buffered for correctness & not leaking goroutines:
+ resc: make(chan error, numWorkers),
+ }
+ defer close(w.donec)
+
+ for i := 0; i < numWorkers; i++ {
+ wg.Add(1)
+ go w.doWork(&wg)
+ }
+ todo := []walkItem{{dir: root}}
+ out := 0
+ for {
+ workc := w.workc
+ var workItem walkItem
+ if len(todo) == 0 {
+ workc = nil
+ } else {
+ workItem = todo[len(todo)-1]
+ }
+ select {
+ case workc <- workItem:
+ todo = todo[:len(todo)-1]
+ out++
+ case it := <-w.enqueuec:
+ todo = append(todo, it)
+ case err := <-w.resc:
+ out--
+ if err != nil {
+ return err
+ }
+ if out == 0 && len(todo) == 0 {
+ // It's safe to quit here, as long as the buffered
+ // enqueue channel isn't also readable, which might
+ // happen if the worker sends both another unit of
+ // work and its result before the other select was
+ // scheduled and both w.resc and w.enqueuec were
+ // readable.
+ select {
+ case it := <-w.enqueuec:
+ todo = append(todo, it)
+ default:
+ return nil
+ }
+ }
+ }
+ }
+}
+
+// doWork reads directories as instructed (via workc) and runs the
+// user's callback function.
+func (w *walker) doWork(wg *sync.WaitGroup) {
+ defer wg.Done()
+ for {
+ select {
+ case <-w.donec:
+ return
+ case it := <-w.workc:
+ select {
+ case <-w.donec:
+ return
+ case w.resc <- w.walk(it.dir, !it.callbackDone):
+ }
+ }
+ }
+}
+
+type walker struct {
+ fn func(path string, typ os.FileMode) error
+
+ donec chan struct{} // closed on fastWalk's return
+ workc chan walkItem // to workers
+ enqueuec chan walkItem // from workers
+ resc chan error // from workers
+}
+
+type walkItem struct {
+ dir string
+ callbackDone bool // callback already called; don't do it again
+}
+
+func (w *walker) enqueue(it walkItem) {
+ select {
+ case w.enqueuec <- it:
+ case <-w.donec:
+ }
+}
+
+func (w *walker) onDirEnt(dirName, baseName string, typ os.FileMode) error {
+ joined := dirName + string(os.PathSeparator) + baseName
+ if typ == os.ModeDir {
+ w.enqueue(walkItem{dir: joined})
+ return nil
+ }
+
+ err := w.fn(joined, typ)
+ if typ == os.ModeSymlink {
+ if err == TraverseLink {
+ // Set callbackDone so we don't call it twice for both the
+ // symlink-as-symlink and the symlink-as-directory later:
+ w.enqueue(walkItem{dir: joined, callbackDone: true})
+ return nil
+ }
+ if err == filepath.SkipDir {
+ // Permit SkipDir on symlinks too.
+ return nil
+ }
+ }
+ return err
+}
+
+func (w *walker) walk(root string, runUserCallback bool) error {
+ if runUserCallback {
+ err := w.fn(root, os.ModeDir)
+ if err == filepath.SkipDir {
+ return nil
+ }
+ if err != nil {
+ return err
+ }
+ }
+
+ return readDir(root, w.onDirEnt)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go
new file mode 100644
index 000000000..ccffec5ad
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go
@@ -0,0 +1,13 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build freebsd openbsd netbsd
+
+package fastwalk
+
+import "syscall"
+
+func direntInode(dirent *syscall.Dirent) uint64 {
+ return uint64(dirent.Fileno)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go
new file mode 100644
index 000000000..ab7fbc0a9
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go
@@ -0,0 +1,14 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build linux darwin
+// +build !appengine
+
+package fastwalk
+
+import "syscall"
+
+func direntInode(dirent *syscall.Dirent) uint64 {
+ return uint64(dirent.Ino)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go
new file mode 100644
index 000000000..a3b26a7ba
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go
@@ -0,0 +1,13 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build darwin freebsd openbsd netbsd
+
+package fastwalk
+
+import "syscall"
+
+func direntNamlen(dirent *syscall.Dirent) uint64 {
+ return uint64(dirent.Namlen)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go
new file mode 100644
index 000000000..e880d358b
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go
@@ -0,0 +1,29 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build linux
+// +build !appengine
+
+package fastwalk
+
+import (
+ "bytes"
+ "syscall"
+ "unsafe"
+)
+
+func direntNamlen(dirent *syscall.Dirent) uint64 {
+ const fixedHdr = uint16(unsafe.Offsetof(syscall.Dirent{}.Name))
+ nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0]))
+ const nameBufLen = uint16(len(nameBuf))
+ limit := dirent.Reclen - fixedHdr
+ if limit > nameBufLen {
+ limit = nameBufLen
+ }
+ nameLen := bytes.IndexByte(nameBuf[:limit], 0)
+ if nameLen < 0 {
+ panic("failed to find terminating 0 byte in dirent")
+ }
+ return uint64(nameLen)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go
new file mode 100644
index 000000000..a906b8759
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go
@@ -0,0 +1,37 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build appengine !linux,!darwin,!freebsd,!openbsd,!netbsd
+
+package fastwalk
+
+import (
+ "io/ioutil"
+ "os"
+)
+
+// readDir calls fn for each directory entry in dirName.
+// It does not descend into directories or follow symlinks.
+// If fn returns a non-nil error, readDir returns with that error
+// immediately.
+func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error {
+ fis, err := ioutil.ReadDir(dirName)
+ if err != nil {
+ return err
+ }
+ skipFiles := false
+ for _, fi := range fis {
+ if fi.Mode().IsRegular() && skipFiles {
+ continue
+ }
+ if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil {
+ if err == SkipFiles {
+ skipFiles = true
+ continue
+ }
+ return err
+ }
+ }
+ return nil
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go
new file mode 100644
index 000000000..3369b1a0b
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go
@@ -0,0 +1,127 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build linux darwin freebsd openbsd netbsd
+// +build !appengine
+
+package fastwalk
+
+import (
+ "fmt"
+ "os"
+ "syscall"
+ "unsafe"
+)
+
+const blockSize = 8 << 10
+
+// unknownFileMode is a sentinel (and bogus) os.FileMode
+// value used to represent a syscall.DT_UNKNOWN Dirent.Type.
+const unknownFileMode os.FileMode = os.ModeNamedPipe | os.ModeSocket | os.ModeDevice
+
+func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error {
+ fd, err := syscall.Open(dirName, 0, 0)
+ if err != nil {
+ return &os.PathError{Op: "open", Path: dirName, Err: err}
+ }
+ defer syscall.Close(fd)
+
+ // The buffer must be at least a block long.
+ buf := make([]byte, blockSize) // stack-allocated; doesn't escape
+ bufp := 0 // starting read position in buf
+ nbuf := 0 // end valid data in buf
+ skipFiles := false
+ for {
+ if bufp >= nbuf {
+ bufp = 0
+ nbuf, err = syscall.ReadDirent(fd, buf)
+ if err != nil {
+ return os.NewSyscallError("readdirent", err)
+ }
+ if nbuf <= 0 {
+ return nil
+ }
+ }
+ consumed, name, typ := parseDirEnt(buf[bufp:nbuf])
+ bufp += consumed
+ if name == "" || name == "." || name == ".." {
+ continue
+ }
+ // Fallback for filesystems (like old XFS) that don't
+ // support Dirent.Type and have DT_UNKNOWN (0) there
+ // instead.
+ if typ == unknownFileMode {
+ fi, err := os.Lstat(dirName + "/" + name)
+ if err != nil {
+ // It got deleted in the meantime.
+ if os.IsNotExist(err) {
+ continue
+ }
+ return err
+ }
+ typ = fi.Mode() & os.ModeType
+ }
+ if skipFiles && typ.IsRegular() {
+ continue
+ }
+ if err := fn(dirName, name, typ); err != nil {
+ if err == SkipFiles {
+ skipFiles = true
+ continue
+ }
+ return err
+ }
+ }
+}
+
+func parseDirEnt(buf []byte) (consumed int, name string, typ os.FileMode) {
+ // golang.org/issue/15653
+ dirent := (*syscall.Dirent)(unsafe.Pointer(&buf[0]))
+ if v := unsafe.Offsetof(dirent.Reclen) + unsafe.Sizeof(dirent.Reclen); uintptr(len(buf)) < v {
+ panic(fmt.Sprintf("buf size of %d smaller than dirent header size %d", len(buf), v))
+ }
+ if len(buf) < int(dirent.Reclen) {
+ panic(fmt.Sprintf("buf size %d < record length %d", len(buf), dirent.Reclen))
+ }
+ consumed = int(dirent.Reclen)
+ if direntInode(dirent) == 0 { // File absent in directory.
+ return
+ }
+ switch dirent.Type {
+ case syscall.DT_REG:
+ typ = 0
+ case syscall.DT_DIR:
+ typ = os.ModeDir
+ case syscall.DT_LNK:
+ typ = os.ModeSymlink
+ case syscall.DT_BLK:
+ typ = os.ModeDevice
+ case syscall.DT_FIFO:
+ typ = os.ModeNamedPipe
+ case syscall.DT_SOCK:
+ typ = os.ModeSocket
+ case syscall.DT_UNKNOWN:
+ typ = unknownFileMode
+ default:
+ // Skip weird things.
+ // It's probably a DT_WHT (http://lwn.net/Articles/325369/)
+ // or something. Revisit if/when this package is moved outside
+ // of goimports. goimports only cares about regular files,
+ // symlinks, and directories.
+ return
+ }
+
+ nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0]))
+ nameLen := direntNamlen(dirent)
+
+ // Special cases for common things:
+ if nameLen == 1 && nameBuf[0] == '.' {
+ name = "."
+ } else if nameLen == 2 && nameBuf[0] == '.' && nameBuf[1] == '.' {
+ name = ".."
+ } else {
+ name = string(nameBuf[:nameLen])
+ }
+ return
+}
diff --git a/vendor/golang.org/x/tools/internal/gopathwalk/walk.go b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go
new file mode 100644
index 000000000..a561f9f41
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go
@@ -0,0 +1,249 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package gopathwalk is like filepath.Walk but specialized for finding Go
+// packages, particularly in $GOPATH and $GOROOT.
+package gopathwalk
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/build"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "golang.org/x/tools/internal/fastwalk"
+)
+
+// Options controls the behavior of a Walk call.
+type Options struct {
+ Debug bool // Enable debug logging
+ ModulesEnabled bool // Search module caches. Also disables legacy goimports ignore rules.
+}
+
+// RootType indicates the type of a Root.
+type RootType int
+
+const (
+ RootUnknown RootType = iota
+ RootGOROOT
+ RootGOPATH
+ RootCurrentModule
+ RootModuleCache
+)
+
+// A Root is a starting point for a Walk.
+type Root struct {
+ Path string
+ Type RootType
+}
+
+// SrcDirsRoots returns the roots from build.Default.SrcDirs(). Not modules-compatible.
+func SrcDirsRoots() []Root {
+ var roots []Root
+ roots = append(roots, Root{filepath.Join(build.Default.GOROOT, "src"), RootGOROOT})
+ for _, p := range filepath.SplitList(build.Default.GOPATH) {
+ roots = append(roots, Root{filepath.Join(p, "src"), RootGOPATH})
+ }
+ return roots
+}
+
+// Walk walks Go source directories ($GOROOT, $GOPATH, etc) to find packages.
+// For each package found, add will be called (concurrently) with the absolute
+// paths of the containing source directory and the package directory.
+// add will be called concurrently.
+func Walk(roots []Root, add func(root Root, dir string), opts Options) {
+ for _, root := range roots {
+ walkDir(root, add, opts)
+ }
+}
+
+func walkDir(root Root, add func(Root, string), opts Options) {
+ if _, err := os.Stat(root.Path); os.IsNotExist(err) {
+ if opts.Debug {
+ log.Printf("skipping nonexistant directory: %v", root.Path)
+ }
+ return
+ }
+ if opts.Debug {
+ log.Printf("scanning %s", root.Path)
+ }
+ w := &walker{
+ root: root,
+ add: add,
+ opts: opts,
+ }
+ w.init()
+ if err := fastwalk.Walk(root.Path, w.walk); err != nil {
+ log.Printf("gopathwalk: scanning directory %v: %v", root.Path, err)
+ }
+
+ if opts.Debug {
+ log.Printf("scanned %s", root.Path)
+ }
+}
+
+// walker is the callback for fastwalk.Walk.
+type walker struct {
+ root Root // The source directory to scan.
+ add func(Root, string) // The callback that will be invoked for every possible Go package dir.
+ opts Options // Options passed to Walk by the user.
+
+ ignoredDirs []os.FileInfo // The ignored directories, loaded from .goimportsignore files.
+}
+
+// init initializes the walker based on its Options.
+func (w *walker) init() {
+ var ignoredPaths []string
+ if w.root.Type == RootModuleCache {
+ ignoredPaths = []string{"cache"}
+ }
+ if !w.opts.ModulesEnabled && w.root.Type == RootGOPATH {
+ ignoredPaths = w.getIgnoredDirs(w.root.Path)
+ ignoredPaths = append(ignoredPaths, "v", "mod")
+ }
+
+ for _, p := range ignoredPaths {
+ full := filepath.Join(w.root.Path, p)
+ if fi, err := os.Stat(full); err == nil {
+ w.ignoredDirs = append(w.ignoredDirs, fi)
+ if w.opts.Debug {
+ log.Printf("Directory added to ignore list: %s", full)
+ }
+ } else if w.opts.Debug {
+ log.Printf("Error statting ignored directory: %v", err)
+ }
+ }
+}
+
+// getIgnoredDirs reads an optional config file at /.goimportsignore
+// of relative directories to ignore when scanning for go files.
+// The provided path is one of the $GOPATH entries with "src" appended.
+func (w *walker) getIgnoredDirs(path string) []string {
+ file := filepath.Join(path, ".goimportsignore")
+ slurp, err := ioutil.ReadFile(file)
+ if w.opts.Debug {
+ if err != nil {
+ log.Print(err)
+ } else {
+ log.Printf("Read %s", file)
+ }
+ }
+ if err != nil {
+ return nil
+ }
+
+ var ignoredDirs []string
+ bs := bufio.NewScanner(bytes.NewReader(slurp))
+ for bs.Scan() {
+ line := strings.TrimSpace(bs.Text())
+ if line == "" || strings.HasPrefix(line, "#") {
+ continue
+ }
+ ignoredDirs = append(ignoredDirs, line)
+ }
+ return ignoredDirs
+}
+
+func (w *walker) shouldSkipDir(fi os.FileInfo) bool {
+ for _, ignoredDir := range w.ignoredDirs {
+ if os.SameFile(fi, ignoredDir) {
+ return true
+ }
+ }
+ return false
+}
+
+func (w *walker) walk(path string, typ os.FileMode) error {
+ dir := filepath.Dir(path)
+ if typ.IsRegular() {
+ if dir == w.root.Path {
+ // Doesn't make sense to have regular files
+ // directly in your $GOPATH/src or $GOROOT/src.
+ return fastwalk.SkipFiles
+ }
+ if !strings.HasSuffix(path, ".go") {
+ return nil
+ }
+
+ w.add(w.root, dir)
+ return fastwalk.SkipFiles
+ }
+ if typ == os.ModeDir {
+ base := filepath.Base(path)
+ if base == "" || base[0] == '.' || base[0] == '_' ||
+ base == "testdata" ||
+ (w.root.Type == RootGOROOT && w.opts.ModulesEnabled && base == "vendor") ||
+ (!w.opts.ModulesEnabled && base == "node_modules") {
+ return filepath.SkipDir
+ }
+ fi, err := os.Lstat(path)
+ if err == nil && w.shouldSkipDir(fi) {
+ return filepath.SkipDir
+ }
+ return nil
+ }
+ if typ == os.ModeSymlink {
+ base := filepath.Base(path)
+ if strings.HasPrefix(base, ".#") {
+ // Emacs noise.
+ return nil
+ }
+ fi, err := os.Lstat(path)
+ if err != nil {
+ // Just ignore it.
+ return nil
+ }
+ if w.shouldTraverse(dir, fi) {
+ return fastwalk.TraverseLink
+ }
+ }
+ return nil
+}
+
+// shouldTraverse reports whether the symlink fi, found in dir,
+// should be followed. It makes sure symlinks were never visited
+// before to avoid symlink loops.
+func (w *walker) shouldTraverse(dir string, fi os.FileInfo) bool {
+ path := filepath.Join(dir, fi.Name())
+ target, err := filepath.EvalSymlinks(path)
+ if err != nil {
+ return false
+ }
+ ts, err := os.Stat(target)
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err)
+ return false
+ }
+ if !ts.IsDir() {
+ return false
+ }
+ if w.shouldSkipDir(ts) {
+ return false
+ }
+ // Check for symlink loops by statting each directory component
+ // and seeing if any are the same file as ts.
+ for {
+ parent := filepath.Dir(path)
+ if parent == path {
+ // Made it to the root without seeing a cycle.
+ // Use this symlink.
+ return true
+ }
+ parentInfo, err := os.Stat(parent)
+ if err != nil {
+ return false
+ }
+ if os.SameFile(ts, parentInfo) {
+ // Cycle. Don't traverse.
+ return false
+ }
+ path = parent
+ }
+
+}
diff --git a/vendor/golang.org/x/tools/internal/semver/semver.go b/vendor/golang.org/x/tools/internal/semver/semver.go
new file mode 100644
index 000000000..4af7118e5
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/semver/semver.go
@@ -0,0 +1,388 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package semver implements comparison of semantic version strings.
+// In this package, semantic version strings must begin with a leading "v",
+// as in "v1.0.0".
+//
+// The general form of a semantic version string accepted by this package is
+//
+// vMAJOR[.MINOR[.PATCH[-PRERELEASE][+BUILD]]]
+//
+// where square brackets indicate optional parts of the syntax;
+// MAJOR, MINOR, and PATCH are decimal integers without extra leading zeros;
+// PRERELEASE and BUILD are each a series of non-empty dot-separated identifiers
+// using only alphanumeric characters and hyphens; and
+// all-numeric PRERELEASE identifiers must not have leading zeros.
+//
+// This package follows Semantic Versioning 2.0.0 (see semver.org)
+// with two exceptions. First, it requires the "v" prefix. Second, it recognizes
+// vMAJOR and vMAJOR.MINOR (with no prerelease or build suffixes)
+// as shorthands for vMAJOR.0.0 and vMAJOR.MINOR.0.
+package semver
+
+// parsed returns the parsed form of a semantic version string.
+type parsed struct {
+ major string
+ minor string
+ patch string
+ short string
+ prerelease string
+ build string
+ err string
+}
+
+// IsValid reports whether v is a valid semantic version string.
+func IsValid(v string) bool {
+ _, ok := parse(v)
+ return ok
+}
+
+// Canonical returns the canonical formatting of the semantic version v.
+// It fills in any missing .MINOR or .PATCH and discards build metadata.
+// Two semantic versions compare equal only if their canonical formattings
+// are identical strings.
+// The canonical invalid semantic version is the empty string.
+func Canonical(v string) string {
+ p, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ if p.build != "" {
+ return v[:len(v)-len(p.build)]
+ }
+ if p.short != "" {
+ return v + p.short
+ }
+ return v
+}
+
+// Major returns the major version prefix of the semantic version v.
+// For example, Major("v2.1.0") == "v2".
+// If v is an invalid semantic version string, Major returns the empty string.
+func Major(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ return v[:1+len(pv.major)]
+}
+
+// MajorMinor returns the major.minor version prefix of the semantic version v.
+// For example, MajorMinor("v2.1.0") == "v2.1".
+// If v is an invalid semantic version string, MajorMinor returns the empty string.
+func MajorMinor(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ i := 1 + len(pv.major)
+ if j := i + 1 + len(pv.minor); j <= len(v) && v[i] == '.' && v[i+1:j] == pv.minor {
+ return v[:j]
+ }
+ return v[:i] + "." + pv.minor
+}
+
+// Prerelease returns the prerelease suffix of the semantic version v.
+// For example, Prerelease("v2.1.0-pre+meta") == "-pre".
+// If v is an invalid semantic version string, Prerelease returns the empty string.
+func Prerelease(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ return pv.prerelease
+}
+
+// Build returns the build suffix of the semantic version v.
+// For example, Build("v2.1.0+meta") == "+meta".
+// If v is an invalid semantic version string, Build returns the empty string.
+func Build(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ return pv.build
+}
+
+// Compare returns an integer comparing two versions according to
+// according to semantic version precedence.
+// The result will be 0 if v == w, -1 if v < w, or +1 if v > w.
+//
+// An invalid semantic version string is considered less than a valid one.
+// All invalid semantic version strings compare equal to each other.
+func Compare(v, w string) int {
+ pv, ok1 := parse(v)
+ pw, ok2 := parse(w)
+ if !ok1 && !ok2 {
+ return 0
+ }
+ if !ok1 {
+ return -1
+ }
+ if !ok2 {
+ return +1
+ }
+ if c := compareInt(pv.major, pw.major); c != 0 {
+ return c
+ }
+ if c := compareInt(pv.minor, pw.minor); c != 0 {
+ return c
+ }
+ if c := compareInt(pv.patch, pw.patch); c != 0 {
+ return c
+ }
+ return comparePrerelease(pv.prerelease, pw.prerelease)
+}
+
+// Max canonicalizes its arguments and then returns the version string
+// that compares greater.
+func Max(v, w string) string {
+ v = Canonical(v)
+ w = Canonical(w)
+ if Compare(v, w) > 0 {
+ return v
+ }
+ return w
+}
+
+func parse(v string) (p parsed, ok bool) {
+ if v == "" || v[0] != 'v' {
+ p.err = "missing v prefix"
+ return
+ }
+ p.major, v, ok = parseInt(v[1:])
+ if !ok {
+ p.err = "bad major version"
+ return
+ }
+ if v == "" {
+ p.minor = "0"
+ p.patch = "0"
+ p.short = ".0.0"
+ return
+ }
+ if v[0] != '.' {
+ p.err = "bad minor prefix"
+ ok = false
+ return
+ }
+ p.minor, v, ok = parseInt(v[1:])
+ if !ok {
+ p.err = "bad minor version"
+ return
+ }
+ if v == "" {
+ p.patch = "0"
+ p.short = ".0"
+ return
+ }
+ if v[0] != '.' {
+ p.err = "bad patch prefix"
+ ok = false
+ return
+ }
+ p.patch, v, ok = parseInt(v[1:])
+ if !ok {
+ p.err = "bad patch version"
+ return
+ }
+ if len(v) > 0 && v[0] == '-' {
+ p.prerelease, v, ok = parsePrerelease(v)
+ if !ok {
+ p.err = "bad prerelease"
+ return
+ }
+ }
+ if len(v) > 0 && v[0] == '+' {
+ p.build, v, ok = parseBuild(v)
+ if !ok {
+ p.err = "bad build"
+ return
+ }
+ }
+ if v != "" {
+ p.err = "junk on end"
+ ok = false
+ return
+ }
+ ok = true
+ return
+}
+
+func parseInt(v string) (t, rest string, ok bool) {
+ if v == "" {
+ return
+ }
+ if v[0] < '0' || '9' < v[0] {
+ return
+ }
+ i := 1
+ for i < len(v) && '0' <= v[i] && v[i] <= '9' {
+ i++
+ }
+ if v[0] == '0' && i != 1 {
+ return
+ }
+ return v[:i], v[i:], true
+}
+
+func parsePrerelease(v string) (t, rest string, ok bool) {
+ // "A pre-release version MAY be denoted by appending a hyphen and
+ // a series of dot separated identifiers immediately following the patch version.
+ // Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-].
+ // Identifiers MUST NOT be empty. Numeric identifiers MUST NOT include leading zeroes."
+ if v == "" || v[0] != '-' {
+ return
+ }
+ i := 1
+ start := 1
+ for i < len(v) && v[i] != '+' {
+ if !isIdentChar(v[i]) && v[i] != '.' {
+ return
+ }
+ if v[i] == '.' {
+ if start == i || isBadNum(v[start:i]) {
+ return
+ }
+ start = i + 1
+ }
+ i++
+ }
+ if start == i || isBadNum(v[start:i]) {
+ return
+ }
+ return v[:i], v[i:], true
+}
+
+func parseBuild(v string) (t, rest string, ok bool) {
+ if v == "" || v[0] != '+' {
+ return
+ }
+ i := 1
+ start := 1
+ for i < len(v) {
+ if !isIdentChar(v[i]) {
+ return
+ }
+ if v[i] == '.' {
+ if start == i {
+ return
+ }
+ start = i + 1
+ }
+ i++
+ }
+ if start == i {
+ return
+ }
+ return v[:i], v[i:], true
+}
+
+func isIdentChar(c byte) bool {
+ return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '-'
+}
+
+func isBadNum(v string) bool {
+ i := 0
+ for i < len(v) && '0' <= v[i] && v[i] <= '9' {
+ i++
+ }
+ return i == len(v) && i > 1 && v[0] == '0'
+}
+
+func isNum(v string) bool {
+ i := 0
+ for i < len(v) && '0' <= v[i] && v[i] <= '9' {
+ i++
+ }
+ return i == len(v)
+}
+
+func compareInt(x, y string) int {
+ if x == y {
+ return 0
+ }
+ if len(x) < len(y) {
+ return -1
+ }
+ if len(x) > len(y) {
+ return +1
+ }
+ if x < y {
+ return -1
+ } else {
+ return +1
+ }
+}
+
+func comparePrerelease(x, y string) int {
+ // "When major, minor, and patch are equal, a pre-release version has
+ // lower precedence than a normal version.
+ // Example: 1.0.0-alpha < 1.0.0.
+ // Precedence for two pre-release versions with the same major, minor,
+ // and patch version MUST be determined by comparing each dot separated
+ // identifier from left to right until a difference is found as follows:
+ // identifiers consisting of only digits are compared numerically and
+ // identifiers with letters or hyphens are compared lexically in ASCII
+ // sort order. Numeric identifiers always have lower precedence than
+ // non-numeric identifiers. A larger set of pre-release fields has a
+ // higher precedence than a smaller set, if all of the preceding
+ // identifiers are equal.
+ // Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta <
+ // 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0."
+ if x == y {
+ return 0
+ }
+ if x == "" {
+ return +1
+ }
+ if y == "" {
+ return -1
+ }
+ for x != "" && y != "" {
+ x = x[1:] // skip - or .
+ y = y[1:] // skip - or .
+ var dx, dy string
+ dx, x = nextIdent(x)
+ dy, y = nextIdent(y)
+ if dx != dy {
+ ix := isNum(dx)
+ iy := isNum(dy)
+ if ix != iy {
+ if ix {
+ return -1
+ } else {
+ return +1
+ }
+ }
+ if ix {
+ if len(dx) < len(dy) {
+ return -1
+ }
+ if len(dx) > len(dy) {
+ return +1
+ }
+ }
+ if dx < dy {
+ return -1
+ } else {
+ return +1
+ }
+ }
+ }
+ if x == "" {
+ return -1
+ } else {
+ return +1
+ }
+}
+
+func nextIdent(x string) (dx, rest string) {
+ i := 0
+ for i < len(x) && x[i] != '.' {
+ i++
+ }
+ return x[:i], x[i:]
+}
diff --git a/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE b/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE
new file mode 100644
index 000000000..1723a2247
--- /dev/null
+++ b/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE
@@ -0,0 +1,22 @@
+Copyright (c) 2013-2016 Guy Bedford, Luke Hoban, Addy Osmani
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/vendor/golang.org/x/tools/third_party/typescript/LICENSE b/vendor/golang.org/x/tools/third_party/typescript/LICENSE
new file mode 100644
index 000000000..e7259f843
--- /dev/null
+++ b/vendor/golang.org/x/tools/third_party/typescript/LICENSE
@@ -0,0 +1,55 @@
+Apache License
+
+Version 2.0, January 2004
+
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+You must give any other recipients of the Work or Derivative Works a copy of this License; and
+
+You must cause any modified files to carry prominent notices stating that You changed the files; and
+
+You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+
+If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
\ No newline at end of file
diff --git a/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE b/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE
new file mode 100644
index 000000000..e648283b4
--- /dev/null
+++ b/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2015 The Polymer Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/vendor/gopkg.in/h2non/filetype.v1/.editorconfig b/vendor/gopkg.in/h2non/filetype.v1/.editorconfig
new file mode 100644
index 000000000..000dc0a7a
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/.editorconfig
@@ -0,0 +1,12 @@
+root = true
+
+[*]
+indent_style = tabs
+indent_size = 2
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+
+[*.md]
+trim_trailing_whitespace = false
diff --git a/vendor/gopkg.in/h2non/filetype.v1/.gitignore b/vendor/gopkg.in/h2non/filetype.v1/.gitignore
new file mode 100644
index 000000000..6fefe6cce
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/.gitignore
@@ -0,0 +1,2 @@
+bin
+.DS_Store
diff --git a/vendor/gopkg.in/h2non/filetype.v1/.travis.yml b/vendor/gopkg.in/h2non/filetype.v1/.travis.yml
new file mode 100644
index 000000000..739e14ebd
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/.travis.yml
@@ -0,0 +1,17 @@
+language: go
+
+go:
+ - 1.9
+ - 1.8
+ - 1.7
+ - 1.6
+ - tip
+
+before_install:
+ - go get -u -v github.com/golang/lint/golint
+
+script:
+ - diff -u <(echo -n) <(gofmt -s -d ./)
+ - diff -u <(echo -n) <(go vet ./...)
+ - diff -u <(echo -n) <(golint)
+ - go test -v -race ./...
diff --git a/vendor/gopkg.in/h2non/filetype.v1/History.md b/vendor/gopkg.in/h2non/filetype.v1/History.md
new file mode 100644
index 000000000..a33f4e740
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/History.md
@@ -0,0 +1,47 @@
+
+v1.0.5 / 2017-12-12
+===================
+
+ * Merge pull request #30 from RangelReale/fix_mp4
+ * Fix duplicated item in mp4 fix
+ * Fix MP4 matcher, with information from http://www.file-recovery.com/mp4-signature-format.htm
+ * Merge pull request #28 from ikovic/master
+ * Updated file header example.
+
+v1.0.4 / 2017-11-29
+===================
+
+ * fix: tests and document types matchers
+ * refactor(docs): remove codesponsor
+ * Merge pull request #26 from bienkma/master
+ * Add support check file type: .doc, .docx, .pptx, .ppt, .xls, .xlsx
+ * feat(docs): add code sponsor banner
+ * feat(travis): add go 1.9
+ * Merge pull request #24 from strazzere/patch-1
+ * Fix typo in unknown
+
+v1.0.3 / 2017-08-03
+===================
+
+ * Merge pull request #21 from elemeta/master
+ * Add Elf file as supported matcher archive type
+
+v1.0.2 / 2017-07-26
+===================
+
+ * Merge pull request #20 from marshyski/master
+ * Added RedHat RPM as supported matcher archive type
+ * Merge pull request #19 from nlamirault/patch-1
+ * Fix typo in documentation
+
+v1.0.1 / 2017-02-24
+===================
+
+ * Merge pull request #18 from Impyy/enable-webm
+ * Enable the webm matcher
+ * feat(docs): add Go version badge
+
+1.0.0 / 2016-12-11
+==================
+
+- Initial stable version (v1.0.0).
diff --git a/vendor/gopkg.in/h2non/filetype.v1/LICENSE b/vendor/gopkg.in/h2non/filetype.v1/LICENSE
new file mode 100644
index 000000000..30ede59b6
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/LICENSE
@@ -0,0 +1,24 @@
+The MIT License
+
+Copyright (c) Tomas Aparicio
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/gopkg.in/h2non/filetype.v1/README.md b/vendor/gopkg.in/h2non/filetype.v1/README.md
new file mode 100644
index 000000000..8d4532444
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/README.md
@@ -0,0 +1,275 @@
+# filetype [](https://travis-ci.org/h2non/filetype) [](https://godoc.org/github.com/h2non/filetype) [](http://goreportcard.com/report/h2non/filetype) [](https://github.com/h2non/gentleman)
+
+Small and dependency free [Go](https://golang.org) package to infer file and MIME type checking the [magic numbers](https://en.wikipedia.org/wiki/Magic_number_(programming)#Magic_numbers_in_files) signature.
+
+For SVG file type checking, see [go-is-svg](https://github.com/h2non/go-is-svg) package.
+
+## Features
+
+- Supports a [wide range](#supported-types) of file types
+- Provides file extension and proper MIME type
+- File discovery by extension or MIME type
+- File discovery by class (image, video, audio...)
+- Provides a bunch of helpers and file matching shortcuts
+- [Pluggable](#add-additional-file-type-matchers): add custom new types and matchers
+- Simple and semantic API
+- [Blazing fast](#benchmarks), even processing large files
+- Only first 261 bytes representing the max file header is required, so you can just [pass a slice](#file-header)
+- Dependency free (just Go code, no C compilation needed)
+- Cross-platform file recognition
+
+## Installation
+
+```bash
+go get gopkg.in/h2non/filetype.v1
+```
+
+## API
+
+See [Godoc](https://godoc.org/github.com/h2non/filetype) reference.
+
+### Subpackages
+
+- [`gopkg.in/h2non/filetype.v1/types`](https://godoc.org/github.com/h2non/filetype/types)
+- [`gopkg.in/h2non/filetype.v1/matchers`](https://godoc.org/github.com/h2non/filetype/matchers)
+
+## Examples
+
+#### Simple file type checking
+
+```go
+package main
+
+import (
+ "fmt"
+ "gopkg.in/h2non/filetype.v1"
+ "io/ioutil"
+)
+
+func main() {
+ buf, _ := ioutil.ReadFile("sample.jpg")
+
+ kind, unknown := filetype.Match(buf)
+ if unknown != nil {
+ fmt.Printf("Unknown: %s", unknown)
+ return
+ }
+
+ fmt.Printf("File type: %s. MIME: %s\n", kind.Extension, kind.MIME.Value)
+}
+```
+
+#### Check type class
+
+```go
+package main
+
+import (
+ "fmt"
+ "gopkg.in/h2non/filetype.v1"
+ "io/ioutil"
+)
+
+func main() {
+ buf, _ := ioutil.ReadFile("sample.jpg")
+
+ if filetype.IsImage(buf) {
+ fmt.Println("File is an image")
+ } else {
+ fmt.Println("Not an image")
+ }
+}
+```
+
+#### Supported type
+
+```go
+package main
+
+import (
+ "fmt"
+ "gopkg.in/h2non/filetype.v1"
+)
+
+func main() {
+ // Check if file is supported by extension
+ if filetype.IsSupported("jpg") {
+ fmt.Println("Extension supported")
+ } else {
+ fmt.Println("Extension not supported")
+ }
+
+ // Check if file is supported by MIME type
+ if filetype.IsMIMESupported("image/jpeg") {
+ fmt.Println("MIME type supported")
+ } else {
+ fmt.Println("MIME type not supported")
+ }
+}
+```
+
+#### File header
+
+```go
+package main
+
+import (
+ "fmt"
+ "gopkg.in/h2non/filetype.v1"
+ "io/ioutil"
+)
+
+func main() {
+ // Open a file descriptor
+ file, _ := os.Open("movie.mp4")
+
+ // We only have to pass the file header = first 261 bytes
+ head := make([]byte, 261)
+ file.Read(head)
+
+ if filetype.IsImage(head) {
+ fmt.Println("File is an image")
+ } else {
+ fmt.Println("Not an image")
+ }
+}
+```
+
+#### Add additional file type matchers
+
+```go
+package main
+
+import (
+ "fmt"
+ "gopkg.in/h2non/filetype.v1"
+)
+
+var fooType = filetype.NewType("foo", "foo/foo")
+
+func fooMatcher(buf []byte) bool {
+ return len(buf) > 1 && buf[0] == 0x01 && buf[1] == 0x02
+}
+
+func main() {
+ // Register the new matcher and its type
+ filetype.AddMatcher(fooType, fooMatcher)
+
+ // Check if the new type is supported by extension
+ if filetype.IsSupported("foo") {
+ fmt.Println("New supported type: foo")
+ }
+
+ // Check if the new type is supported by MIME
+ if filetype.IsMIMESupported("foo/foo") {
+ fmt.Println("New supported MIME type: foo/foo")
+ }
+
+ // Try to match the file
+ fooFile := []byte{0x01, 0x02}
+ kind, _ := filetype.Match(fooFile)
+ if kind == filetype.Unknown {
+ fmt.Println("Unknown file type")
+ } else {
+ fmt.Printf("File type matched: %s\n", kind.Extension)
+ }
+}
+```
+
+## Supported types
+
+#### Image
+
+- **jpg** - `image/jpeg`
+- **png** - `image/png`
+- **gif** - `image/gif`
+- **webp** - `image/webp`
+- **cr2** - `image/x-canon-cr2`
+- **tif** - `image/tiff`
+- **bmp** - `image/bmp`
+- **jxr** - `image/vnd.ms-photo`
+- **psd** - `image/vnd.adobe.photoshop`
+- **ico** - `image/x-icon`
+
+#### Video
+
+- **mp4** - `video/mp4`
+- **m4v** - `video/x-m4v`
+- **mkv** - `video/x-matroska`
+- **webm** - `video/webm`
+- **mov** - `video/quicktime`
+- **avi** - `video/x-msvideo`
+- **wmv** - `video/x-ms-wmv`
+- **mpg** - `video/mpeg`
+- **flv** - `video/x-flv`
+
+#### Audio
+
+- **mid** - `audio/midi`
+- **mp3** - `audio/mpeg`
+- **m4a** - `audio/m4a`
+- **ogg** - `audio/ogg`
+- **flac** - `audio/x-flac`
+- **wav** - `audio/x-wav`
+- **amr** - `audio/amr`
+
+#### Archive
+
+- **epub** - `application/epub+zip`
+- **zip** - `application/zip`
+- **tar** - `application/x-tar`
+- **rar** - `application/x-rar-compressed`
+- **gz** - `application/gzip`
+- **bz2** - `application/x-bzip2`
+- **7z** - `application/x-7z-compressed`
+- **xz** - `application/x-xz`
+- **pdf** - `application/pdf`
+- **exe** - `application/x-msdownload`
+- **swf** - `application/x-shockwave-flash`
+- **rtf** - `application/rtf`
+- **eot** - `application/octet-stream`
+- **ps** - `application/postscript`
+- **sqlite** - `application/x-sqlite3`
+- **nes** - `application/x-nintendo-nes-rom`
+- **crx** - `application/x-google-chrome-extension`
+- **cab** - `application/vnd.ms-cab-compressed`
+- **deb** - `application/x-deb`
+- **ar** - `application/x-unix-archive`
+- **Z** - `application/x-compress`
+- **lz** - `application/x-lzip`
+- **rpm** - `application/x-rpm`
+- **elf** - `application/x-executable`
+
+#### Documents
+
+- **doc** - `application/msword`
+- **docx** - `application/vnd.openxmlformats-officedocument.wordprocessingml.document`
+- **xls** - `application/vnd.ms-excel`
+- **xlsx** - `application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`
+- **ppt** - `application/vnd.ms-powerpoint`
+- **pptx** - `application/vnd.openxmlformats-officedocument.presentationml.presentation`
+
+#### Font
+
+- **woff** - `application/font-woff`
+- **woff2** - `application/font-woff`
+- **ttf** - `application/font-sfnt`
+- **otf** - `application/font-sfnt`
+
+## Benchmarks
+
+Measured using [real files](https://github.com/h2non/filetype/tree/master/fixtures).
+
+Environment: OSX x64 i7 2.7 Ghz
+
+```bash
+BenchmarkMatchTar-8 1000000 1083 ns/op
+BenchmarkMatchZip-8 1000000 1162 ns/op
+BenchmarkMatchJpeg-8 1000000 1280 ns/op
+BenchmarkMatchGif-8 1000000 1315 ns/op
+BenchmarkMatchPng-8 1000000 1121 ns/op
+```
+
+## License
+
+MIT - Tomas Aparicio
diff --git a/vendor/gopkg.in/h2non/filetype.v1/filetype.go b/vendor/gopkg.in/h2non/filetype.v1/filetype.go
new file mode 100644
index 000000000..3753c03a2
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/filetype.go
@@ -0,0 +1,87 @@
+package filetype
+
+import (
+ "errors"
+
+ "gopkg.in/h2non/filetype.v1/matchers"
+ "gopkg.in/h2non/filetype.v1/types"
+)
+
+// Types stores a map of supported types
+var Types = types.Types
+
+// NewType creates and registers a new type
+var NewType = types.NewType
+
+// Unknown represents an unknown file type
+var Unknown = types.Unknown
+
+// ErrEmptyBuffer represents an empty buffer error
+var ErrEmptyBuffer = errors.New("Empty buffer")
+
+// ErrUnknownBuffer represents an unknown buffer error
+var ErrUnknownBuffer = errors.New("Unknown buffer type")
+
+// AddType registers a new file type
+func AddType(ext, mime string) types.Type {
+ return types.NewType(ext, mime)
+}
+
+// Is checks if a given buffer matches with the given file type extension
+func Is(buf []byte, ext string) bool {
+ kind, ok := types.Types[ext]
+ if ok {
+ return IsType(buf, kind)
+ }
+ return false
+}
+
+// IsExtension semantic alias to Is()
+func IsExtension(buf []byte, ext string) bool {
+ return Is(buf, ext)
+}
+
+// IsType checks if a given buffer matches with the given file type
+func IsType(buf []byte, kind types.Type) bool {
+ matcher := matchers.Matchers[kind]
+ if matcher == nil {
+ return false
+ }
+ return matcher(buf) != types.Unknown
+}
+
+// IsMIME checks if a given buffer matches with the given MIME type
+func IsMIME(buf []byte, mime string) bool {
+ for _, kind := range types.Types {
+ if kind.MIME.Value == mime {
+ matcher := matchers.Matchers[kind]
+ return matcher(buf) != types.Unknown
+ }
+ }
+ return false
+}
+
+// IsSupported checks if a given file extension is supported
+func IsSupported(ext string) bool {
+ for name := range Types {
+ if name == ext {
+ return true
+ }
+ }
+ return false
+}
+
+// IsMIMESupported checks if a given MIME type is supported
+func IsMIMESupported(mime string) bool {
+ for _, m := range Types {
+ if m.MIME.Value == mime {
+ return true
+ }
+ }
+ return false
+}
+
+// GetType retrieves a Type by file extension
+func GetType(ext string) types.Type {
+ return types.Get(ext)
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/kind.go b/vendor/gopkg.in/h2non/filetype.v1/kind.go
new file mode 100644
index 000000000..49397258c
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/kind.go
@@ -0,0 +1,69 @@
+package filetype
+
+import (
+ "gopkg.in/h2non/filetype.v1/matchers"
+ "gopkg.in/h2non/filetype.v1/types"
+)
+
+// Image tries to match a file as image type
+func Image(buf []byte) (types.Type, error) {
+ return doMatchMap(buf, matchers.Image)
+}
+
+// IsImage checks if the given buffer is an image type
+func IsImage(buf []byte) bool {
+ kind, _ := Image(buf)
+ return kind != types.Unknown
+}
+
+// Audio tries to match a file as audio type
+func Audio(buf []byte) (types.Type, error) {
+ return doMatchMap(buf, matchers.Audio)
+}
+
+// IsAudio checks if the given buffer is an audio type
+func IsAudio(buf []byte) bool {
+ kind, _ := Audio(buf)
+ return kind != types.Unknown
+}
+
+// Video tries to match a file as video type
+func Video(buf []byte) (types.Type, error) {
+ return doMatchMap(buf, matchers.Video)
+}
+
+// IsVideo checks if the given buffer is a video type
+func IsVideo(buf []byte) bool {
+ kind, _ := Video(buf)
+ return kind != types.Unknown
+}
+
+// Font tries to match a file as text font type
+func Font(buf []byte) (types.Type, error) {
+ return doMatchMap(buf, matchers.Font)
+}
+
+// IsFont checks if the given buffer is a font type
+func IsFont(buf []byte) bool {
+ kind, _ := Font(buf)
+ return kind != types.Unknown
+}
+
+// Archive tries to match a file as generic archive type
+func Archive(buf []byte) (types.Type, error) {
+ return doMatchMap(buf, matchers.Archive)
+}
+
+// IsArchive checks if the given buffer is an archive type
+func IsArchive(buf []byte) bool {
+ kind, _ := Archive(buf)
+ return kind != types.Unknown
+}
+
+func doMatchMap(buf []byte, machers matchers.Map) (types.Type, error) {
+ kind := MatchMap(buf, machers)
+ if kind != types.Unknown {
+ return kind, nil
+ }
+ return kind, ErrUnknownBuffer
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/match.go b/vendor/gopkg.in/h2non/filetype.v1/match.go
new file mode 100644
index 000000000..9b6e376f1
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/match.go
@@ -0,0 +1,86 @@
+package filetype
+
+import (
+ "io"
+ "os"
+
+ "gopkg.in/h2non/filetype.v1/matchers"
+ "gopkg.in/h2non/filetype.v1/types"
+)
+
+// Matchers is an alias to matchers.Matchers
+var Matchers = matchers.Matchers
+
+// NewMatcher is an alias to matchers.NewMatcher
+var NewMatcher = matchers.NewMatcher
+
+// Match infers the file type of a given buffer inspecting its magic numbers signature
+func Match(buf []byte) (types.Type, error) {
+ length := len(buf)
+ if length == 0 {
+ return types.Unknown, ErrEmptyBuffer
+ }
+
+ for _, checker := range Matchers {
+ match := checker(buf)
+ if match != types.Unknown && match.Extension != "" {
+ return match, nil
+ }
+ }
+
+ return types.Unknown, nil
+}
+
+// Get is an alias to Match()
+func Get(buf []byte) (types.Type, error) {
+ return Match(buf)
+}
+
+// MatchFile infers a file type for a file
+func MatchFile(filepath string) (types.Type, error) {
+ file, err := os.Open(filepath)
+ if err != nil {
+ return types.Unknown, err
+ }
+ defer file.Close()
+
+ return MatchReader(file)
+}
+
+// MatchReader is a convenient wrapper to Match() that reads from any io.Reader
+func MatchReader(reader io.Reader) (types.Type, error) {
+ buffer := make([]byte, 512)
+
+ _, err := reader.Read(buffer)
+ if err != nil && err != io.EOF {
+ return types.Unknown, err
+ }
+
+ return Match(buffer)
+}
+
+// AddMatcher registers a new matcher type
+func AddMatcher(fileType types.Type, matcher matchers.Matcher) matchers.TypeMatcher {
+ return matchers.NewMatcher(fileType, matcher)
+}
+
+// Matches checks if the given buffer matches with some supported file type
+func Matches(buf []byte) bool {
+ kind, _ := Match(buf)
+ return kind != types.Unknown
+}
+
+// MatchMap performs a file matching against a map of match functions
+func MatchMap(buf []byte, matchers matchers.Map) types.Type {
+ for kind, matcher := range matchers {
+ if matcher(buf) {
+ return kind
+ }
+ }
+ return types.Unknown
+}
+
+// MatchesMap is an alias to Matches() but matching against a map of match functions
+func MatchesMap(buf []byte, matchers matchers.Map) bool {
+ return MatchMap(buf, matchers) != types.Unknown
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/matchers/archive.go b/vendor/gopkg.in/h2non/filetype.v1/matchers/archive.go
new file mode 100644
index 000000000..9c1270ffb
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/matchers/archive.go
@@ -0,0 +1,217 @@
+package matchers
+
+var (
+ TypeEpub = newType("epub", "application/epub+zip")
+ TypeZip = newType("zip", "application/zip")
+ TypeTar = newType("tar", "application/x-tar")
+ TypeRar = newType("rar", "application/x-rar-compressed")
+ TypeGz = newType("gz", "application/gzip")
+ TypeBz2 = newType("bz2", "application/x-bzip2")
+ Type7z = newType("7z", "application/x-7z-compressed")
+ TypeXz = newType("xz", "application/x-xz")
+ TypePdf = newType("pdf", "application/pdf")
+ TypeExe = newType("exe", "application/x-msdownload")
+ TypeSwf = newType("swf", "application/x-shockwave-flash")
+ TypeRtf = newType("rtf", "application/rtf")
+ TypeEot = newType("eot", "application/octet-stream")
+ TypePs = newType("ps", "application/postscript")
+ TypeSqlite = newType("sqlite", "application/x-sqlite3")
+ TypeNes = newType("nes", "application/x-nintendo-nes-rom")
+ TypeCrx = newType("crx", "application/x-google-chrome-extension")
+ TypeCab = newType("cab", "application/vnd.ms-cab-compressed")
+ TypeDeb = newType("deb", "application/x-deb")
+ TypeAr = newType("ar", "application/x-unix-archive")
+ TypeZ = newType("Z", "application/x-compress")
+ TypeLz = newType("lz", "application/x-lzip")
+ TypeRpm = newType("rpm", "application/x-rpm")
+ TypeElf = newType("elf", "application/x-executable")
+)
+
+var Archive = Map{
+ TypeEpub: Epub,
+ TypeZip: Zip,
+ TypeTar: Tar,
+ TypeRar: Rar,
+ TypeGz: Gz,
+ TypeBz2: Bz2,
+ Type7z: SevenZ,
+ TypeXz: Xz,
+ TypePdf: Pdf,
+ TypeExe: Exe,
+ TypeSwf: Swf,
+ TypeRtf: Rtf,
+ TypeEot: Eot,
+ TypePs: Ps,
+ TypeSqlite: Sqlite,
+ TypeNes: Nes,
+ TypeCrx: Crx,
+ TypeCab: Cab,
+ TypeDeb: Deb,
+ TypeAr: Ar,
+ TypeZ: Z,
+ TypeLz: Lz,
+ TypeRpm: Rpm,
+ TypeElf: Elf,
+}
+
+func Epub(buf []byte) bool {
+ return len(buf) > 57 &&
+ buf[0] == 0x50 && buf[1] == 0x4B && buf[2] == 0x3 && buf[3] == 0x4 &&
+ buf[30] == 0x6D && buf[31] == 0x69 && buf[32] == 0x6D && buf[33] == 0x65 &&
+ buf[34] == 0x74 && buf[35] == 0x79 && buf[36] == 0x70 && buf[37] == 0x65 &&
+ buf[38] == 0x61 && buf[39] == 0x70 && buf[40] == 0x70 && buf[41] == 0x6C &&
+ buf[42] == 0x69 && buf[43] == 0x63 && buf[44] == 0x61 && buf[45] == 0x74 &&
+ buf[46] == 0x69 && buf[47] == 0x6F && buf[48] == 0x6E && buf[49] == 0x2F &&
+ buf[50] == 0x65 && buf[51] == 0x70 && buf[52] == 0x75 && buf[53] == 0x62 &&
+ buf[54] == 0x2B && buf[55] == 0x7A && buf[56] == 0x69 && buf[57] == 0x70
+}
+
+func Zip(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x50 && buf[1] == 0x4B &&
+ (buf[2] == 0x3 || buf[2] == 0x5 || buf[2] == 0x7) &&
+ (buf[3] == 0x4 || buf[3] == 0x6 || buf[3] == 0x8)
+}
+
+func Tar(buf []byte) bool {
+ return len(buf) > 261 &&
+ buf[257] == 0x75 && buf[258] == 0x73 &&
+ buf[259] == 0x74 && buf[260] == 0x61 &&
+ buf[261] == 0x72
+}
+
+func Rar(buf []byte) bool {
+ return len(buf) > 6 &&
+ buf[0] == 0x52 && buf[1] == 0x61 && buf[2] == 0x72 &&
+ buf[3] == 0x21 && buf[4] == 0x1A && buf[5] == 0x7 &&
+ (buf[6] == 0x0 || buf[6] == 0x1)
+}
+
+func Gz(buf []byte) bool {
+ return len(buf) > 2 &&
+ buf[0] == 0x1F && buf[1] == 0x8B && buf[2] == 0x8
+}
+
+func Bz2(buf []byte) bool {
+ return len(buf) > 2 &&
+ buf[0] == 0x42 && buf[1] == 0x5A && buf[2] == 0x68
+}
+
+func SevenZ(buf []byte) bool {
+ return len(buf) > 5 &&
+ buf[0] == 0x37 && buf[1] == 0x7A && buf[2] == 0xBC &&
+ buf[3] == 0xAF && buf[4] == 0x27 && buf[5] == 0x1C
+}
+
+func Pdf(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x25 && buf[1] == 0x50 &&
+ buf[2] == 0x44 && buf[3] == 0x46
+}
+
+func Exe(buf []byte) bool {
+ return len(buf) > 1 &&
+ buf[0] == 0x4D && buf[1] == 0x5A
+}
+
+func Swf(buf []byte) bool {
+ return len(buf) > 2 &&
+ (buf[0] == 0x43 || buf[0] == 0x46) &&
+ buf[1] == 0x57 && buf[2] == 0x53
+}
+
+func Rtf(buf []byte) bool {
+ return len(buf) > 4 &&
+ buf[0] == 0x7B && buf[1] == 0x5C &&
+ buf[2] == 0x72 && buf[3] == 0x74 &&
+ buf[4] == 0x66
+}
+
+func Nes(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x4E && buf[1] == 0x45 &&
+ buf[2] == 0x53 && buf[3] == 0x1A
+}
+
+func Crx(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x43 && buf[1] == 0x72 &&
+ buf[2] == 0x32 && buf[3] == 0x34
+}
+
+func Cab(buf []byte) bool {
+ return len(buf) > 3 &&
+ ((buf[0] == 0x4D && buf[1] == 0x53 && buf[2] == 0x43 && buf[3] == 0x46) ||
+ (buf[0] == 0x49 && buf[1] == 0x53 && buf[2] == 0x63 && buf[3] == 0x28))
+}
+
+func Eot(buf []byte) bool {
+ return len(buf) > 35 &&
+ buf[34] == 0x4C && buf[35] == 0x50 &&
+ ((buf[8] == 0x02 && buf[9] == 0x00 &&
+ buf[10] == 0x01) || (buf[8] == 0x01 &&
+ buf[9] == 0x00 && buf[10] == 0x00) ||
+ (buf[8] == 0x02 && buf[9] == 0x00 &&
+ buf[10] == 0x02))
+}
+
+func Ps(buf []byte) bool {
+ return len(buf) > 1 &&
+ buf[0] == 0x25 && buf[1] == 0x21
+}
+
+func Xz(buf []byte) bool {
+ return len(buf) > 5 &&
+ buf[0] == 0xFD && buf[1] == 0x37 &&
+ buf[2] == 0x7A && buf[3] == 0x58 &&
+ buf[4] == 0x5A && buf[5] == 0x00
+}
+
+func Sqlite(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x53 && buf[1] == 0x51 &&
+ buf[2] == 0x4C && buf[3] == 0x69
+}
+
+func Deb(buf []byte) bool {
+ return len(buf) > 20 &&
+ buf[0] == 0x21 && buf[1] == 0x3C && buf[2] == 0x61 &&
+ buf[3] == 0x72 && buf[4] == 0x63 && buf[5] == 0x68 &&
+ buf[6] == 0x3E && buf[7] == 0x0A && buf[8] == 0x64 &&
+ buf[9] == 0x65 && buf[10] == 0x62 && buf[11] == 0x69 &&
+ buf[12] == 0x61 && buf[13] == 0x6E && buf[14] == 0x2D &&
+ buf[15] == 0x62 && buf[16] == 0x69 && buf[17] == 0x6E &&
+ buf[18] == 0x61 && buf[19] == 0x72 && buf[20] == 0x79
+}
+
+func Ar(buf []byte) bool {
+ return len(buf) > 6 &&
+ buf[0] == 0x21 && buf[1] == 0x3C &&
+ buf[2] == 0x61 && buf[3] == 0x72 &&
+ buf[4] == 0x63 && buf[5] == 0x68 &&
+ buf[6] == 0x3E
+}
+
+func Z(buf []byte) bool {
+ return len(buf) > 1 &&
+ ((buf[0] == 0x1F && buf[1] == 0xA0) ||
+ (buf[0] == 0x1F && buf[1] == 0x9D))
+}
+
+func Lz(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x4C && buf[1] == 0x5A &&
+ buf[2] == 0x49 && buf[3] == 0x50
+}
+
+func Rpm(buf []byte) bool {
+ return len(buf) > 96 &&
+ buf[0] == 0xED && buf[1] == 0xAB &&
+ buf[2] == 0xEE && buf[3] == 0xDB
+}
+
+func Elf(buf []byte) bool {
+ return len(buf) > 52 &&
+ buf[0] == 0x7F && buf[1] == 0x45 &&
+ buf[2] == 0x4C && buf[3] == 0x46
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/matchers/audio.go b/vendor/gopkg.in/h2non/filetype.v1/matchers/audio.go
new file mode 100644
index 000000000..7b27caf1d
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/matchers/audio.go
@@ -0,0 +1,67 @@
+package matchers
+
+var (
+ TypeMidi = newType("mid", "audio/midi")
+ TypeMp3 = newType("mp3", "audio/mpeg")
+ TypeM4a = newType("m4a", "audio/m4a")
+ TypeOgg = newType("ogg", "audio/ogg")
+ TypeFlac = newType("flac", "audio/x-flac")
+ TypeWav = newType("wav", "audio/x-wav")
+ TypeAmr = newType("amr", "audio/amr")
+)
+
+var Audio = Map{
+ TypeMidi: Midi,
+ TypeMp3: Mp3,
+ TypeM4a: M4a,
+ TypeOgg: Ogg,
+ TypeFlac: Flac,
+ TypeWav: Wav,
+ TypeAmr: Amr,
+}
+
+func Midi(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x4D && buf[1] == 0x54 &&
+ buf[2] == 0x68 && buf[3] == 0x64
+}
+
+func Mp3(buf []byte) bool {
+ return len(buf) > 2 &&
+ ((buf[0] == 0x49 && buf[1] == 0x44 && buf[2] == 0x33) ||
+ (buf[0] == 0xFF && buf[1] == 0xfb))
+}
+
+func M4a(buf []byte) bool {
+ return len(buf) > 10 &&
+ ((buf[4] == 0x66 && buf[5] == 0x74 && buf[6] == 0x79 &&
+ buf[7] == 0x70 && buf[8] == 0x4D && buf[9] == 0x34 && buf[10] == 0x41) ||
+ (buf[0] == 0x4D && buf[1] == 0x34 && buf[2] == 0x41 && buf[3] == 0x20))
+}
+
+func Ogg(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x4F && buf[1] == 0x67 &&
+ buf[2] == 0x67 && buf[3] == 0x53
+}
+
+func Flac(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x66 && buf[1] == 0x4C &&
+ buf[2] == 0x61 && buf[3] == 0x43
+}
+
+func Wav(buf []byte) bool {
+ return len(buf) > 11 &&
+ buf[0] == 0x52 && buf[1] == 0x49 &&
+ buf[2] == 0x46 && buf[3] == 0x46 &&
+ buf[8] == 0x57 && buf[9] == 0x41 &&
+ buf[10] == 0x56 && buf[11] == 0x45
+}
+
+func Amr(buf []byte) bool {
+ return len(buf) > 11 &&
+ buf[0] == 0x23 && buf[1] == 0x21 &&
+ buf[2] == 0x41 && buf[3] == 0x4D &&
+ buf[4] == 0x52 && buf[5] == 0x0A
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/matchers/document.go b/vendor/gopkg.in/h2non/filetype.v1/matchers/document.go
new file mode 100644
index 000000000..cc5ded202
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/matchers/document.go
@@ -0,0 +1,66 @@
+package matchers
+
+import "bytes"
+
+var (
+ TypeDoc = newType("doc", "application/msword")
+ TypeDocx = newType("docx", "application/vnd.openxmlformats-officedocument.wordprocessingml.document")
+ TypeXls = newType("xls", "application/vnd.ms-excel")
+ TypeXlsx = newType("xlsx", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
+ TypePpt = newType("ppt", "application/vnd.ms-powerpoint")
+ TypePptx = newType("pptx", "application/vnd.openxmlformats-officedocument.presentationml.presentation")
+)
+
+var Document = Map{
+ TypeDoc: Doc,
+ TypeDocx: Docx,
+ TypeXls: Xls,
+ TypeXlsx: Xlsx,
+ TypePpt: Ppt,
+ TypePptx: Pptx,
+}
+
+func Doc(buf []byte) bool {
+ return len(buf) > 7 &&
+ buf[0] == 0xD0 && buf[1] == 0xCF &&
+ buf[2] == 0x11 && buf[3] == 0xE0 &&
+ buf[4] == 0xA1 && buf[5] == 0xB1 &&
+ buf[6] == 0x1A && buf[7] == 0xE1
+}
+
+func Docx(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x50 && buf[1] == 0x4B &&
+ buf[2] == 0x03 && buf[3] == 0x04 &&
+ bytes.Contains(buf[:256], []byte(TypeDocx.MIME.Value))
+}
+
+func Xls(buf []byte) bool {
+ return len(buf) > 7 &&
+ buf[0] == 0xD0 && buf[1] == 0xCF &&
+ buf[2] == 0x11 && buf[3] == 0xE0 &&
+ buf[4] == 0xA1 && buf[5] == 0xB1 &&
+ buf[6] == 0x1A && buf[7] == 0xE1
+}
+
+func Xlsx(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x50 && buf[1] == 0x4B &&
+ buf[2] == 0x03 && buf[3] == 0x04 &&
+ bytes.Contains(buf[:256], []byte(TypeXlsx.MIME.Value))
+}
+
+func Ppt(buf []byte) bool {
+ return len(buf) > 7 &&
+ buf[0] == 0xD0 && buf[1] == 0xCF &&
+ buf[2] == 0x11 && buf[3] == 0xE0 &&
+ buf[4] == 0xA1 && buf[5] == 0xB1 &&
+ buf[6] == 0x1A && buf[7] == 0xE1
+}
+
+func Pptx(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x50 && buf[1] == 0x4B &&
+ buf[2] == 0x07 && buf[3] == 0x08 &&
+ bytes.Contains(buf[:256], []byte(TypePptx.MIME.Value))
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/matchers/font.go b/vendor/gopkg.in/h2non/filetype.v1/matchers/font.go
new file mode 100644
index 000000000..f39171675
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/matchers/font.go
@@ -0,0 +1,45 @@
+package matchers
+
+var (
+ TypeWoff = newType("woff", "application/font-woff")
+ TypeWoff2 = newType("woff2", "application/font-woff")
+ TypeTtf = newType("ttf", "application/font-sfnt")
+ TypeOtf = newType("otf", "application/font-sfnt")
+)
+
+var Font = Map{
+ TypeWoff: Woff,
+ TypeWoff2: Woff2,
+ TypeTtf: Ttf,
+ TypeOtf: Otf,
+}
+
+func Woff(buf []byte) bool {
+ return len(buf) > 7 &&
+ buf[0] == 0x77 && buf[1] == 0x4F &&
+ buf[2] == 0x46 && buf[3] == 0x46 &&
+ buf[4] == 0x00 && buf[5] == 0x01 &&
+ buf[6] == 0x00 && buf[7] == 0x00
+}
+
+func Woff2(buf []byte) bool {
+ return len(buf) > 7 &&
+ buf[0] == 0x77 && buf[1] == 0x4F &&
+ buf[2] == 0x46 && buf[3] == 0x32 &&
+ buf[4] == 0x00 && buf[5] == 0x01 &&
+ buf[6] == 0x00 && buf[7] == 0x00
+}
+
+func Ttf(buf []byte) bool {
+ return len(buf) > 4 &&
+ buf[0] == 0x00 && buf[1] == 0x01 &&
+ buf[2] == 0x00 && buf[3] == 0x00 &&
+ buf[4] == 0x00
+}
+
+func Otf(buf []byte) bool {
+ return len(buf) > 4 &&
+ buf[0] == 0x4F && buf[1] == 0x54 &&
+ buf[2] == 0x54 && buf[3] == 0x4F &&
+ buf[4] == 0x00
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/matchers/image.go b/vendor/gopkg.in/h2non/filetype.v1/matchers/image.go
new file mode 100644
index 000000000..bc3378d6c
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/matchers/image.go
@@ -0,0 +1,89 @@
+package matchers
+
+var (
+ TypeJpeg = newType("jpg", "image/jpeg")
+ TypePng = newType("png", "image/png")
+ TypeGif = newType("gif", "image/gif")
+ TypeWebp = newType("webp", "image/webp")
+ TypeCR2 = newType("cr2", "image/x-canon-cr2")
+ TypeTiff = newType("tif", "image/tiff")
+ TypeBmp = newType("bmp", "image/bmp")
+ TypeJxr = newType("jxr", "image/vnd.ms-photo")
+ TypePsd = newType("psd", "image/vnd.adobe.photoshop")
+ TypeIco = newType("ico", "image/x-icon")
+)
+
+var Image = Map{
+ TypeJpeg: Jpeg,
+ TypePng: Png,
+ TypeGif: Gif,
+ TypeWebp: Webp,
+ TypeCR2: CR2,
+ TypeTiff: Tiff,
+ TypeBmp: Bmp,
+ TypeJxr: Jxr,
+ TypePsd: Psd,
+ TypeIco: Ico,
+}
+
+func Jpeg(buf []byte) bool {
+ return len(buf) > 2 &&
+ buf[0] == 0xFF &&
+ buf[1] == 0xD8 &&
+ buf[2] == 0xFF
+}
+
+func Png(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x89 && buf[1] == 0x50 &&
+ buf[2] == 0x4E && buf[3] == 0x47
+}
+
+func Gif(buf []byte) bool {
+ return len(buf) > 2 &&
+ buf[0] == 0x47 && buf[1] == 0x49 && buf[2] == 0x46
+}
+
+func Webp(buf []byte) bool {
+ return len(buf) > 11 &&
+ buf[8] == 0x57 && buf[9] == 0x45 &&
+ buf[10] == 0x42 && buf[11] == 0x50
+}
+
+func CR2(buf []byte) bool {
+ return len(buf) > 9 &&
+ ((buf[0] == 0x49 && buf[1] == 0x49 && buf[2] == 0x2A && buf[3] == 0x0) ||
+ (buf[0] == 0x4D && buf[1] == 0x4D && buf[2] == 0x0 && buf[3] == 0x2A)) &&
+ buf[8] == 0x43 && buf[9] == 0x52
+}
+
+func Tiff(buf []byte) bool {
+ return len(buf) > 3 &&
+ ((buf[0] == 0x49 && buf[1] == 0x49 && buf[2] == 0x2A && buf[3] == 0x0) ||
+ (buf[0] == 0x4D && buf[1] == 0x4D && buf[2] == 0x0 && buf[3] == 0x2A))
+}
+
+func Bmp(buf []byte) bool {
+ return len(buf) > 1 &&
+ buf[0] == 0x42 &&
+ buf[1] == 0x4D
+}
+
+func Jxr(buf []byte) bool {
+ return len(buf) > 2 &&
+ buf[0] == 0x49 &&
+ buf[1] == 0x49 &&
+ buf[2] == 0xBC
+}
+
+func Psd(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x38 && buf[1] == 0x42 &&
+ buf[2] == 0x50 && buf[3] == 0x53
+}
+
+func Ico(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x00 && buf[1] == 0x00 &&
+ buf[2] == 0x01 && buf[3] == 0x00
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/matchers/matchers.go b/vendor/gopkg.in/h2non/filetype.v1/matchers/matchers.go
new file mode 100644
index 000000000..4525c02a4
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/matchers/matchers.go
@@ -0,0 +1,44 @@
+package matchers
+
+import "gopkg.in/h2non/filetype.v1/types"
+
+// Internal shortcut to NewType
+var newType = types.NewType
+
+// Matcher function interface as type alias
+type Matcher func([]byte) bool
+
+// Type interface to store pairs of type with its matcher function
+type Map map[types.Type]Matcher
+
+// Type specific matcher function interface
+type TypeMatcher func([]byte) types.Type
+
+// Store registered file type matchers
+var Matchers = make(map[types.Type]TypeMatcher)
+
+// Create and register a new type matcher function
+func NewMatcher(kind types.Type, fn Matcher) TypeMatcher {
+ matcher := func(buf []byte) types.Type {
+ if fn(buf) {
+ return kind
+ }
+ return types.Unknown
+ }
+
+ Matchers[kind] = matcher
+ return matcher
+}
+
+func register(matchers ...Map) {
+ for _, m := range matchers {
+ for kind, matcher := range m {
+ NewMatcher(kind, matcher)
+ }
+ }
+}
+
+func init() {
+ // Arguments order is intentional
+ register(Image, Video, Audio, Font, Document, Archive)
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/matchers/video.go b/vendor/gopkg.in/h2non/filetype.v1/matchers/video.go
new file mode 100644
index 000000000..9b8350b38
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/matchers/video.go
@@ -0,0 +1,120 @@
+package matchers
+
+var (
+ TypeMp4 = newType("mp4", "video/mp4")
+ TypeM4v = newType("m4v", "video/x-m4v")
+ TypeMkv = newType("mkv", "video/x-matroska")
+ TypeWebm = newType("webm", "video/webm")
+ TypeMov = newType("mov", "video/quicktime")
+ TypeAvi = newType("avi", "video/x-msvideo")
+ TypeWmv = newType("wmv", "video/x-ms-wmv")
+ TypeMpeg = newType("mpg", "video/mpeg")
+ TypeFlv = newType("flv", "video/x-flv")
+)
+
+var Video = Map{
+ TypeMp4: Mp4,
+ TypeM4v: M4v,
+ TypeMkv: Mkv,
+ TypeWebm: Webm,
+ TypeMov: Mov,
+ TypeAvi: Avi,
+ TypeWmv: Wmv,
+ TypeMpeg: Mpeg,
+ TypeFlv: Flv,
+}
+
+func M4v(buf []byte) bool {
+ return len(buf) > 10 &&
+ buf[0] == 0x0 && buf[1] == 0x0 &&
+ buf[2] == 0x0 && buf[3] == 0x1C &&
+ buf[4] == 0x66 && buf[5] == 0x74 &&
+ buf[6] == 0x79 && buf[7] == 0x70 &&
+ buf[8] == 0x4D && buf[9] == 0x34 &&
+ buf[10] == 0x56
+}
+
+func Mkv(buf []byte) bool {
+ return (len(buf) > 15 &&
+ buf[0] == 0x1A && buf[1] == 0x45 &&
+ buf[2] == 0xDF && buf[3] == 0xA3 &&
+ buf[4] == 0x93 && buf[5] == 0x42 &&
+ buf[6] == 0x82 && buf[7] == 0x88 &&
+ buf[8] == 0x6D && buf[9] == 0x61 &&
+ buf[10] == 0x74 && buf[11] == 0x72 &&
+ buf[12] == 0x6F && buf[13] == 0x73 &&
+ buf[14] == 0x6B && buf[15] == 0x61) ||
+ (len(buf) > 38 &&
+ buf[31] == 0x6D && buf[32] == 0x61 &&
+ buf[33] == 0x74 && buf[34] == 0x72 &&
+ buf[35] == 0x6f && buf[36] == 0x73 &&
+ buf[37] == 0x6B && buf[38] == 0x61)
+}
+
+func Webm(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x1A && buf[1] == 0x45 &&
+ buf[2] == 0xDF && buf[3] == 0xA3
+}
+
+func Mov(buf []byte) bool {
+ return len(buf) > 7 &&
+ buf[0] == 0x0 && buf[1] == 0x0 &&
+ buf[2] == 0x0 && buf[3] == 0x14 &&
+ buf[4] == 0x66 && buf[5] == 0x74 &&
+ buf[6] == 0x79 && buf[7] == 0x70
+}
+
+func Avi(buf []byte) bool {
+ return len(buf) > 10 &&
+ buf[0] == 0x52 && buf[1] == 0x49 &&
+ buf[2] == 0x46 && buf[3] == 0x46 &&
+ buf[8] == 0x41 && buf[9] == 0x56 &&
+ buf[10] == 0x49
+}
+
+func Wmv(buf []byte) bool {
+ return len(buf) > 9 &&
+ buf[0] == 0x30 && buf[1] == 0x26 &&
+ buf[2] == 0xB2 && buf[3] == 0x75 &&
+ buf[4] == 0x8E && buf[5] == 0x66 &&
+ buf[6] == 0xCF && buf[7] == 0x11 &&
+ buf[8] == 0xA6 && buf[9] == 0xD9
+}
+
+func Mpeg(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x0 && buf[1] == 0x0 &&
+ buf[2] == 0x1 && buf[3] >= 0xb0 &&
+ buf[3] <= 0xbf
+}
+
+func Flv(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x46 && buf[1] == 0x4C &&
+ buf[2] == 0x56 && buf[3] == 0x01
+}
+
+func Mp4(buf []byte) bool {
+ return len(buf) > 11 &&
+ (buf[4] == 'f' && buf[5] == 't' && buf[6] == 'y' && buf[7] == 'p') &&
+ ((buf[8] == 'a' && buf[9] == 'v' && buf[10] == 'c' && buf[11] == '1') ||
+ (buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == '2') ||
+ (buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == 'm') ||
+ (buf[8] == 'm' && buf[9] == 'm' && buf[10] == 'p' && buf[11] == '4') ||
+ (buf[8] == 'm' && buf[9] == 'p' && buf[10] == '4' && buf[11] == '1') ||
+ (buf[8] == 'm' && buf[9] == 'p' && buf[10] == '4' && buf[11] == '2') ||
+ (buf[8] == 'm' && buf[9] == 'p' && buf[10] == '7' && buf[11] == '1') ||
+ (buf[8] == 'm' && buf[9] == 's' && buf[10] == 'n' && buf[11] == 'v') ||
+ (buf[8] == 'n' && buf[9] == 'd' && buf[10] == 'a' && buf[11] == 's') ||
+ (buf[8] == 'n' && buf[9] == 'd' && buf[10] == 's' && buf[11] == 'c') ||
+ (buf[8] == 'n' && buf[9] == 'd' && buf[10] == 's' && buf[11] == 'h') ||
+ (buf[8] == 'n' && buf[9] == 'd' && buf[10] == 's' && buf[11] == 'm') ||
+ (buf[8] == 'n' && buf[9] == 'd' && buf[10] == 's' && buf[11] == 'p') ||
+ (buf[8] == 'n' && buf[9] == 'd' && buf[10] == 's' && buf[11] == 's') ||
+ (buf[8] == 'n' && buf[9] == 'd' && buf[10] == 'x' && buf[11] == 'c') ||
+ (buf[8] == 'n' && buf[9] == 'd' && buf[10] == 'x' && buf[11] == 'h') ||
+ (buf[8] == 'n' && buf[9] == 'd' && buf[10] == 'x' && buf[11] == 'm') ||
+ (buf[8] == 'n' && buf[9] == 'd' && buf[10] == 'x' && buf[11] == 'p') ||
+ (buf[8] == 'n' && buf[9] == 'd' && buf[10] == 'x' && buf[11] == 's'))
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/types/defaults.go b/vendor/gopkg.in/h2non/filetype.v1/types/defaults.go
new file mode 100644
index 000000000..bb1ea62ee
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/types/defaults.go
@@ -0,0 +1,4 @@
+package types
+
+// Unkown default type
+var Unknown = NewType("unknown", "")
diff --git a/vendor/gopkg.in/h2non/filetype.v1/types/mime.go b/vendor/gopkg.in/h2non/filetype.v1/types/mime.go
new file mode 100644
index 000000000..fe8ea822e
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/types/mime.go
@@ -0,0 +1,14 @@
+package types
+
+// MIME stores the file MIME type values
+type MIME struct {
+ Type string
+ Subtype string
+ Value string
+}
+
+// Creates a new MIME type
+func NewMIME(mime string) MIME {
+ kind, subtype := splitMime(mime)
+ return MIME{Type: kind, Subtype: subtype, Value: mime}
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/types/split.go b/vendor/gopkg.in/h2non/filetype.v1/types/split.go
new file mode 100644
index 000000000..68a5a8b3b
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/types/split.go
@@ -0,0 +1,11 @@
+package types
+
+import "strings"
+
+func splitMime(s string) (string, string) {
+ x := strings.Split(s, "/")
+ if len(x) > 1 {
+ return x[0], x[1]
+ }
+ return x[0], ""
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/types/type.go b/vendor/gopkg.in/h2non/filetype.v1/types/type.go
new file mode 100644
index 000000000..5cf7dfc4b
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/types/type.go
@@ -0,0 +1,16 @@
+package types
+
+// Type represents a file MIME type and its extension
+type Type struct {
+ MIME MIME
+ Extension string
+}
+
+// NewType creates a new Type
+func NewType(ext, mime string) Type {
+ t := Type{
+ MIME: NewMIME(mime),
+ Extension: ext,
+ }
+ return Add(t)
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/types/types.go b/vendor/gopkg.in/h2non/filetype.v1/types/types.go
new file mode 100644
index 000000000..27d433eec
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/types/types.go
@@ -0,0 +1,18 @@
+package types
+
+var Types = make(map[string]Type)
+
+// Add registers a new type in the package
+func Add(t Type) Type {
+ Types[t.Extension] = t
+ return t
+}
+
+// Get retrieves a Type by extension
+func Get(ext string) Type {
+ kind := Types[ext]
+ if kind.Extension != "" {
+ return kind
+ }
+ return Unknown
+}
diff --git a/vendor/gopkg.in/h2non/filetype.v1/version.go b/vendor/gopkg.in/h2non/filetype.v1/version.go
new file mode 100644
index 000000000..4ef1e52dd
--- /dev/null
+++ b/vendor/gopkg.in/h2non/filetype.v1/version.go
@@ -0,0 +1,4 @@
+package filetype
+
+// Version exposes the current package version.
+const Version = "1.0.5"
diff --git a/vendor/gopkg.in/yaml.v2/.travis.yml b/vendor/gopkg.in/yaml.v2/.travis.yml
index 004172a2e..9f556934d 100644
--- a/vendor/gopkg.in/yaml.v2/.travis.yml
+++ b/vendor/gopkg.in/yaml.v2/.travis.yml
@@ -4,6 +4,9 @@ go:
- 1.4
- 1.5
- 1.6
+ - 1.7
+ - 1.8
+ - 1.9
- tip
go_import_path: gopkg.in/yaml.v2
diff --git a/vendor/gopkg.in/yaml.v2/LICENSE b/vendor/gopkg.in/yaml.v2/LICENSE
index 866d74a7a..8dada3eda 100644
--- a/vendor/gopkg.in/yaml.v2/LICENSE
+++ b/vendor/gopkg.in/yaml.v2/LICENSE
@@ -1,13 +1,201 @@
-Copyright 2011-2016 Canonical Ltd.
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
- http://www.apache.org/licenses/LICENSE-2.0
+ 1. Definitions.
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/gopkg.in/yaml.v2/NOTICE b/vendor/gopkg.in/yaml.v2/NOTICE
new file mode 100644
index 000000000..866d74a7a
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/NOTICE
@@ -0,0 +1,13 @@
+Copyright 2011-2016 Canonical Ltd.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/vendor/gopkg.in/yaml.v2/README.md b/vendor/gopkg.in/yaml.v2/README.md
index 1884de6a7..b50c6e877 100644
--- a/vendor/gopkg.in/yaml.v2/README.md
+++ b/vendor/gopkg.in/yaml.v2/README.md
@@ -65,6 +65,8 @@ b:
d: [3, 4]
`
+// Note: struct fields must be public in order for unmarshal to
+// correctly populate the data.
type T struct {
A string
B struct {
diff --git a/vendor/gopkg.in/yaml.v2/apic.go b/vendor/gopkg.in/yaml.v2/apic.go
index 95ec014e8..1f7e87e67 100644
--- a/vendor/gopkg.in/yaml.v2/apic.go
+++ b/vendor/gopkg.in/yaml.v2/apic.go
@@ -2,7 +2,6 @@ package yaml
import (
"io"
- "os"
)
func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) {
@@ -48,9 +47,9 @@ func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err
return n, nil
}
-// File read handler.
-func yaml_file_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
- return parser.input_file.Read(buffer)
+// Reader read handler.
+func yaml_reader_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
+ return parser.input_reader.Read(buffer)
}
// Set a string input.
@@ -64,12 +63,12 @@ func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) {
}
// Set a file input.
-func yaml_parser_set_input_file(parser *yaml_parser_t, file *os.File) {
+func yaml_parser_set_input_reader(parser *yaml_parser_t, r io.Reader) {
if parser.read_handler != nil {
panic("must set the input source only once")
}
- parser.read_handler = yaml_file_read_handler
- parser.input_file = file
+ parser.read_handler = yaml_reader_read_handler
+ parser.input_reader = r
}
// Set the source encoding.
@@ -81,14 +80,13 @@ func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) {
}
// Create a new emitter object.
-func yaml_emitter_initialize(emitter *yaml_emitter_t) bool {
+func yaml_emitter_initialize(emitter *yaml_emitter_t) {
*emitter = yaml_emitter_t{
buffer: make([]byte, output_buffer_size),
raw_buffer: make([]byte, 0, output_raw_buffer_size),
states: make([]yaml_emitter_state_t, 0, initial_stack_size),
events: make([]yaml_event_t, 0, initial_queue_size),
}
- return true
}
// Destroy an emitter object.
@@ -102,9 +100,10 @@ func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
return nil
}
-// File write handler.
-func yaml_file_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
- _, err := emitter.output_file.Write(buffer)
+// yaml_writer_write_handler uses emitter.output_writer to write the
+// emitted text.
+func yaml_writer_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
+ _, err := emitter.output_writer.Write(buffer)
return err
}
@@ -118,12 +117,12 @@ func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]by
}
// Set a file output.
-func yaml_emitter_set_output_file(emitter *yaml_emitter_t, file io.Writer) {
+func yaml_emitter_set_output_writer(emitter *yaml_emitter_t, w io.Writer) {
if emitter.write_handler != nil {
panic("must set the output target only once")
}
- emitter.write_handler = yaml_file_write_handler
- emitter.output_file = file
+ emitter.write_handler = yaml_writer_write_handler
+ emitter.output_writer = w
}
// Set the output encoding.
@@ -252,41 +251,41 @@ func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) {
//
// Create STREAM-START.
-func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) bool {
+func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) {
*event = yaml_event_t{
typ: yaml_STREAM_START_EVENT,
encoding: encoding,
}
- return true
}
// Create STREAM-END.
-func yaml_stream_end_event_initialize(event *yaml_event_t) bool {
+func yaml_stream_end_event_initialize(event *yaml_event_t) {
*event = yaml_event_t{
typ: yaml_STREAM_END_EVENT,
}
- return true
}
// Create DOCUMENT-START.
-func yaml_document_start_event_initialize(event *yaml_event_t, version_directive *yaml_version_directive_t,
- tag_directives []yaml_tag_directive_t, implicit bool) bool {
+func yaml_document_start_event_initialize(
+ event *yaml_event_t,
+ version_directive *yaml_version_directive_t,
+ tag_directives []yaml_tag_directive_t,
+ implicit bool,
+) {
*event = yaml_event_t{
typ: yaml_DOCUMENT_START_EVENT,
version_directive: version_directive,
tag_directives: tag_directives,
implicit: implicit,
}
- return true
}
// Create DOCUMENT-END.
-func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) bool {
+func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) {
*event = yaml_event_t{
typ: yaml_DOCUMENT_END_EVENT,
implicit: implicit,
}
- return true
}
///*
@@ -348,7 +347,7 @@ func yaml_sequence_end_event_initialize(event *yaml_event_t) bool {
}
// Create MAPPING-START.
-func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) bool {
+func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) {
*event = yaml_event_t{
typ: yaml_MAPPING_START_EVENT,
anchor: anchor,
@@ -356,15 +355,13 @@ func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte
implicit: implicit,
style: yaml_style_t(style),
}
- return true
}
// Create MAPPING-END.
-func yaml_mapping_end_event_initialize(event *yaml_event_t) bool {
+func yaml_mapping_end_event_initialize(event *yaml_event_t) {
*event = yaml_event_t{
typ: yaml_MAPPING_END_EVENT,
}
- return true
}
// Destroy an event object.
@@ -471,7 +468,7 @@ func yaml_event_delete(event *yaml_event_t) {
// } context
// tag_directive *yaml_tag_directive_t
//
-// context.error = YAML_NO_ERROR // Eliminate a compliler warning.
+// context.error = YAML_NO_ERROR // Eliminate a compiler warning.
//
// assert(document) // Non-NULL document object is expected.
//
diff --git a/vendor/gopkg.in/yaml.v2/decode.go b/vendor/gopkg.in/yaml.v2/decode.go
index b13ab9f07..e4e56e28e 100644
--- a/vendor/gopkg.in/yaml.v2/decode.go
+++ b/vendor/gopkg.in/yaml.v2/decode.go
@@ -4,6 +4,7 @@ import (
"encoding"
"encoding/base64"
"fmt"
+ "io"
"math"
"reflect"
"strconv"
@@ -22,19 +23,22 @@ type node struct {
kind int
line, column int
tag string
- value string
- implicit bool
- children []*node
- anchors map[string]*node
+ // For an alias node, alias holds the resolved alias.
+ alias *node
+ value string
+ implicit bool
+ children []*node
+ anchors map[string]*node
}
// ----------------------------------------------------------------------------
// Parser, produces a node tree out of a libyaml event stream.
type parser struct {
- parser yaml_parser_t
- event yaml_event_t
- doc *node
+ parser yaml_parser_t
+ event yaml_event_t
+ doc *node
+ doneInit bool
}
func newParser(b []byte) *parser {
@@ -42,21 +46,30 @@ func newParser(b []byte) *parser {
if !yaml_parser_initialize(&p.parser) {
panic("failed to initialize YAML emitter")
}
-
if len(b) == 0 {
b = []byte{'\n'}
}
-
yaml_parser_set_input_string(&p.parser, b)
+ return &p
+}
- p.skip()
- if p.event.typ != yaml_STREAM_START_EVENT {
- panic("expected stream start event, got " + strconv.Itoa(int(p.event.typ)))
+func newParserFromReader(r io.Reader) *parser {
+ p := parser{}
+ if !yaml_parser_initialize(&p.parser) {
+ panic("failed to initialize YAML emitter")
}
- p.skip()
+ yaml_parser_set_input_reader(&p.parser, r)
return &p
}
+func (p *parser) init() {
+ if p.doneInit {
+ return
+ }
+ p.expect(yaml_STREAM_START_EVENT)
+ p.doneInit = true
+}
+
func (p *parser) destroy() {
if p.event.typ != yaml_NO_EVENT {
yaml_event_delete(&p.event)
@@ -64,16 +77,35 @@ func (p *parser) destroy() {
yaml_parser_delete(&p.parser)
}
-func (p *parser) skip() {
- if p.event.typ != yaml_NO_EVENT {
- if p.event.typ == yaml_STREAM_END_EVENT {
- failf("attempted to go past the end of stream; corrupted value?")
+// expect consumes an event from the event stream and
+// checks that it's of the expected type.
+func (p *parser) expect(e yaml_event_type_t) {
+ if p.event.typ == yaml_NO_EVENT {
+ if !yaml_parser_parse(&p.parser, &p.event) {
+ p.fail()
}
- yaml_event_delete(&p.event)
+ }
+ if p.event.typ == yaml_STREAM_END_EVENT {
+ failf("attempted to go past the end of stream; corrupted value?")
+ }
+ if p.event.typ != e {
+ p.parser.problem = fmt.Sprintf("expected %s event but got %s", e, p.event.typ)
+ p.fail()
+ }
+ yaml_event_delete(&p.event)
+ p.event.typ = yaml_NO_EVENT
+}
+
+// peek peeks at the next event in the event stream,
+// puts the results into p.event and returns the event type.
+func (p *parser) peek() yaml_event_type_t {
+ if p.event.typ != yaml_NO_EVENT {
+ return p.event.typ
}
if !yaml_parser_parse(&p.parser, &p.event) {
p.fail()
}
+ return p.event.typ
}
func (p *parser) fail() {
@@ -81,6 +113,10 @@ func (p *parser) fail() {
var line int
if p.parser.problem_mark.line != 0 {
line = p.parser.problem_mark.line
+ // Scanner errors don't iterate line before returning error
+ if p.parser.error == yaml_SCANNER_ERROR {
+ line++
+ }
} else if p.parser.context_mark.line != 0 {
line = p.parser.context_mark.line
}
@@ -103,7 +139,8 @@ func (p *parser) anchor(n *node, anchor []byte) {
}
func (p *parser) parse() *node {
- switch p.event.typ {
+ p.init()
+ switch p.peek() {
case yaml_SCALAR_EVENT:
return p.scalar()
case yaml_ALIAS_EVENT:
@@ -118,9 +155,8 @@ func (p *parser) parse() *node {
// Happens when attempting to decode an empty buffer.
return nil
default:
- panic("attempted to parse unknown event: " + strconv.Itoa(int(p.event.typ)))
+ panic("attempted to parse unknown event: " + p.event.typ.String())
}
- panic("unreachable")
}
func (p *parser) node(kind int) *node {
@@ -135,19 +171,20 @@ func (p *parser) document() *node {
n := p.node(documentNode)
n.anchors = make(map[string]*node)
p.doc = n
- p.skip()
+ p.expect(yaml_DOCUMENT_START_EVENT)
n.children = append(n.children, p.parse())
- if p.event.typ != yaml_DOCUMENT_END_EVENT {
- panic("expected end of document event but got " + strconv.Itoa(int(p.event.typ)))
- }
- p.skip()
+ p.expect(yaml_DOCUMENT_END_EVENT)
return n
}
func (p *parser) alias() *node {
n := p.node(aliasNode)
n.value = string(p.event.anchor)
- p.skip()
+ n.alias = p.doc.anchors[n.value]
+ if n.alias == nil {
+ failf("unknown anchor '%s' referenced", n.value)
+ }
+ p.expect(yaml_ALIAS_EVENT)
return n
}
@@ -157,29 +194,29 @@ func (p *parser) scalar() *node {
n.tag = string(p.event.tag)
n.implicit = p.event.implicit
p.anchor(n, p.event.anchor)
- p.skip()
+ p.expect(yaml_SCALAR_EVENT)
return n
}
func (p *parser) sequence() *node {
n := p.node(sequenceNode)
p.anchor(n, p.event.anchor)
- p.skip()
- for p.event.typ != yaml_SEQUENCE_END_EVENT {
+ p.expect(yaml_SEQUENCE_START_EVENT)
+ for p.peek() != yaml_SEQUENCE_END_EVENT {
n.children = append(n.children, p.parse())
}
- p.skip()
+ p.expect(yaml_SEQUENCE_END_EVENT)
return n
}
func (p *parser) mapping() *node {
n := p.node(mappingNode)
p.anchor(n, p.event.anchor)
- p.skip()
- for p.event.typ != yaml_MAPPING_END_EVENT {
+ p.expect(yaml_MAPPING_START_EVENT)
+ for p.peek() != yaml_MAPPING_END_EVENT {
n.children = append(n.children, p.parse(), p.parse())
}
- p.skip()
+ p.expect(yaml_MAPPING_END_EVENT)
return n
}
@@ -188,9 +225,10 @@ func (p *parser) mapping() *node {
type decoder struct {
doc *node
- aliases map[string]bool
+ aliases map[*node]bool
mapType reflect.Type
terrors []string
+ strict bool
}
var (
@@ -198,11 +236,13 @@ var (
durationType = reflect.TypeOf(time.Duration(0))
defaultMapType = reflect.TypeOf(map[interface{}]interface{}{})
ifaceType = defaultMapType.Elem()
+ timeType = reflect.TypeOf(time.Time{})
+ ptrTimeType = reflect.TypeOf(&time.Time{})
)
-func newDecoder() *decoder {
- d := &decoder{mapType: defaultMapType}
- d.aliases = make(map[string]bool)
+func newDecoder(strict bool) *decoder {
+ d := &decoder{mapType: defaultMapType, strict: strict}
+ d.aliases = make(map[*node]bool)
return d
}
@@ -251,7 +291,7 @@ func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) {
//
// If n holds a null value, prepare returns before doing anything.
func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) {
- if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "" && n.implicit) {
+ if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "~" || n.value == "" && n.implicit) {
return out, false, false
}
again := true
@@ -308,16 +348,13 @@ func (d *decoder) document(n *node, out reflect.Value) (good bool) {
}
func (d *decoder) alias(n *node, out reflect.Value) (good bool) {
- an, ok := d.doc.anchors[n.value]
- if !ok {
- failf("unknown anchor '%s' referenced", n.value)
- }
- if d.aliases[n.value] {
+ if d.aliases[n] {
+ // TODO this could actually be allowed in some circumstances.
failf("anchor '%s' value contains itself", n.value)
}
- d.aliases[n.value] = true
- good = d.unmarshal(an, out)
- delete(d.aliases, n.value)
+ d.aliases[n] = true
+ good = d.unmarshal(n.alias, out)
+ delete(d.aliases, n)
return good
}
@@ -329,7 +366,7 @@ func resetMap(out reflect.Value) {
}
}
-func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
+func (d *decoder) scalar(n *node, out reflect.Value) bool {
var tag string
var resolved interface{}
if n.tag == "" && !n.implicit {
@@ -353,9 +390,26 @@ func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
}
return true
}
- if s, ok := resolved.(string); ok && out.CanAddr() {
- if u, ok := out.Addr().Interface().(encoding.TextUnmarshaler); ok {
- err := u.UnmarshalText([]byte(s))
+ if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() {
+ // We've resolved to exactly the type we want, so use that.
+ out.Set(resolvedv)
+ return true
+ }
+ // Perhaps we can use the value as a TextUnmarshaler to
+ // set its value.
+ if out.CanAddr() {
+ u, ok := out.Addr().Interface().(encoding.TextUnmarshaler)
+ if ok {
+ var text []byte
+ if tag == yaml_BINARY_TAG {
+ text = []byte(resolved.(string))
+ } else {
+ // We let any value be unmarshaled into TextUnmarshaler.
+ // That might be more lax than we'd like, but the
+ // TextUnmarshaler itself should bowl out any dubious values.
+ text = []byte(n.value)
+ }
+ err := u.UnmarshalText(text)
if err != nil {
fail(err)
}
@@ -366,46 +420,54 @@ func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
case reflect.String:
if tag == yaml_BINARY_TAG {
out.SetString(resolved.(string))
- good = true
- } else if resolved != nil {
+ return true
+ }
+ if resolved != nil {
out.SetString(n.value)
- good = true
+ return true
}
case reflect.Interface:
if resolved == nil {
out.Set(reflect.Zero(out.Type()))
+ } else if tag == yaml_TIMESTAMP_TAG {
+ // It looks like a timestamp but for backward compatibility
+ // reasons we set it as a string, so that code that unmarshals
+ // timestamp-like values into interface{} will continue to
+ // see a string and not a time.Time.
+ // TODO(v3) Drop this.
+ out.Set(reflect.ValueOf(n.value))
} else {
out.Set(reflect.ValueOf(resolved))
}
- good = true
+ return true
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
switch resolved := resolved.(type) {
case int:
if !out.OverflowInt(int64(resolved)) {
out.SetInt(int64(resolved))
- good = true
+ return true
}
case int64:
if !out.OverflowInt(resolved) {
out.SetInt(resolved)
- good = true
+ return true
}
case uint64:
if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
out.SetInt(int64(resolved))
- good = true
+ return true
}
case float64:
if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
out.SetInt(int64(resolved))
- good = true
+ return true
}
case string:
if out.Type() == durationType {
d, err := time.ParseDuration(resolved)
if err == nil {
out.SetInt(int64(d))
- good = true
+ return true
}
}
}
@@ -414,44 +476,49 @@ func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
case int:
if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
out.SetUint(uint64(resolved))
- good = true
+ return true
}
case int64:
if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
out.SetUint(uint64(resolved))
- good = true
+ return true
}
case uint64:
if !out.OverflowUint(uint64(resolved)) {
out.SetUint(uint64(resolved))
- good = true
+ return true
}
case float64:
if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) {
out.SetUint(uint64(resolved))
- good = true
+ return true
}
}
case reflect.Bool:
switch resolved := resolved.(type) {
case bool:
out.SetBool(resolved)
- good = true
+ return true
}
case reflect.Float32, reflect.Float64:
switch resolved := resolved.(type) {
case int:
out.SetFloat(float64(resolved))
- good = true
+ return true
case int64:
out.SetFloat(float64(resolved))
- good = true
+ return true
case uint64:
out.SetFloat(float64(resolved))
- good = true
+ return true
case float64:
out.SetFloat(resolved)
- good = true
+ return true
+ }
+ case reflect.Struct:
+ if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() {
+ out.Set(resolvedv)
+ return true
}
case reflect.Ptr:
if out.Type().Elem() == reflect.TypeOf(resolved) {
@@ -459,13 +526,11 @@ func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
elem := reflect.New(out.Type().Elem())
elem.Elem().Set(reflect.ValueOf(resolved))
out.Set(elem)
- good = true
+ return true
}
}
- if !good {
- d.terror(n, tag, out)
- }
- return good
+ d.terror(n, tag, out)
+ return false
}
func settableValueOf(i interface{}) reflect.Value {
@@ -482,6 +547,10 @@ func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
switch out.Kind() {
case reflect.Slice:
out.Set(reflect.MakeSlice(out.Type(), l, l))
+ case reflect.Array:
+ if l != out.Len() {
+ failf("invalid array: want %d elements but got %d", out.Len(), l)
+ }
case reflect.Interface:
// No type hints. Will have to use a generic sequence.
iface = out
@@ -500,7 +569,9 @@ func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
j++
}
}
- out.Set(out.Slice(0, j))
+ if out.Kind() != reflect.Array {
+ out.Set(out.Slice(0, j))
+ }
if iface.IsValid() {
iface.Set(out)
}
@@ -561,7 +632,7 @@ func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
}
e := reflect.New(et).Elem()
if d.unmarshal(n.children[i+1], e) {
- out.SetMapIndex(k, e)
+ d.setMapIndex(n.children[i+1], out, k, e)
}
}
}
@@ -569,6 +640,14 @@ func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
return true
}
+func (d *decoder) setMapIndex(n *node, out, k, v reflect.Value) {
+ if d.strict && out.MapIndex(k) != zeroValue {
+ d.terrors = append(d.terrors, fmt.Sprintf("line %d: key %#v already set in map", n.line+1, k.Interface()))
+ return
+ }
+ out.SetMapIndex(k, v)
+}
+
func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) {
outt := out.Type()
if outt.Elem() != mapItemType {
@@ -616,6 +695,10 @@ func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
elemType = inlineMap.Type().Elem()
}
+ var doneFields []bool
+ if d.strict {
+ doneFields = make([]bool, len(sinfo.FieldsList))
+ }
for i := 0; i < l; i += 2 {
ni := n.children[i]
if isMerge(ni) {
@@ -626,6 +709,13 @@ func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
continue
}
if info, ok := sinfo.FieldsMap[name.String()]; ok {
+ if d.strict {
+ if doneFields[info.Id] {
+ d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.line+1, name.String(), out.Type()))
+ continue
+ }
+ doneFields[info.Id] = true
+ }
var field reflect.Value
if info.Inline == nil {
field = out.Field(info.Num)
@@ -639,7 +729,9 @@ func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
}
value := reflect.New(elemType).Elem()
d.unmarshal(n.children[i+1], value)
- inlineMap.SetMapIndex(name, value)
+ d.setMapIndex(n.children[i+1], inlineMap, name, value)
+ } else if d.strict {
+ d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.line+1, name.String(), out.Type()))
}
}
return true
diff --git a/vendor/gopkg.in/yaml.v2/emitterc.go b/vendor/gopkg.in/yaml.v2/emitterc.go
index 2befd553e..a1c2cc526 100644
--- a/vendor/gopkg.in/yaml.v2/emitterc.go
+++ b/vendor/gopkg.in/yaml.v2/emitterc.go
@@ -2,6 +2,7 @@ package yaml
import (
"bytes"
+ "fmt"
)
// Flush the buffer if needed.
@@ -664,9 +665,8 @@ func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t,
return yaml_emitter_emit_mapping_start(emitter, event)
default:
return yaml_emitter_set_emitter_error(emitter,
- "expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS")
+ fmt.Sprintf("expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS, but got %v", event.typ))
}
- return false
}
// Expect ALIAS.
@@ -843,7 +843,7 @@ func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event
return true
}
-// Write an achor.
+// Write an anchor.
func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool {
if emitter.anchor_data.anchor == nil {
return true
@@ -995,10 +995,10 @@ func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool {
break_space = false
space_break = false
- preceeded_by_whitespace = false
- followed_by_whitespace = false
- previous_space = false
- previous_break = false
+ preceded_by_whitespace = false
+ followed_by_whitespace = false
+ previous_space = false
+ previous_break = false
)
emitter.scalar_data.value = value
@@ -1017,7 +1017,7 @@ func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool {
flow_indicators = true
}
- preceeded_by_whitespace = true
+ preceded_by_whitespace = true
for i, w := 0, 0; i < len(value); i += w {
w = width(value[i])
followed_by_whitespace = i+w >= len(value) || is_blank(value, i+w)
@@ -1048,7 +1048,7 @@ func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool {
block_indicators = true
}
case '#':
- if preceeded_by_whitespace {
+ if preceded_by_whitespace {
flow_indicators = true
block_indicators = true
}
@@ -1089,7 +1089,7 @@ func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool {
}
// [Go]: Why 'z'? Couldn't be the end of the string as that's the loop condition.
- preceeded_by_whitespace = is_blankz(value, i)
+ preceded_by_whitespace = is_blankz(value, i)
}
emitter.scalar_data.multiline = line_breaks
diff --git a/vendor/gopkg.in/yaml.v2/encode.go b/vendor/gopkg.in/yaml.v2/encode.go
index 84f849955..0ee738e11 100644
--- a/vendor/gopkg.in/yaml.v2/encode.go
+++ b/vendor/gopkg.in/yaml.v2/encode.go
@@ -3,38 +3,67 @@ package yaml
import (
"encoding"
"fmt"
+ "io"
"reflect"
"regexp"
"sort"
"strconv"
"strings"
"time"
+ "unicode/utf8"
)
+// jsonNumber is the interface of the encoding/json.Number datatype.
+// Repeating the interface here avoids a dependency on encoding/json, and also
+// supports other libraries like jsoniter, which use a similar datatype with
+// the same interface. Detecting this interface is useful when dealing with
+// structures containing json.Number, which is a string under the hood. The
+// encoder should prefer the use of Int64(), Float64() and string(), in that
+// order, when encoding this type.
+type jsonNumber interface {
+ Float64() (float64, error)
+ Int64() (int64, error)
+ String() string
+}
+
type encoder struct {
emitter yaml_emitter_t
event yaml_event_t
out []byte
flow bool
+ // doneInit holds whether the initial stream_start_event has been
+ // emitted.
+ doneInit bool
}
-func newEncoder() (e *encoder) {
- e = &encoder{}
- e.must(yaml_emitter_initialize(&e.emitter))
+func newEncoder() *encoder {
+ e := &encoder{}
+ yaml_emitter_initialize(&e.emitter)
yaml_emitter_set_output_string(&e.emitter, &e.out)
yaml_emitter_set_unicode(&e.emitter, true)
- e.must(yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING))
- e.emit()
- e.must(yaml_document_start_event_initialize(&e.event, nil, nil, true))
- e.emit()
return e
}
-func (e *encoder) finish() {
- e.must(yaml_document_end_event_initialize(&e.event, true))
+func newEncoderWithWriter(w io.Writer) *encoder {
+ e := &encoder{}
+ yaml_emitter_initialize(&e.emitter)
+ yaml_emitter_set_output_writer(&e.emitter, w)
+ yaml_emitter_set_unicode(&e.emitter, true)
+ return e
+}
+
+func (e *encoder) init() {
+ if e.doneInit {
+ return
+ }
+ yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING)
e.emit()
+ e.doneInit = true
+}
+
+func (e *encoder) finish() {
e.emitter.open_ended = false
- e.must(yaml_stream_end_event_initialize(&e.event))
+ yaml_stream_end_event_initialize(&e.event)
e.emit()
}
@@ -44,9 +73,7 @@ func (e *encoder) destroy() {
func (e *encoder) emit() {
// This will internally delete the e.event value.
- if !yaml_emitter_emit(&e.emitter, &e.event) && e.event.typ != yaml_DOCUMENT_END_EVENT && e.event.typ != yaml_STREAM_END_EVENT {
- e.must(false)
- }
+ e.must(yaml_emitter_emit(&e.emitter, &e.event))
}
func (e *encoder) must(ok bool) {
@@ -59,13 +86,43 @@ func (e *encoder) must(ok bool) {
}
}
+func (e *encoder) marshalDoc(tag string, in reflect.Value) {
+ e.init()
+ yaml_document_start_event_initialize(&e.event, nil, nil, true)
+ e.emit()
+ e.marshal(tag, in)
+ yaml_document_end_event_initialize(&e.event, true)
+ e.emit()
+}
+
func (e *encoder) marshal(tag string, in reflect.Value) {
- if !in.IsValid() {
+ if !in.IsValid() || in.Kind() == reflect.Ptr && in.IsNil() {
e.nilv()
return
}
iface := in.Interface()
- if m, ok := iface.(Marshaler); ok {
+ switch m := iface.(type) {
+ case jsonNumber:
+ integer, err := m.Int64()
+ if err == nil {
+ // In this case the json.Number is a valid int64
+ in = reflect.ValueOf(integer)
+ break
+ }
+ float, err := m.Float64()
+ if err == nil {
+ // In this case the json.Number is a valid float64
+ in = reflect.ValueOf(float)
+ break
+ }
+ // fallback case - no number could be obtained
+ in = reflect.ValueOf(m.String())
+ case time.Time, *time.Time:
+ // Although time.Time implements TextMarshaler,
+ // we don't want to treat it as a string for YAML
+ // purposes because YAML has special support for
+ // timestamps.
+ case Marshaler:
v, err := m.MarshalYAML()
if err != nil {
fail(err)
@@ -75,31 +132,34 @@ func (e *encoder) marshal(tag string, in reflect.Value) {
return
}
in = reflect.ValueOf(v)
- } else if m, ok := iface.(encoding.TextMarshaler); ok {
+ case encoding.TextMarshaler:
text, err := m.MarshalText()
if err != nil {
fail(err)
}
in = reflect.ValueOf(string(text))
+ case nil:
+ e.nilv()
+ return
}
switch in.Kind() {
case reflect.Interface:
- if in.IsNil() {
- e.nilv()
- } else {
- e.marshal(tag, in.Elem())
- }
+ e.marshal(tag, in.Elem())
case reflect.Map:
e.mapv(tag, in)
case reflect.Ptr:
- if in.IsNil() {
- e.nilv()
+ if in.Type() == ptrTimeType {
+ e.timev(tag, in.Elem())
} else {
e.marshal(tag, in.Elem())
}
case reflect.Struct:
- e.structv(tag, in)
- case reflect.Slice:
+ if in.Type() == timeType {
+ e.timev(tag, in)
+ } else {
+ e.structv(tag, in)
+ }
+ case reflect.Slice, reflect.Array:
if in.Type().Elem() == mapItemType {
e.itemsv(tag, in)
} else {
@@ -191,10 +251,10 @@ func (e *encoder) mappingv(tag string, f func()) {
e.flow = false
style = yaml_FLOW_MAPPING_STYLE
}
- e.must(yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
+ yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)
e.emit()
f()
- e.must(yaml_mapping_end_event_initialize(&e.event))
+ yaml_mapping_end_event_initialize(&e.event)
e.emit()
}
@@ -240,23 +300,36 @@ var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0
func (e *encoder) stringv(tag string, in reflect.Value) {
var style yaml_scalar_style_t
s := in.String()
- rtag, rs := resolve("", s)
- if rtag == yaml_BINARY_TAG {
- if tag == "" || tag == yaml_STR_TAG {
- tag = rtag
- s = rs.(string)
- } else if tag == yaml_BINARY_TAG {
+ canUsePlain := true
+ switch {
+ case !utf8.ValidString(s):
+ if tag == yaml_BINARY_TAG {
failf("explicitly tagged !!binary data must be base64-encoded")
- } else {
+ }
+ if tag != "" {
failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag))
}
+ // It can't be encoded directly as YAML so use a binary tag
+ // and encode it as base64.
+ tag = yaml_BINARY_TAG
+ s = encodeBase64(s)
+ case tag == "":
+ // Check to see if it would resolve to a specific
+ // tag when encoded unquoted. If it doesn't,
+ // there's no need to quote it.
+ rtag, _ := resolve("", s)
+ canUsePlain = rtag == yaml_STR_TAG && !isBase60Float(s)
}
- if tag == "" && (rtag != yaml_STR_TAG || isBase60Float(s)) {
- style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
- } else if strings.Contains(s, "\n") {
+ // Note: it's possible for user code to emit invalid YAML
+ // if they explicitly specify a tag and a string containing
+ // text that's incompatible with that tag.
+ switch {
+ case strings.Contains(s, "\n"):
style = yaml_LITERAL_SCALAR_STYLE
- } else {
+ case canUsePlain:
style = yaml_PLAIN_SCALAR_STYLE
+ default:
+ style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
}
e.emitScalar(s, "", tag, style)
}
@@ -281,9 +354,20 @@ func (e *encoder) uintv(tag string, in reflect.Value) {
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
}
+func (e *encoder) timev(tag string, in reflect.Value) {
+ t := in.Interface().(time.Time)
+ s := t.Format(time.RFC3339Nano)
+ e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+}
+
func (e *encoder) floatv(tag string, in reflect.Value) {
- // FIXME: Handle 64 bits here.
- s := strconv.FormatFloat(float64(in.Float()), 'g', -1, 32)
+ // Issue #352: When formatting, use the precision of the underlying value
+ precision := 64
+ if in.Kind() == reflect.Float32 {
+ precision = 32
+ }
+
+ s := strconv.FormatFloat(in.Float(), 'g', -1, precision)
switch s {
case "+Inf":
s = ".inf"
diff --git a/vendor/gopkg.in/yaml.v2/go.mod b/vendor/gopkg.in/yaml.v2/go.mod
new file mode 100644
index 000000000..1934e8769
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/go.mod
@@ -0,0 +1,5 @@
+module "gopkg.in/yaml.v2"
+
+require (
+ "gopkg.in/check.v1" v0.0.0-20161208181325-20d25e280405
+)
diff --git a/vendor/gopkg.in/yaml.v2/parserc.go b/vendor/gopkg.in/yaml.v2/parserc.go
index 0a7037ad1..81d05dfe5 100644
--- a/vendor/gopkg.in/yaml.v2/parserc.go
+++ b/vendor/gopkg.in/yaml.v2/parserc.go
@@ -166,7 +166,6 @@ func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool
default:
panic("invalid parser state")
}
- return false
}
// Parse the production:
diff --git a/vendor/gopkg.in/yaml.v2/readerc.go b/vendor/gopkg.in/yaml.v2/readerc.go
index f45079171..7c1f5fac3 100644
--- a/vendor/gopkg.in/yaml.v2/readerc.go
+++ b/vendor/gopkg.in/yaml.v2/readerc.go
@@ -93,9 +93,18 @@ func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
panic("read handler must be set")
}
+ // [Go] This function was changed to guarantee the requested length size at EOF.
+ // The fact we need to do this is pretty awful, but the description above implies
+ // for that to be the case, and there are tests
+
// If the EOF flag is set and the raw buffer is empty, do nothing.
if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) {
- return true
+ // [Go] ACTUALLY! Read the documentation of this function above.
+ // This is just broken. To return true, we need to have the
+ // given length in the buffer. Not doing that means every single
+ // check that calls this function to make sure the buffer has a
+ // given length is Go) panicking; or C) accessing invalid memory.
+ //return true
}
// Return if the buffer contains enough characters.
@@ -389,6 +398,15 @@ func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
break
}
}
+ // [Go] Read the documentation of this function above. To return true,
+ // we need to have the given length in the buffer. Not doing that means
+ // every single check that calls this function to make sure the buffer
+ // has a given length is Go) panicking; or C) accessing invalid memory.
+ // This happens here due to the EOF above breaking early.
+ for buffer_len < length {
+ parser.buffer[buffer_len] = 0
+ buffer_len++
+ }
parser.buffer = parser.buffer[:buffer_len]
return true
}
diff --git a/vendor/gopkg.in/yaml.v2/resolve.go b/vendor/gopkg.in/yaml.v2/resolve.go
index 93a863274..6c151db6f 100644
--- a/vendor/gopkg.in/yaml.v2/resolve.go
+++ b/vendor/gopkg.in/yaml.v2/resolve.go
@@ -3,9 +3,10 @@ package yaml
import (
"encoding/base64"
"math"
+ "regexp"
"strconv"
"strings"
- "unicode/utf8"
+ "time"
)
type resolveMapItem struct {
@@ -74,12 +75,14 @@ func longTag(tag string) string {
func resolvableTag(tag string) bool {
switch tag {
- case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG:
+ case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG, yaml_TIMESTAMP_TAG:
return true
}
return false
}
+var yamlStyleFloat = regexp.MustCompile(`^[-+]?[0-9]*\.?[0-9]+([eE][-+][0-9]+)?$`)
+
func resolve(tag string, in string) (rtag string, out interface{}) {
if !resolvableTag(tag) {
return tag, in
@@ -89,6 +92,19 @@ func resolve(tag string, in string) (rtag string, out interface{}) {
switch tag {
case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG:
return
+ case yaml_FLOAT_TAG:
+ if rtag == yaml_INT_TAG {
+ switch v := out.(type) {
+ case int64:
+ rtag = yaml_FLOAT_TAG
+ out = float64(v)
+ return
+ case int:
+ rtag = yaml_FLOAT_TAG
+ out = float64(v)
+ return
+ }
+ }
}
failf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag))
}()
@@ -122,6 +138,15 @@ func resolve(tag string, in string) (rtag string, out interface{}) {
case 'D', 'S':
// Int, float, or timestamp.
+ // Only try values as a timestamp if the value is unquoted or there's an explicit
+ // !!timestamp tag.
+ if tag == "" || tag == yaml_TIMESTAMP_TAG {
+ t, ok := parseTimestamp(in)
+ if ok {
+ return yaml_TIMESTAMP_TAG, t
+ }
+ }
+
plain := strings.Replace(in, "_", "", -1)
intv, err := strconv.ParseInt(plain, 0, 64)
if err == nil {
@@ -135,9 +160,11 @@ func resolve(tag string, in string) (rtag string, out interface{}) {
if err == nil {
return yaml_INT_TAG, uintv
}
- floatv, err := strconv.ParseFloat(plain, 64)
- if err == nil {
- return yaml_FLOAT_TAG, floatv
+ if yamlStyleFloat.MatchString(plain) {
+ floatv, err := strconv.ParseFloat(plain, 64)
+ if err == nil {
+ return yaml_FLOAT_TAG, floatv
+ }
}
if strings.HasPrefix(plain, "0b") {
intv, err := strconv.ParseInt(plain[2:], 2, 64)
@@ -153,28 +180,20 @@ func resolve(tag string, in string) (rtag string, out interface{}) {
return yaml_INT_TAG, uintv
}
} else if strings.HasPrefix(plain, "-0b") {
- intv, err := strconv.ParseInt(plain[3:], 2, 64)
+ intv, err := strconv.ParseInt("-" + plain[3:], 2, 64)
if err == nil {
- if intv == int64(int(intv)) {
- return yaml_INT_TAG, -int(intv)
+ if true || intv == int64(int(intv)) {
+ return yaml_INT_TAG, int(intv)
} else {
- return yaml_INT_TAG, -intv
+ return yaml_INT_TAG, intv
}
}
}
- // XXX Handle timestamps here.
-
default:
panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")")
}
}
- if tag == yaml_BINARY_TAG {
- return yaml_BINARY_TAG, in
- }
- if utf8.ValidString(in) {
- return yaml_STR_TAG, in
- }
- return yaml_BINARY_TAG, encodeBase64(in)
+ return yaml_STR_TAG, in
}
// encodeBase64 encodes s as base64 that is broken up into multiple lines
@@ -201,3 +220,39 @@ func encodeBase64(s string) string {
}
return string(out[:k])
}
+
+// This is a subset of the formats allowed by the regular expression
+// defined at http://yaml.org/type/timestamp.html.
+var allowedTimestampFormats = []string{
+ "2006-1-2T15:4:5.999999999Z07:00", // RFC3339Nano with short date fields.
+ "2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t".
+ "2006-1-2 15:4:5.999999999", // space separated with no time zone
+ "2006-1-2", // date only
+ // Notable exception: time.Parse cannot handle: "2001-12-14 21:59:43.10 -5"
+ // from the set of examples.
+}
+
+// parseTimestamp parses s as a timestamp string and
+// returns the timestamp and reports whether it succeeded.
+// Timestamp formats are defined at http://yaml.org/type/timestamp.html
+func parseTimestamp(s string) (time.Time, bool) {
+ // TODO write code to check all the formats supported by
+ // http://yaml.org/type/timestamp.html instead of using time.Parse.
+
+ // Quick check: all date formats start with YYYY-.
+ i := 0
+ for ; i < len(s); i++ {
+ if c := s[i]; c < '0' || c > '9' {
+ break
+ }
+ }
+ if i != 4 || i == len(s) || s[i] != '-' {
+ return time.Time{}, false
+ }
+ for _, format := range allowedTimestampFormats {
+ if t, err := time.Parse(format, s); err == nil {
+ return t, true
+ }
+ }
+ return time.Time{}, false
+}
diff --git a/vendor/gopkg.in/yaml.v2/scannerc.go b/vendor/gopkg.in/yaml.v2/scannerc.go
index 25808000f..077fd1dd2 100644
--- a/vendor/gopkg.in/yaml.v2/scannerc.go
+++ b/vendor/gopkg.in/yaml.v2/scannerc.go
@@ -9,7 +9,7 @@ import (
// ************
//
// The following notes assume that you are familiar with the YAML specification
-// (http://yaml.org/spec/cvs/current.html). We mostly follow it, although in
+// (http://yaml.org/spec/1.2/spec.html). We mostly follow it, although in
// some cases we are less restrictive that it requires.
//
// The process of transforming a YAML stream into a sequence of events is
@@ -611,7 +611,7 @@ func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, co
if directive {
context = "while parsing a %TAG directive"
}
- return yaml_parser_set_scanner_error(parser, context, context_mark, "did not find URI escaped octet")
+ return yaml_parser_set_scanner_error(parser, context, context_mark, problem)
}
func trace(args ...interface{}) func() {
@@ -871,12 +871,6 @@ func yaml_parser_save_simple_key(parser *yaml_parser_t) bool {
required := parser.flow_level == 0 && parser.indent == parser.mark.column
- // A simple key is required only when it is the first token in the current
- // line. Therefore it is always allowed. But we add a check anyway.
- if required && !parser.simple_key_allowed {
- panic("should not happen")
- }
-
//
// If the current position may start a simple key, save it.
//
@@ -1944,7 +1938,7 @@ func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_ma
} else {
// It's either the '!' tag or not really a tag handle. If it's a %TAG
// directive, it's an error. If it's a tag token, it must be a part of URI.
- if directive && !(s[0] == '!' && s[1] == 0) {
+ if directive && string(s) != "!" {
yaml_parser_set_scanner_tag_error(parser, directive,
start_mark, "did not find expected '!'")
return false
@@ -1959,6 +1953,7 @@ func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_ma
func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte, start_mark yaml_mark_t, uri *[]byte) bool {
//size_t length = head ? strlen((char *)head) : 0
var s []byte
+ hasTag := len(head) > 0
// Copy the head if needed.
//
@@ -2000,10 +1995,10 @@ func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte
if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
return false
}
+ hasTag = true
}
- // Check if the tag is non-empty.
- if len(s) == 0 {
+ if !hasTag {
yaml_parser_set_scanner_tag_error(parser, directive,
start_mark, "did not find expected tag URI")
return false
@@ -2474,6 +2469,10 @@ func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, si
}
}
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+
// Check if we are at the end of the scalar.
if single {
if parser.buffer[parser.buffer_pos] == '\'' {
@@ -2486,10 +2485,6 @@ func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, si
}
// Consume blank characters.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
-
for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) {
if is_blank(parser.buffer, parser.buffer_pos) {
// Consume a space or a tab character.
@@ -2591,19 +2586,10 @@ func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) b
// Consume non-blank characters.
for !is_blankz(parser.buffer, parser.buffer_pos) {
- // Check for 'x:x' in the flow context. TODO: Fix the test "spec-08-13".
- if parser.flow_level > 0 &&
- parser.buffer[parser.buffer_pos] == ':' &&
- !is_blankz(parser.buffer, parser.buffer_pos+1) {
- yaml_parser_set_scanner_error(parser, "while scanning a plain scalar",
- start_mark, "found unexpected ':'")
- return false
- }
-
// Check for indicators that may end a plain scalar.
if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) ||
(parser.flow_level > 0 &&
- (parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == ':' ||
+ (parser.buffer[parser.buffer_pos] == ',' ||
parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == '[' ||
parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' ||
parser.buffer[parser.buffer_pos] == '}')) {
@@ -2655,10 +2641,10 @@ func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) b
for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) {
if is_blank(parser.buffer, parser.buffer_pos) {
- // Check for tab character that abuse indentation.
+ // Check for tab characters that abuse indentation.
if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) {
yaml_parser_set_scanner_error(parser, "while scanning a plain scalar",
- start_mark, "found a tab character that violate indentation")
+ start_mark, "found a tab character that violates indentation")
return false
}
diff --git a/vendor/gopkg.in/yaml.v2/sorter.go b/vendor/gopkg.in/yaml.v2/sorter.go
index 5958822f9..4c45e660a 100644
--- a/vendor/gopkg.in/yaml.v2/sorter.go
+++ b/vendor/gopkg.in/yaml.v2/sorter.go
@@ -51,6 +51,15 @@ func (l keyList) Less(i, j int) bool {
}
var ai, bi int
var an, bn int64
+ if ar[i] == '0' || br[i] == '0' {
+ for j := i-1; j >= 0 && unicode.IsDigit(ar[j]); j-- {
+ if ar[j] != '0' {
+ an = 1
+ bn = 1
+ break
+ }
+ }
+ }
for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ {
an = an*10 + int64(ar[ai]-'0')
}
diff --git a/vendor/gopkg.in/yaml.v2/writerc.go b/vendor/gopkg.in/yaml.v2/writerc.go
index 190362f25..a2dde608c 100644
--- a/vendor/gopkg.in/yaml.v2/writerc.go
+++ b/vendor/gopkg.in/yaml.v2/writerc.go
@@ -18,72 +18,9 @@ func yaml_emitter_flush(emitter *yaml_emitter_t) bool {
return true
}
- // If the output encoding is UTF-8, we don't need to recode the buffer.
- if emitter.encoding == yaml_UTF8_ENCODING {
- if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil {
- return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
- }
- emitter.buffer_pos = 0
- return true
- }
-
- // Recode the buffer into the raw buffer.
- var low, high int
- if emitter.encoding == yaml_UTF16LE_ENCODING {
- low, high = 0, 1
- } else {
- high, low = 1, 0
- }
-
- pos := 0
- for pos < emitter.buffer_pos {
- // See the "reader.c" code for more details on UTF-8 encoding. Note
- // that we assume that the buffer contains a valid UTF-8 sequence.
-
- // Read the next UTF-8 character.
- octet := emitter.buffer[pos]
-
- var w int
- var value rune
- switch {
- case octet&0x80 == 0x00:
- w, value = 1, rune(octet&0x7F)
- case octet&0xE0 == 0xC0:
- w, value = 2, rune(octet&0x1F)
- case octet&0xF0 == 0xE0:
- w, value = 3, rune(octet&0x0F)
- case octet&0xF8 == 0xF0:
- w, value = 4, rune(octet&0x07)
- }
- for k := 1; k < w; k++ {
- octet = emitter.buffer[pos+k]
- value = (value << 6) + (rune(octet) & 0x3F)
- }
- pos += w
-
- // Write the character.
- if value < 0x10000 {
- var b [2]byte
- b[high] = byte(value >> 8)
- b[low] = byte(value & 0xFF)
- emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1])
- } else {
- // Write the character using a surrogate pair (check "reader.c").
- var b [4]byte
- value -= 0x10000
- b[high] = byte(0xD8 + (value >> 18))
- b[low] = byte((value >> 10) & 0xFF)
- b[high+2] = byte(0xDC + ((value >> 8) & 0xFF))
- b[low+2] = byte(value & 0xFF)
- emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1], b[2], b[3])
- }
- }
-
- // Write the raw buffer.
- if err := emitter.write_handler(emitter, emitter.raw_buffer); err != nil {
+ if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil {
return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
}
emitter.buffer_pos = 0
- emitter.raw_buffer = emitter.raw_buffer[:0]
return true
}
diff --git a/vendor/gopkg.in/yaml.v2/yaml.go b/vendor/gopkg.in/yaml.v2/yaml.go
index 36d6b883a..de85aa4cd 100644
--- a/vendor/gopkg.in/yaml.v2/yaml.go
+++ b/vendor/gopkg.in/yaml.v2/yaml.go
@@ -9,6 +9,7 @@ package yaml
import (
"errors"
"fmt"
+ "io"
"reflect"
"strings"
"sync"
@@ -77,8 +78,65 @@ type Marshaler interface {
// supported tag options.
//
func Unmarshal(in []byte, out interface{}) (err error) {
+ return unmarshal(in, out, false)
+}
+
+// UnmarshalStrict is like Unmarshal except that any fields that are found
+// in the data that do not have corresponding struct members, or mapping
+// keys that are duplicates, will result in
+// an error.
+func UnmarshalStrict(in []byte, out interface{}) (err error) {
+ return unmarshal(in, out, true)
+}
+
+// A Decoder reads and decodes YAML values from an input stream.
+type Decoder struct {
+ strict bool
+ parser *parser
+}
+
+// NewDecoder returns a new decoder that reads from r.
+//
+// The decoder introduces its own buffering and may read
+// data from r beyond the YAML values requested.
+func NewDecoder(r io.Reader) *Decoder {
+ return &Decoder{
+ parser: newParserFromReader(r),
+ }
+}
+
+// SetStrict sets whether strict decoding behaviour is enabled when
+// decoding items in the data (see UnmarshalStrict). By default, decoding is not strict.
+func (dec *Decoder) SetStrict(strict bool) {
+ dec.strict = strict
+}
+
+// Decode reads the next YAML-encoded value from its input
+// and stores it in the value pointed to by v.
+//
+// See the documentation for Unmarshal for details about the
+// conversion of YAML into a Go value.
+func (dec *Decoder) Decode(v interface{}) (err error) {
+ d := newDecoder(dec.strict)
+ defer handleErr(&err)
+ node := dec.parser.parse()
+ if node == nil {
+ return io.EOF
+ }
+ out := reflect.ValueOf(v)
+ if out.Kind() == reflect.Ptr && !out.IsNil() {
+ out = out.Elem()
+ }
+ d.unmarshal(node, out)
+ if len(d.terrors) > 0 {
+ return &TypeError{d.terrors}
+ }
+ return nil
+}
+
+func unmarshal(in []byte, out interface{}, strict bool) (err error) {
defer handleErr(&err)
- d := newDecoder()
+ d := newDecoder(strict)
p := newParser(in)
defer p.destroy()
node := p.parse()
@@ -99,8 +157,8 @@ func Unmarshal(in []byte, out interface{}) (err error) {
// of the generated document will reflect the structure of the value itself.
// Maps and pointers (to struct, string, int, etc) are accepted as the in value.
//
-// Struct fields are only unmarshalled if they are exported (have an upper case
-// first letter), and are unmarshalled using the field name lowercased as the
+// Struct fields are only marshalled if they are exported (have an upper case
+// first letter), and are marshalled using the field name lowercased as the
// default key. Custom keys may be defined via the "yaml" name in the field
// tag: the content preceding the first comma is used as the key, and the
// following comma-separated options are used to tweak the marshalling process.
@@ -114,7 +172,10 @@ func Unmarshal(in []byte, out interface{}) (err error) {
//
// omitempty Only include the field if it's not set to the zero
// value for the type or to empty slices or maps.
-// Does not apply to zero valued structs.
+// Zero valued structs will be omitted if all their public
+// fields are zero, unless they implement an IsZero
+// method (see the IsZeroer interface type), in which
+// case the field will be excluded if that method returns true.
//
// flow Marshal using a flow style (useful for structs,
// sequences and maps).
@@ -129,7 +190,7 @@ func Unmarshal(in []byte, out interface{}) (err error) {
// For example:
//
// type T struct {
-// F int "a,omitempty"
+// F int `yaml:"a,omitempty"`
// B int
// }
// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n"
@@ -139,12 +200,47 @@ func Marshal(in interface{}) (out []byte, err error) {
defer handleErr(&err)
e := newEncoder()
defer e.destroy()
- e.marshal("", reflect.ValueOf(in))
+ e.marshalDoc("", reflect.ValueOf(in))
e.finish()
out = e.out
return
}
+// An Encoder writes YAML values to an output stream.
+type Encoder struct {
+ encoder *encoder
+}
+
+// NewEncoder returns a new encoder that writes to w.
+// The Encoder should be closed after use to flush all data
+// to w.
+func NewEncoder(w io.Writer) *Encoder {
+ return &Encoder{
+ encoder: newEncoderWithWriter(w),
+ }
+}
+
+// Encode writes the YAML encoding of v to the stream.
+// If multiple items are encoded to the stream, the
+// second and subsequent document will be preceded
+// with a "---" document separator, but the first will not.
+//
+// See the documentation for Marshal for details about the conversion of Go
+// values to YAML.
+func (e *Encoder) Encode(v interface{}) (err error) {
+ defer handleErr(&err)
+ e.encoder.marshalDoc("", reflect.ValueOf(v))
+ return nil
+}
+
+// Close closes the encoder by writing any remaining data.
+// It does not write a stream terminating string "...".
+func (e *Encoder) Close() (err error) {
+ defer handleErr(&err)
+ e.encoder.finish()
+ return nil
+}
+
func handleErr(err *error) {
if v := recover(); v != nil {
if e, ok := v.(yamlError); ok {
@@ -200,6 +296,9 @@ type fieldInfo struct {
Num int
OmitEmpty bool
Flow bool
+ // Id holds the unique field identifier, so we can cheaply
+ // check for field duplicates without maintaining an extra map.
+ Id int
// Inline holds the field index if the field is part of an inlined struct.
Inline []int
@@ -279,6 +378,7 @@ func getStructInfo(st reflect.Type) (*structInfo, error) {
} else {
finfo.Inline = append([]int{i}, finfo.Inline...)
}
+ finfo.Id = len(fieldsList)
fieldsMap[finfo.Key] = finfo
fieldsList = append(fieldsList, finfo)
}
@@ -300,11 +400,16 @@ func getStructInfo(st reflect.Type) (*structInfo, error) {
return nil, errors.New(msg)
}
+ info.Id = len(fieldsList)
fieldsList = append(fieldsList, info)
fieldsMap[info.Key] = info
}
- sinfo = &structInfo{fieldsMap, fieldsList, inlineMap}
+ sinfo = &structInfo{
+ FieldsMap: fieldsMap,
+ FieldsList: fieldsList,
+ InlineMap: inlineMap,
+ }
fieldMapMutex.Lock()
structMap[st] = sinfo
@@ -312,8 +417,23 @@ func getStructInfo(st reflect.Type) (*structInfo, error) {
return sinfo, nil
}
+// IsZeroer is used to check whether an object is zero to
+// determine whether it should be omitted when marshaling
+// with the omitempty flag. One notable implementation
+// is time.Time.
+type IsZeroer interface {
+ IsZero() bool
+}
+
func isZero(v reflect.Value) bool {
- switch v.Kind() {
+ kind := v.Kind()
+ if z, ok := v.Interface().(IsZeroer); ok {
+ if (kind == reflect.Ptr || kind == reflect.Interface) && v.IsNil() {
+ return true
+ }
+ return z.IsZero()
+ }
+ switch kind {
case reflect.String:
return len(v.String()) == 0
case reflect.Interface, reflect.Ptr:
diff --git a/vendor/gopkg.in/yaml.v2/yamlh.go b/vendor/gopkg.in/yaml.v2/yamlh.go
index d60a6b6b0..e25cee563 100644
--- a/vendor/gopkg.in/yaml.v2/yamlh.go
+++ b/vendor/gopkg.in/yaml.v2/yamlh.go
@@ -1,6 +1,7 @@
package yaml
import (
+ "fmt"
"io"
)
@@ -239,6 +240,27 @@ const (
yaml_MAPPING_END_EVENT // A MAPPING-END event.
)
+var eventStrings = []string{
+ yaml_NO_EVENT: "none",
+ yaml_STREAM_START_EVENT: "stream start",
+ yaml_STREAM_END_EVENT: "stream end",
+ yaml_DOCUMENT_START_EVENT: "document start",
+ yaml_DOCUMENT_END_EVENT: "document end",
+ yaml_ALIAS_EVENT: "alias",
+ yaml_SCALAR_EVENT: "scalar",
+ yaml_SEQUENCE_START_EVENT: "sequence start",
+ yaml_SEQUENCE_END_EVENT: "sequence end",
+ yaml_MAPPING_START_EVENT: "mapping start",
+ yaml_MAPPING_END_EVENT: "mapping end",
+}
+
+func (e yaml_event_type_t) String() string {
+ if e < 0 || int(e) >= len(eventStrings) {
+ return fmt.Sprintf("unknown event %d", e)
+ }
+ return eventStrings[e]
+}
+
// The event structure.
type yaml_event_t struct {
@@ -508,7 +530,7 @@ type yaml_parser_t struct {
problem string // Error description.
- // The byte about which the problem occured.
+ // The byte about which the problem occurred.
problem_offset int
problem_value int
problem_mark yaml_mark_t
@@ -521,9 +543,9 @@ type yaml_parser_t struct {
read_handler yaml_read_handler_t // Read handler.
- input_file io.Reader // File input data.
- input []byte // String input data.
- input_pos int
+ input_reader io.Reader // File input data.
+ input []byte // String input data.
+ input_pos int
eof bool // EOF flag
@@ -632,7 +654,7 @@ type yaml_emitter_t struct {
write_handler yaml_write_handler_t // Write handler.
output_buffer *[]byte // String output data.
- output_file io.Writer // File output data.
+ output_writer io.Writer // File output data.
buffer []byte // The working buffer.
buffer_pos int // The current position of the buffer.